From 5d4324ddb6ed5abad231295d63e28cdf779e5e07 Mon Sep 17 00:00:00 2001
From: Mike Taves
Date: Tue, 31 Aug 2021 06:00:11 +1200
Subject: [PATCH] style: use f-strings to format str (#1212)

---
 .docs/conf.py | 8 +-
 .docs/create_rstfiles.py | 6 +-
 .docs/create_tutorials.py | 2 +-
 .docs/pysrc/tutorial1.py | 6 +-
 .docs/pysrc/tutorial2.py | 10 +-
 autotest/autotest_notebooks.py | 2 +-
 autotest/autotest_scripts.py | 12 +-
 autotest/get_exes.py | 10 +-
 autotest/t004_test_utilarray.py | 6 +-
 autotest/t005_test.py | 8 +-
 autotest/t006_test.py | 2 +-
 autotest/t007_test.py | 147 ++++---------
 autotest/t008_test.py | 12 +-
 autotest/t010_test.py | 8 +-
 autotest/t011_test.py | 5 +-
 autotest/t012_test.py | 44 ++---
 autotest/t013_test.py | 68 ++-----
 autotest/t014_test.py | 2 +-
 autotest/t016_test.py | 18 +-
 autotest/t017_test.py | 20 +-
 autotest/t019_test.py | 28 ++-
 autotest/t020_test.py | 10 +-
 autotest/t023_test.py | 2 +-
 autotest/t024_test.py | 2 +-
 autotest/t025_test.py | 30 ++-
 autotest/t026_test.py | 4 +-
 autotest/t027_test.py | 16 +-
 autotest/t028_test.py | 8 +-
 autotest/t029_test.py | 100 ++++------
 autotest/t031_test.py | 8 +-
 autotest/t035_test.py | 16 +-
 autotest/t036_test.py | 10 +-
 autotest/t037_test.py | 4 +-
 autotest/t038_test.py | 2 +-
 autotest/t039_test.py | 12 +-
 autotest/t040_test.py | 10 +-
 autotest/t041_test.py | 26 ++-
 autotest/t043_test.py | 5 +-
 autotest/t044_test.py | 8 +-
 autotest/t045_test.py | 8 +-
 autotest/t046_test.py | 12 +-
 autotest/t047_test.py | 30 +--
 autotest/t048_test.py | 8 +-
 autotest/t049_test.py | 69 +++----
 autotest/t050_test.py | 102 +++++------
 autotest/t051_test.py | 6 +-
 autotest/t052_test.py | 14 +-
 autotest/t054_test_mfnwt.py | 8 +-
 autotest/t057_test_mp7.py | 33 ++--
 autotest/t058_test_mp7.py | 77 ++++----
 autotest/t059_test_mp7.py | 12 +-
 autotest/t060_test_lkt.py | 4 +-
 autotest/t061_test_gridgen.py | 5 +-
 autotest/t062_test_intersect.py | 16 +-
 autotest/t063_test_lgrutil.py | 6 +-
 autotest/t064_test_performance.py | 20 +-
 autotest/t065_test_gridintersect.py | 6 +-
 autotest/t067_test_ulstrd.py | 24 +--
 autotest/t070_test_quasi3layers.py | 6 +-
 autotest/t072_test_spedis.py | 18 +-
 autotest/t073_test_cvfd.py | 2 +-
 autotest/t075_ugridtests.py | 20 +-
 autotest/t078_lake_connections.py | 37 ++--
 autotest/t501_test.py | 8 +-
 autotest/t502_test.py | 12 +-
 autotest/t503_test.py | 12 +-
 autotest/t504_test.py | 18 +-
 autotest/t505_test.py | 131 ++++++-------
 autotest/t506_test.py | 18 +-
 autotest/t550_test.py | 19 +-
 .../Testing/flopy3_CrossSectionExample.py | 6 +-
 examples/Testing/testunitcbc.py | 4 +-
 examples/Tutorials/modflow/tutorial01_mf.py | 6 +-
 examples/Tutorials/modflow/tutorial02_mf.py | 8 +-
 examples/Tutorials/modflow6/tutorial01_mf6.py | 10 +-
 .../modflow6data/tutorial01_mf6_data.py | 4 +-
 .../modflow6data/tutorial02_mf6_data.py | 2 +-
 .../modflow6data/tutorial03_mf6_data.py | 2 +-
 .../modflow6data/tutorial04_mf6_data.py | 2 +-
 .../modflow6data/tutorial05_mf6_data.py | 2 +-
 .../modflow6data/tutorial06_mf6_data.py | 2 +-
 .../modflow6data/tutorial07_mf6_data.py | 2 +-
 .../modflow6data/tutorial08_mf6_data.py | 2 +-
 examples/common/setup_pmv_demo.py | 30 +--
 examples/scripts/flopy_henry.py | 8 +-
 examples/scripts/flopy_lake_example.py | 12 +-
 examples/scripts/flopy_swi2_ex1.py | 10 +-
 examples/scripts/flopy_swi2_ex2.py | 8 +-
 examples/scripts/flopy_swi2_ex3.py | 10 +-
 examples/scripts/flopy_swi2_ex4.py | 20 +-
 examples/scripts/flopy_swi2_ex5.py | 18 +-
 flopy/discretization/grid.py | 14 +-
 flopy/discretization/structuredgrid.py | 9 +-
 flopy/discretization/unstructuredgrid.py | 23 +--
 flopy/discretization/vertexgrid.py | 13 +-
 flopy/export/metadata.py | 8 +-
 flopy/export/netcdf.py | 153 +++++++---------
 flopy/export/shapefile_utils.py | 46 +++--
 flopy/export/utils.py | 146 ++++---------
 flopy/export/vtk.py | 60 +++---
 flopy/mbase.py | 84 ++++-----
 flopy/mf6/coordinates/modelgrid.py | 10 +-
 flopy/mf6/coordinates/simulationtime.py | 3 +-
 flopy/mf6/data/mfdata.py | 22 +--
 flopy/mf6/data/mfdataarray.py | 77 +++-----
 flopy/mf6/data/mfdatalist.py | 31 ++--
 flopy/mf6/data/mfdatascalar.py | 24 +--
 flopy/mf6/data/mfdatastorage.py | 56 ++----
 flopy/mf6/data/mfdatautil.py | 24 ++-
 flopy/mf6/data/mffileaccess.py | 45 ++---
 flopy/mf6/data/mfstructure.py | 69 +++----
 flopy/mf6/mfbase.py | 42 ++---
 flopy/mf6/mfmodel.py | 51 +++---
 flopy/mf6/mfpackage.py | 107 ++++------
 flopy/mf6/modflow/mfsimulation.py | 70 +++----
 flopy/mf6/utils/binaryfile_utils.py | 18 +-
 flopy/mf6/utils/binarygrid_util.py | 34 +---
 flopy/mf6/utils/createpackages.py | 172 +++++-----------
 flopy/mf6/utils/generate_classes.py | 32 ++--
 flopy/mf6/utils/lakpak_utils.py | 6 +-
 flopy/mf6/utils/mfobservation.py | 13 +-
 flopy/mf6/utils/output_util.py | 19 +-
 flopy/mf6/utils/postprocessing.py | 3 +-
 flopy/mf6/utils/reference.py | 32 +---
 flopy/modflow/mf.py | 117 +++++-------
 flopy/modflow/mfag.py | 62 +++----
 flopy/modflow/mfbas.py | 11 +-
 flopy/modflow/mfbcf.py | 28 +--
 flopy/modflow/mfchd.py | 8 +-
 flopy/modflow/mfde4.py | 38 ++--
 flopy/modflow/mfdis.py | 39 ++--
 flopy/modflow/mfdisu.py | 58 +++---
 flopy/modflow/mfdrn.py | 13 +-
 flopy/modflow/mfdrt.py | 13 +-
 flopy/modflow/mfevt.py | 34 +---
 flopy/modflow/mffhb.py | 60 +++---
 flopy/modflow/mfflwob.py | 41 ++---
 flopy/modflow/mfgage.py | 8 +-
 flopy/modflow/mfghb.py | 10 +-
 flopy/modflow/mfgmg.py | 16 +-
 flopy/modflow/mfhfb.py | 8 +-
 flopy/modflow/mfhob.py | 68 ++++---
 flopy/modflow/mfhyd.py | 28 ++-
 flopy/modflow/mflak.py | 34 ++--
 flopy/modflow/mflmt.py | 25 +--
 flopy/modflow/mflpf.py | 30 ++-
 flopy/modflow/mfmlt.py | 7 +-
 flopy/modflow/mfmnw2.py | 24 +--
 flopy/modflow/mfmnwi.py | 20 +-
 flopy/modflow/mfnwt.py | 48 ++---
 flopy/modflow/mfoc.py | 72 +++-----
 flopy/modflow/mfpar.py | 26 +--
 flopy/modflow/mfparbc.py | 4 +-
 flopy/modflow/mfpbc.py | 11 +-
 flopy/modflow/mfpcg.py | 50 ++---
 flopy/modflow/mfpcgn.py | 46 ++---
 flopy/modflow/mfpks.py | 44 ++---
 flopy/modflow/mfpval.py | 2 +-
 flopy/modflow/mfrch.py | 29 ++-
 flopy/modflow/mfriv.py | 14 +-
 flopy/modflow/mfsfr2.py | 107 +++++------
 flopy/modflow/mfsip.py | 22 +--
 flopy/modflow/mfsms.py | 60 +++---
 flopy/modflow/mfsor.py | 16 +-
 flopy/modflow/mfstr.py | 23 +--
 flopy/modflow/mfsub.py | 82 ++++-----
 flopy/modflow/mfswi2.py | 46 ++---
 flopy/modflow/mfswt.py | 59 +++---
 flopy/modflow/mfupw.py | 18 +-
 flopy/modflow/mfuzf1.py | 30 +--
 flopy/modflow/mfwel.py | 14 +-
 flopy/modflow/mfzon.py | 4 +-
 flopy/modflowlgr/mflgr.py | 67 +++----
 flopy/modpath/mp6.py | 24 ++-
 flopy/modpath/mp6bas.py | 10 +-
 flopy/modpath/mp6sim.py | 88 ++++-----
 flopy/modpath/mp7.py | 22 +--
 flopy/modpath/mp7bas.py | 12 +-
 flopy/modpath/mp7particledata.py | 28 +--
 flopy/modpath/mp7particlegroup.py | 28 +--
 flopy/modpath/mp7sim.py | 63 +++----
 flopy/mt3d/mt.py | 67 +++----
 flopy/mt3d/mtadv.py | 32 ++--
 flopy/mt3d/mtbtn.py | 129 +++++++------
 flopy/mt3d/mtdsp.py | 8 +-
 flopy/mt3d/mtgcg.py | 22 +--
 flopy/mt3d/mtlkt.py | 39 ++--
 flopy/mt3d/mtrct.py | 70 +++----
 flopy/mt3d/mtsft.py | 65 +++----
 flopy/mt3d/mtssm.py | 56 +++---
 flopy/mt3d/mtuzt.py | 92 ++++------
 flopy/pakbase.py | 87 +++------
 flopy/pest/params.py | 2 +-
 flopy/pest/templatewriter.py | 7 +-
 flopy/pest/tplarray.py | 22 +--
 flopy/plot/crosssection.py | 20 +-
 flopy/plot/map.py | 12 +-
 flopy/plot/plotutil.py | 75 ++++----
 flopy/plot/styles.py | 12 +-
 flopy/seawat/swt.py | 30 ++-
 flopy/seawat/swtvdf.py | 15 +-
 flopy/seawat/swtvsc.py | 61 +++----
 flopy/utils/binaryfile.py | 97 ++++------
 flopy/utils/check.py | 30 ++-
 flopy/utils/cvfdutil.py | 20 +-
 flopy/utils/datafile.py | 29 ++-
 flopy/utils/datautil.py | 14 +-
 flopy/utils/flopy_io.py | 25 +--
 flopy/utils/formattedfile.py | 4 +-
 flopy/utils/geometry.py | 8 +-
 flopy/utils/gridgen.py | 132 ++++++--------
 flopy/utils/gridintersect.py | 8 +-
 flopy/utils/mflistfile.py | 26 ++-
 flopy/utils/mfreadnam.py | 34 ++--
 flopy/utils/modpathfile.py | 32 ++--
 flopy/utils/mtlistfile.py | 42 ++---
 flopy/utils/observationfile.py | 6 +-
 flopy/utils/optionblock.py | 15 +-
 flopy/utils/postprocessing.py | 7 +-
 flopy/utils/rasters.py | 4 +-
 flopy/utils/sfroutputfile.py | 6 +-
 flopy/utils/swroutputfile.py | 7 +-
 flopy/utils/triangle.py | 50 +++--
 flopy/utils/util_array.py | 159 +++++++---------
 flopy/utils/util_list.py | 61 +++----
 flopy/utils/voronoi.py | 4 +-
 flopy/utils/zonbud.py | 91 ++++-----
 flopy/version.py | 2 +-
 release/make-release.py | 53 +++---
 release/run_notebooks.py | 2 +-
 release/update-version_changes.py | 18 +-
 231 files changed, 2940 insertions(+), 4080 deletions(-)

diff --git a/.docs/conf.py b/.docs/conf.py index baa65da88..d18a44d0d 100644 --- a/.docs/conf.py +++ b/.docs/conf.py @@ -37,9 +37,9 @@ with open(rst_name, "w") as f: for line in lines: if line.startswith("**Documentation for version"): - line = "**Documentation for version {}".format(__version__) + line = f"**Documentation for version {__version__}" if rc_text != "": - line += " --- {}".format(rc_text) + line += f" --- {rc_text}" line += "**\n" f.write(line) @@ -64,7 +64,7 @@ ) authors = __author__.split(sep=",") for author in authors: - line += " * {}\n".format(author.strip()) + line += f" * {author.strip()}\n" line += " * and others\n\n" f.write(line) elif line.startswith(tag_end): @@ -91,7 +91,7 @@ # -- Project information ----------------------------------------------------- project = "flopy Documentation" -copyright = "2021, {}".format(__author__) +copyright = f"2021, {__author__}" author = __author__ # The version. diff --git a/.docs/create_rstfiles.py b/.docs/create_rstfiles.py index 72df835d3..5a428f3d8 100644 --- a/.docs/create_rstfiles.py +++ b/.docs/create_rstfiles.py @@ -4,11 +4,11 @@ def create_section(f, title, filenames, upper_case=False): if upper_case: title = title.upper() - title = "{} Tutorials".format(title) - line = "{}\n".format(title) + len(title) * "-" + "\n\n" + title = f"{title} Tutorials" + line = f"{title}\n" + len(title) * "-" + "\n\n" line += "Contents:\n\n.. 
toctree::\n :maxdepth: 2\n\n" for filename in filenames: - line += " _notebooks/{}\n".format(filename) + line += f" _notebooks/{filename}\n" line += "\n\n" f.write(line) diff --git a/.docs/create_tutorials.py b/.docs/create_tutorials.py index 76158705c..dacc696a1 100644 --- a/.docs/create_tutorials.py +++ b/.docs/create_tutorials.py @@ -22,7 +22,7 @@ def create_notebooks(): # copy the python files for src in py_files: dst = os.path.join(wpth, os.path.basename(src)) - print("{} -> {}".format(src, dst)) + print(f"{src} -> {dst}") shutil.copyfile(src, dst) # create and run notebooks diff --git a/.docs/pysrc/tutorial1.py b/.docs/pysrc/tutorial1.py index c611220f9..83e68a855 100644 --- a/.docs/pysrc/tutorial1.py +++ b/.docs/pysrc/tutorial1.py @@ -55,7 +55,7 @@ import flopy.utils.binaryfile as bf plt.subplot(1, 1, 1, aspect="equal") -hds = bf.HeadFile(modelname + ".hds") +hds = bf.HeadFile(f"{modelname}.hds") head = hds.get_data(totim=1.0) levels = np.arange(1, 10, 1) extent = (delr / 2.0, Lx - delc / 2.0, Ly - delc / 2.0, delc / 2.0) @@ -66,12 +66,12 @@ fig = plt.figure(figsize=(10, 10)) ax = fig.add_subplot(1, 1, 1, aspect="equal") -hds = bf.HeadFile(modelname + ".hds") +hds = bf.HeadFile(f"{modelname}.hds") times = hds.get_times() head = hds.get_data(totim=times[-1]) levels = np.linspace(0, 10, 11) -cbb = bf.CellBudgetFile(modelname + ".cbc") +cbb = bf.CellBudgetFile(f"{modelname}.cbc") kstpkper_list = cbb.get_kstpkper() frf = cbb.get_data(text="FLOW RIGHT FACE", totim=times[-1])[0] fff = cbb.get_data(text="FLOW FRONT FACE", totim=times[-1])[0] diff --git a/.docs/pysrc/tutorial2.py b/.docs/pysrc/tutorial2.py index 5fc939645..c299e9cda 100644 --- a/.docs/pysrc/tutorial2.py +++ b/.docs/pysrc/tutorial2.py @@ -124,9 +124,9 @@ import flopy.utils.binaryfile as bf # Create the headfile and budget file objects -headobj = bf.HeadFile(modelname + ".hds") +headobj = bf.HeadFile(f"{modelname}.hds") times = headobj.get_times() -cbb = bf.CellBudgetFile(modelname + ".cbc") +cbb = bf.CellBudgetFile(f"{modelname}.cbc") # Setup contour parameters levels = np.linspace(0, 10, 11) @@ -156,7 +156,7 @@ # Create the plot f = plt.figure() plt.subplot(1, 1, 1, aspect="equal") - plt.title("stress period " + str(iplot + 1)) + plt.title(f"stress period {iplot + 1}") modelmap = flopy.plot.PlotMapView(model=mf, layer=0) qm = modelmap.plot_ibound() @@ -181,13 +181,13 @@ zorder=9, ) plt.text(wpt[0] + 25, wpt[1] - 25, "well", size=12, zorder=12) - plt.savefig("tutorial2-{}.png".format(iplot)) + plt.savefig(f"tutorial2-{iplot}.png") # Plot the head versus time idx = (0, int(nrow / 2) - 1, int(ncol / 2) - 1) ts = headobj.get_ts(idx) plt.subplot(1, 1, 1) -ttl = "Head at cell ({0},{1},{2})".format(idx[0] + 1, idx[1] + 1, idx[2] + 1) +ttl = f"Head at cell ({idx[0] + 1},{idx[1] + 1},{idx[2] + 1})" plt.title(ttl) plt.xlabel("time") plt.ylabel("head") diff --git a/autotest/autotest_notebooks.py b/autotest/autotest_notebooks.py index a16517278..da394e836 100644 --- a/autotest/autotest_notebooks.py +++ b/autotest/autotest_notebooks.py @@ -28,7 +28,7 @@ def run_notebook(dpth, fn): ) print(" ".join(arg)) ival = os.system(" ".join(arg)) - assert ival == 0, "could not run {}".format(fn) + assert ival == 0, f"could not run {fn}" def test_notebooks(): diff --git a/autotest/autotest_scripts.py b/autotest/autotest_scripts.py index dfa652560..c86d34026 100644 --- a/autotest/autotest_scripts.py +++ b/autotest/autotest_scripts.py @@ -58,7 +58,7 @@ def copy_scripts(src_dir, dst_dir, include_subdir=False): dst = os.path.join(dst_dir, 
os.path.basename(src)) # copy script - print("copying {} from {} to {}".format(filename, filedir, testdir)) + print(f"copying {filename} from {filedir} to {testdir}") shutil.copyfile(src, dst) return [os.path.basename(filepath) for filepath in files] @@ -77,29 +77,29 @@ def run_scripts(fn, testdir): # change to working directory opth = os.getcwd() - print('changing to working directory "{}"'.format(testdir)) + print(f'changing to working directory "{testdir}"') os.chdir(testdir) # run the script ival = run() # change back to starting directory - print('changing back to starting directory "{}"'.format(opth)) + print(f'changing back to starting directory "{opth}"') os.chdir(opth) # make sure script ran successfully - assert ival == 0, "could not run {}".format(fn) + assert ival == 0, f"could not run {fn}" def run_tutorial_scripts(fn, testdir): args = ("python", fn) - print("running...'{}'".format(" ".join(args))) + print(f"running...'{' '.join(args)}'") proc = Popen(args, stdout=PIPE, stderr=PIPE, cwd=testdir) stdout, stderr = proc.communicate() if stdout: print(stdout.decode("utf-8")) if stderr: - print("Errors:\n{}".format(stderr.decode("utf-8"))) + print(f"Errors:\n{stderr.decode('utf-8')}") return diff --git a/autotest/get_exes.py b/autotest/get_exes.py index 93688bdc9..fd846e69f 100644 --- a/autotest/get_exes.py +++ b/autotest/get_exes.py @@ -40,12 +40,12 @@ if dotlocal: bindir = os.path.join(os.path.expanduser("~"), ".local", "bin") bindir = os.path.abspath(bindir) - print("bindir: {}".format(bindir)) + print(f"bindir: {bindir}") if not os.path.isdir(bindir): os.makedirs(bindir) # write where the executables will be downloaded -print('modflow executables will be downloaded to:\n\n "{}"'.format(bindir)) +print(f'modflow executables will be downloaded to:\n\n "{bindir}"') def get_branch(): @@ -97,13 +97,13 @@ def move_exe(): continue src = os.path.join(exe_pth, file) dst = os.path.join(bindir, file) - print("moving {} -> {}".format(src, dst)) + print(f"moving {src} -> {dst}") shutil.move(src, dst) return def list_exes(): - cmd = "ls -l {}".format(bindir) + cmd = f"ls -l {bindir}" os.system(cmd) return @@ -125,7 +125,7 @@ def test_download_nightly_build(): # get the current branch branch = get_branch() - print("current branch: {}".format(branch)) + print(f"current branch: {branch}") # No need to replace MODFLOW 6 executables if branch == "master": diff --git a/autotest/t004_test_utilarray.py b/autotest/t004_test_utilarray.py index 71f552a9d..282a7bcd4 100644 --- a/autotest/t004_test_utilarray.py +++ b/autotest/t004_test_utilarray.py @@ -336,7 +336,7 @@ def stress_util2d(ml, nlay, nrow, ncol): # save hk up one dir from model_ws fnames = [] for i, h in enumerate(hk): - fname = os.path.join(out_dir, "test_{0}.ref".format(i)) + fname = os.path.join(out_dir, f"test_{i}.ref") fnames.append(fname) np.savetxt(fname, h, fmt="%15.6e", delimiter="") vk[i] = i + 1.0 @@ -406,7 +406,7 @@ def stress_util2d_for_joe_the_file_king(ml, nlay, nrow, ncol): # save hk up one dir from model_ws fnames = [] for i, h in enumerate(hk): - fname = os.path.join("test_{0}.ref".format(i)) + fname = os.path.join(f"test_{i}.ref") fnames.append(fname) np.savetxt(fname, h, fmt="%15.6e", delimiter="") vk[i] = i + 1.0 @@ -746,7 +746,7 @@ def test_mflist(): .to_records(index=True) .astype(data.dtype) ) - errmsg = "data not equal:\n {}\n {}".format(dfdata, data) + errmsg = f"data not equal:\n {dfdata}\n {data}" assert np.array_equal(dfdata, data), errmsg m4ds = ml.wel.stress_period_data.masked_4D_arrays diff --git 
a/autotest/t005_test.py b/autotest/t005_test.py index 829a8579c..ac01e6387 100644 --- a/autotest/t005_test.py +++ b/autotest/t005_test.py @@ -40,17 +40,17 @@ def test_modflow_unstructured(): # write well file wel.write_file() - assert os.path.isfile(os.path.join(cpth, "{}.wel".format(mf.name))) is True + assert os.path.isfile(os.path.join(cpth, f"{mf.name}.wel")) is True wel2 = flopy.modflow.ModflowWel.load( - os.path.join(cpth, "{}.wel".format(mf.name)), mf + os.path.join(cpth, f"{mf.name}.wel"), mf ) assert wel2.stress_period_data[0] == wel.stress_period_data[0] # write ghb file ghb.write_file(check=False) - assert os.path.isfile(os.path.join(cpth, "{}.ghb".format(mf.name))) is True + assert os.path.isfile(os.path.join(cpth, f"{mf.name}.ghb")) is True ghb2 = flopy.modflow.ModflowGhb.load( - os.path.join(cpth, "{}.ghb".format(mf.name)), mf + os.path.join(cpth, f"{mf.name}.ghb"), mf ) assert ghb2.stress_period_data[0] == ghb.stress_period_data[0] diff --git a/autotest/t006_test.py b/autotest/t006_test.py index 92007cc2b..814a35574 100644 --- a/autotest/t006_test.py +++ b/autotest/t006_test.py @@ -116,7 +116,7 @@ def test_cbc_ts(): ) zobj = flopy.utils.CellBudgetFile(fpth, precision="single") ts = zobj.get_ts(text="ZETASRF 1", idx=(0, 0, 24)) - errtxt = "shape of zeta timeseries is {} not (4, 2)".format(ts.shape) + errtxt = f"shape of zeta timeseries is {ts.shape} not (4, 2)" assert ts.shape == (4, 2), errtxt diff --git a/autotest/t007_test.py b/autotest/t007_test.py index 89f8eb388..520c8c57d 100644 --- a/autotest/t007_test.py +++ b/autotest/t007_test.py @@ -51,14 +51,14 @@ def remove_shp(shpname): def export_mf6_netcdf(path): - print("in export_mf6_netcdf: {}".format(path)) + print(f"in export_mf6_netcdf: {path}") sim = flopy.mf6.modflow.mfsimulation.MFSimulation.load(sim_ws=path) for name, model in sim.get_model_itr(): export_netcdf(model) def export_mf2005_netcdf(namfile): - print("in export_mf2005_netcdf: {}".format(namfile)) + print(f"in export_mf2005_netcdf: {namfile}") if namfile in skip: return m = flopy.modflow.Modflow.load(namfile, model_ws=pth, verbose=False) @@ -70,8 +70,8 @@ def export_mf2005_netcdf(namfile): if m.dis.botm.shape[0] != m.nlay: print("skipping...botm.shape[0] != nlay") return - assert m, "Could not load namefile {}".format(namfile) - msg = "Could not load {} model".format(namfile) + assert m, f"Could not load namefile {namfile}" + msg = f"Could not load {namfile} model" assert isinstance(m, flopy.modflow.Modflow), msg export_netcdf(m) @@ -83,61 +83,57 @@ def export_netcdf(m): import pyproj except: return - fnc = m.export(os.path.join(npth, m.name + ".nc")) + fnc = m.export(os.path.join(npth, f"{m.name}.nc")) fnc.write() - fnc_name = os.path.join(npth, m.name + ".nc") + fnc_name = os.path.join(npth, f"{m.name}.nc") try: fnc = m.export(fnc_name) fnc.write() except Exception as e: - msg = "ncdf export fail for namfile {}:\n{} ".format(m.name, str(e)) + msg = f"ncdf export fail for namfile {m.name}:\n{e!s} " raise Exception(msg) try: nc = netCDF4.Dataset(fnc_name, "r") except Exception as e: - msg = "ncdf import fail for nc file {}:\n{}".format(fnc_name, str(e)) + msg = f"ncdf import fail for nc file {fnc_name}:\n{e!s}" raise Exception() return def export_shapefile(namfile): - print("in export_shapefile: {}".format(namfile)) + print(f"in export_shapefile: {namfile}") shp = import_shapefile() if shp is None: return m = flopy.modflow.Modflow.load(namfile, model_ws=pth, verbose=False) - assert m, "Could not load namefile {}".format(namfile) - msg = "Could not load {} 
model".format(namfile) + assert m, f"Could not load namefile {namfile}" + msg = f"Could not load {namfile} model" assert isinstance(m, flopy.modflow.Modflow), msg - fnc_name = os.path.join(spth, m.name + ".shp") + fnc_name = os.path.join(spth, f"{m.name}.shp") try: fnc = m.export(fnc_name) # fnc2 = m.export(fnc_name, package_names=None) # fnc3 = m.export(fnc_name, package_names=['DIS']) except Exception as e: - msg = "shapefile export fail for namfile {}:\n{}".format( - namfile, str(e) - ) + msg = f"shapefile export fail for namfile {namfile}:\n{e!s}" raise Exception(msg) try: s = shp.Reader(fnc_name) except Exception as e: - msg = "shapefile import fail for {}:\n{}".format(fnc_name, str(e)) + msg = f"shapefile import fail for {fnc_name}:\n{e!s}" raise Exception(msg) - msg = "wrong number of records in shapefile {}:{:d}".format( - fnc_name, s.numRecords - ) + msg = f"wrong number of records in shapefile {fnc_name}:{s.numRecords}" assert s.numRecords == m.nrow * m.ncol, msg return def export_shapefile_modelgrid_override(namfile): - print("in export_modelgrid_override: {}".format(namfile)) + print(f"in export_modelgrid_override: {namfile}") shp = import_shapefile() if shp is None: return @@ -160,9 +156,9 @@ def export_shapefile_modelgrid_override(namfile): angrot=mg0.angrot, ) - assert m, "Could not load namefile {}".format(namfile) + assert m, f"Could not load namefile {namfile}" assert isinstance(m, flopy.modflow.Modflow) - fnc_name = os.path.join(spth, m.name + ".shp") + fnc_name = os.path.join(spth, f"{m.name}.shp") try: fnc = m.export(fnc_name, modelgrid=modelgrid) @@ -170,14 +166,12 @@ def export_shapefile_modelgrid_override(namfile): # fnc3 = m.export(fnc_name, package_names=['DIS']) except Exception as e: - msg = "shapefile export fail for namfile {}:\n{}".format( - namfile, str(e) - ) + msg = f"shapefile export fail for namfile {namfile}:\n{e!s}" raise Exception(msg) try: s = shp.Reader(fnc_name) except Exception as e: - msg = "shapefile import fail for {}:{}".format(fnc_name, str(e)) + msg = f"shapefile import fail for {fnc_name}:{e!s}" raise Exception(msg) @@ -216,10 +210,10 @@ def test_freyberg_export(): namfile, model_ws=model_ws, check=False, verbose=False ) # test export at model, package and object levels - m.export("{}/model.shp".format(spth)) - m.wel.export("{}/wel.shp".format(spth)) - m.lpf.hk.export("{}/hk.shp".format(spth)) - m.riv.stress_period_data.export("{}/riv_spd.shp".format(spth)) + m.export(f"{spth}/model.shp") + m.wel.export(f"{spth}/wel.shp") + m.lpf.hk.export(f"{spth}/hk.shp") + m.riv.stress_period_data.export(f"{spth}/riv_spd.shp") # transient # (doesn't work at model level because the total size of @@ -232,7 +226,7 @@ def test_freyberg_export(): load_only=["DIS", "BAS6", "NWT", "OC", "RCH", "WEL", "DRN", "UPW"], ) # test export without instantiating an sr - outshp = os.path.join(spth, namfile[:-4] + "_drn_sparse.shp") + outshp = os.path.join(spth, f"{namfile[:-4]}_drn_sparse.shp") m.drn.stress_period_data.export(outshp, sparse=True) assert os.path.exists(outshp) remove_shp(outshp) @@ -261,7 +255,7 @@ def test_freyberg_export(): # if wkt text was fetched from spatialreference.org if wkt is not None: # test default package export - outshp = os.path.join(spth, namfile[:-4] + "_dis.shp") + outshp = os.path.join(spth, f"{namfile[:-4]}_dis.shp") m.dis.export(outshp) prjfile = outshp.replace(".shp", ".prj") with open(prjfile) as src: @@ -270,7 +264,7 @@ def test_freyberg_export(): remove_shp(outshp) # test default package export to higher level dir - outshp = 
os.path.join(spth, namfile[:-4] + "_dis.shp") + outshp = os.path.join(spth, f"{namfile[:-4]}_dis.shp") m.dis.export(outshp) prjfile = outshp.replace(".shp", ".prj") with open(prjfile) as src: @@ -279,7 +273,7 @@ def test_freyberg_export(): remove_shp(outshp) # test sparse package export - outshp = os.path.join(spth, namfile[:-4] + "_drn_sparse.shp") + outshp = os.path.join(spth, f"{namfile[:-4]}_drn_sparse.shp") m.drn.stress_period_data.export(outshp, sparse=True) prjfile = outshp.replace(".shp", ".prj") assert os.path.exists(prjfile) @@ -697,14 +691,14 @@ def test_twri_mg(): mg, flopy.discretization.StructuredGrid ), "modelgrid is not an StructuredGrid instance" shape = (3, 15, 15) - assert mg.shape == shape, "modelgrid shape {} not equal to {}".format( - mg.shape, shape - ) + assert ( + mg.shape == shape + ), f"modelgrid shape {mg.shape} not equal to {shape}" thick = mg.thick shape = (5, 15, 15) - assert thick.shape == shape, "thickness shape {} not equal to {}".format( - thick.shape, shape - ) + assert ( + thick.shape == shape + ), f"thickness shape {thick.shape} not equal to {shape}" return @@ -802,12 +796,12 @@ def test_epsgs(): mg = flopy.discretization.StructuredGrid(delr=delr, delc=delc) mg.epsg = 102733 - assert mg.epsg == 102733, "mg.epsg is not 102733 ({})".format(mg.epsg) + assert mg.epsg == 102733, f"mg.epsg is not 102733 ({mg.epsg})" t_value = mg.__repr__() if not "proj4_str:epsg:102733" in t_value: raise AssertionError( - "proj4_str:epsg:102733 not in mg.__repr__(): ({})".format(t_value) + f"proj4_str:epsg:102733 not in mg.__repr__(): ({t_value})" ) mg.epsg = 4326 # WGS 84 @@ -817,12 +811,12 @@ def test_epsgs(): t_value = crs.grid_mapping_attribs["grid_mapping_name"] assert ( t_value == "latitude_longitude" - ), "grid_mapping_name is not latitude_longitude: {}".format(t_value) + ), f"grid_mapping_name is not latitude_longitude: {t_value}" t_value = mg.__repr__() if not "proj4_str:epsg:4326" in t_value: raise AssertionError( - "proj4_str:epsg:4326 not in sr.__repr__(): ({})".format(t_value) + f"proj4_str:epsg:4326 not in sr.__repr__(): ({t_value})" ) @@ -839,12 +833,8 @@ def test_dynamic_xll_yll(): ms2.modelgrid.set_coord_info(xoff=xll, yoff=yll, angrot=30.0) xll1, yll1 = ms2.modelgrid.xoffset, ms2.modelgrid.yoffset - assert xll1 == xll, "modelgrid.xoffset ({}) is not equal to {}".format( - xll1, xll - ) - assert yll1 == yll, "modelgrid.yoffset ({}) is not equal to {}".format( - yll1, yll - ) + assert xll1 == xll, f"modelgrid.xoffset ({xll1}) is not equal to {xll}" + assert yll1 == yll, f"modelgrid.yoffset ({yll1}) is not equal to {yll}" # check that xll, yll are being recomputed xll += 10.0 @@ -852,12 +842,8 @@ def test_dynamic_xll_yll(): ms2.modelgrid.set_coord_info(xoff=xll, yoff=yll, angrot=30.0) xll1, yll1 = ms2.modelgrid.xoffset, ms2.modelgrid.yoffset - assert xll1 == xll, "modelgrid.xoffset ({}) is not equal to {}".format( - xll1, xll - ) - assert yll1 == yll, "modelgrid.yoffset ({}) is not equal to {}".format( - yll1, yll - ) + assert xll1 == xll, f"modelgrid.xoffset ({xll1}) is not equal to {xll}" + assert yll1 == yll, f"modelgrid.yoffset ({yll1}) is not equal to {yll}" def test_namfile_readwrite(): @@ -886,21 +872,14 @@ def test_namfile_readwrite(): m2 = fm.Modflow.load("junk.nam", model_ws=os.path.join("temp", "t007")) t_value = abs(m2.modelgrid.xoffset - xll) - msg = "m2.modelgrid.xoffset ({}) does not equal {}".format( - m2.modelgrid.xoffset, xll - ) + msg = f"m2.modelgrid.xoffset ({m2.modelgrid.xoffset}) does not equal {xll}" assert t_value < 1e-2, msg t_value = 
abs(m2.modelgrid.yoffset - yll) - msg = "m2.modelgrid.yoffset ({}) does not equal {}".format( - m2.modelgrid.yoffset, yll - ) + msg = f"m2.modelgrid.yoffset ({m2.modelgrid.yoffset}) does not equal {yll}" assert t_value < 1e-2 - msg = ( - "m2.modelgrid.angrot ({}) ".format(m2.modelgrid.angrot) - + "does not equal 30" - ) + msg = f"m2.modelgrid.angrot ({m2.modelgrid.angrot}) does not equal 30" assert m2.modelgrid.angrot == 30, msg model_ws = os.path.join( @@ -971,8 +950,8 @@ def test_read_usgs_model_reference(): assert m2.modelgrid.epsg == mg.epsg # test reading non-default units from usgs.model.reference - shutil.copy(mrf, mrf + "_copy") - with open(mrf + "_copy") as src: + shutil.copy(mrf, f"{mrf}_copy") + with open(f"{mrf}_copy") as src: with open(mrf, "w") as dst: for line in src: if "epsg" in line: @@ -984,7 +963,7 @@ def test_read_usgs_model_reference(): assert m2.modelgrid.epsg == 4326 # have to delete this, otherwise it will mess up other tests - to_del = glob.glob(mrf + "*") + to_del = glob.glob(f"{mrf}*") for f in to_del: if os.path.exists(f): os.remove(os.path.join(f)) @@ -1345,14 +1324,14 @@ def test_get_lrc_get_node(): for node, (l, r, c) in enumerate(zip(layers, rows, cols)): # ensure get_lrc returns zero-based layer row col lrc = dis.get_lrc(node)[0] - assert lrc == (l, r, c), "get_lrc() returned {}, expecting {}".format( - lrc, (l, r, c) - ) + assert lrc == ( + l, + r, + c, + ), f"get_lrc() returned {lrc}, expecting {l, r, c}" # ensure get_node returns zero-based node number n = dis.get_node((l, r, c))[0] - assert node == n, "get_node() returned {}, expecting {}".format( - n, node - ) + assert node == n, f"get_node() returned {n}, expecting {node}" return @@ -1383,17 +1362,13 @@ def test_model_dot_plot(): ) ax = ml.plot() assert isinstance(ax, list), "ml.plot() ax is is not a list" - assert len(ax) == 18, "number of axes ({}) is " "not equal to 18".format( - len(ax) - ) + assert len(ax) == 18, f"number of axes ({len(ax)}) is not equal to 18" plt.close("all") # plot specific dataset ax = ml.bcf6.hy.plot() assert isinstance(ax, list), "ml.bcf6.hy.plot() ax is is not a list" - assert len(ax) == 2, "number of hy axes ({}) " "is not equal to 2".format( - len(ax) - ) + assert len(ax) == 2, f"number of hy axes ({len(ax)}) is not equal to 2" # special case where nlay != plottable ax = ml.bcf6.vcont.plot() @@ -1428,12 +1403,8 @@ def test_get_rc_from_node_coordinates(): x = xgrid[j] y = ygrid[i] r, c = mfdis.get_rc_from_node_coordinates(x, y) - assert r == i, "row {} not equal {} for xy ({}, {})".format( - r, i, x, y - ) - assert c == j, "col {} not equal {} for xy ({}, {})".format( - c, j, x, y - ) + assert r == i, f"row {r} not equal {i} for xy ({x}, {y})" + assert c == j, f"col {c} not equal {j} for xy ({x}, {y})" def test_netcdf_classmethods(): @@ -1519,7 +1490,7 @@ def test_shapefile_ibound(): shp = shapefile.Reader(shape_name) field_names = [item[0] for item in shp.fields][1:] ib_idx = field_names.index("ibound_1") - txt = "should be int instead of {0}".format(type(shp.record(0)[ib_idx])) + txt = f"should be int instead of {type(shp.record(0)[ib_idx])}" assert type(shp.record(0)[ib_idx]) == int, txt diff --git a/autotest/t008_test.py b/autotest/t008_test.py index db31e55a3..8df95ddcc 100644 --- a/autotest/t008_test.py +++ b/autotest/t008_test.py @@ -38,7 +38,7 @@ def load_model(namfile): m = flopy.modflow.Modflow.load( namfile, model_ws=pth, version="mf2005", verbose=True ) - assert m, "Could not load namefile {}".format(namfile) + assert m, f"Could not load namefile {namfile}" 
assert m.load_fail is False @@ -46,7 +46,7 @@ def load_parameter_model(namfile): m = flopy.modflow.Modflow.load( namfile, model_ws=ppth, version="mf2005", verbose=True ) - assert m, "Could not load namefile {}".format(namfile) + assert m, f"Could not load namefile {namfile}" assert m.load_fail is False @@ -59,7 +59,7 @@ def load_only_bas6_model(namfile): load_only=["bas6"], check=False, ) - assert m, "Could not load namefile {}".format(namfile) + assert m, f"Could not load namefile {namfile}" assert m.load_fail is False @@ -93,7 +93,7 @@ def test_nwt_model_load(): def load_nwt(nwtfile): ml = flopy.modflow.Modflow(model_ws=tpth, version="mfnwt") - fn = os.path.join(tpth, "{}.nwt".format(ml.name)) + fn = os.path.join(tpth, f"{ml.name}.nwt") if os.path.isfile(fn): os.remove(fn) if "fmt." in nwtfile.lower(): @@ -102,11 +102,11 @@ def load_nwt(nwtfile): ml.array_free_format = True nwt = flopy.modflow.ModflowNwt.load(nwtfile, ml) - msg = "{} load unsuccessful".format(os.path.basename(nwtfile)) + msg = f"{os.path.basename(nwtfile)} load unsuccessful" assert isinstance(nwt, flopy.modflow.ModflowNwt), msg nwt.write_file() - msg = "{} write unsuccessful".format(os.path.basename(nwtfile)) + msg = f"{os.path.basename(nwtfile)} write unsuccessful" assert os.path.isfile(fn), msg ml2 = flopy.modflow.Modflow(model_ws=tpth, version="mfnwt") diff --git a/autotest/t010_test.py b/autotest/t010_test.py index dd78d6873..ed6ab4ccd 100644 --- a/autotest/t010_test.py +++ b/autotest/t010_test.py @@ -34,7 +34,7 @@ def load_check_sfr(i, mfnam, model_ws, checker_output_path): # print('Testing {}\n'.format(mfnam) + '='*100) m = flopy.modflow.Modflow.load(mfnam, model_ws=model_ws) m.model_ws = checker_output_path - checker_outfile = os.path.join(tpth, "SFRcheck_{}.txt".format(m.name)) + checker_outfile = os.path.join(tpth, f"SFRcheck_{m.name}.txt") chk = m.sfr.check(checker_outfile, level=1) @@ -123,7 +123,7 @@ def test_sfrloadcheck(): def load_sfr_isfropt_icalc(isfropt, icalc): pth = os.path.join("..", "examples", "data", "sfr_test") - nam = "sfrtest{}{}.nam".format(isfropt, icalc) + nam = f"sfrtest{isfropt}{icalc}.nam" ml = flopy.modflow.Modflow.load( nam, check=False, model_ws=pth, exe_name="mfnwt" ) @@ -136,8 +136,8 @@ def load_sfr_isfropt_icalc(isfropt, icalc): success = ml.run_model()[0] if not success: raise AssertionError( - "sfrtest{}{}.nam".format(isfropt, icalc) - + "is broken, please fix SFR 6a, 6bc logic!" + f"sfrtest{isfropt}{icalc}.nam " + "is broken, please fix SFR 6a, 6bc logic!" 
) diff --git a/autotest/t011_test.py b/autotest/t011_test.py index 953f51ea8..f74575883 100644 --- a/autotest/t011_test.py +++ b/autotest/t011_test.py @@ -97,7 +97,7 @@ def test_mf6listfile(): "RCH2_OUT", "RCH3_OUT", ]: - assert item in names, "{} not found in names".format(item) + assert item in names, f"{item} not found in names" assert len(names) == 25 inc = mflist.get_incremental() return @@ -119,8 +119,7 @@ def test_mflist_reducedpumping_fail(): list_file = os.path.join(pth, "ex3A.lst") # Catch before flopy to avoid masking file not found assert if not os.path.isfile(list_file): - msg = "{} {}".format(list_file, "not found") - raise FileNotFoundError(msg) + raise FileNotFoundError(f"{list_file} not found") mflist = flopy.utils.MfusgListBudget(list_file) mflist.get_reduced_pumping() diff --git a/autotest/t012_test.py b/autotest/t012_test.py index 5ea1911ad..01a09beb4 100644 --- a/autotest/t012_test.py +++ b/autotest/t012_test.py @@ -36,7 +36,7 @@ def test_mf2005_p07(): if ismf2005 is not None: success, buff = mf.run_model(silent=False) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namfile = "p7mt.nam" mt = flopy.mt3d.mt.Mt3dms.load( @@ -56,7 +56,7 @@ def test_mf2005_p07(): success, buff = mt.run_model( silent=False, normal_msg="program completed." ) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return @@ -75,7 +75,7 @@ def test_mf2000_p07(): if ismf2k is not None: success, buff = mf.run_model(silent=True) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namfile = "p7mt.nam" mt = flopy.mt3d.mt.Mt3dms.load( @@ -89,7 +89,7 @@ def test_mf2000_p07(): success, buff = mt.run_model( silent=False, normal_msg="program completed." ) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return @@ -107,7 +107,7 @@ def test_mf2000_HSSTest(): mf.write_input() if ismf2k is not None: success, buff = mf.run_model(silent=True) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namfile = "hsstest_mt.nam" mt = flopy.mt3d.mt.Mt3dms.load( namfile, model_ws=pth, verbose=True, exe_name=mt3d_exe @@ -122,7 +122,7 @@ def test_mf2000_HSSTest(): success, buff = mt.run_model( silent=False, normal_msg="program completed." ) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return @@ -158,7 +158,7 @@ def test_mf2000_MultiDiffusion(): mf.write_input() if ismf2k is not None: success, buff = mf.run_model(silent=True) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namfile = "P7MT.NAM" mt = flopy.mt3d.mt.Mt3dms.load( namfile, model_ws=pth, verbose=True, exe_name=mt3d_exe @@ -171,7 +171,7 @@ def test_mf2000_MultiDiffusion(): success, buff = mt.run_model( silent=False, normal_msg="program completed." 
) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return @@ -189,7 +189,7 @@ def test_mf2000_reinject(): mf.write_input() if ismf2k is not None: success, buff = mf.run_model(silent=True) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namfile = "p3mt.nam" mt = flopy.mt3d.mt.Mt3dms.load( @@ -204,7 +204,7 @@ def test_mf2000_reinject(): success, buff = mt.run_model( silent=False, normal_msg="program completed." ) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return @@ -222,7 +222,7 @@ def test_mf2000_SState(): mf.write_input() if ismf2k is not None: success, buff = mf.run_model(silent=True) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namfile = "SState_mt.nam" mt = flopy.mt3d.mt.Mt3dms.load( @@ -237,7 +237,7 @@ def test_mf2000_SState(): success, buff = mt.run_model( silent=False, normal_msg="program completed." ) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return @@ -257,7 +257,7 @@ def test_mf2000_tob(): mf.write_input() if ismf2k is not None: success, buff = mf.run_model(silent=True) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namfile = "p7mt.nam" mt = flopy.mt3d.mt.Mt3dms.load( @@ -271,7 +271,7 @@ def test_mf2000_tob(): success, buff = mt.run_model( silent=False, normal_msg="program completed." ) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return @@ -289,7 +289,7 @@ def test_mf2000_zeroth(): mf.write_input() if ismf2k is not None: success, buff = mf.run_model(silent=True) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namfile = "z0mt.nam" mt = flopy.mt3d.mt.Mt3dms.load( @@ -303,7 +303,7 @@ def test_mf2000_zeroth(): success, buff = mt.run_model( silent=False, normal_msg="program completed." ) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return @@ -332,7 +332,7 @@ def test_mfnwt_CrnkNic(): mf.write_input() if ismfnwt is not None: success, buff = mf.run_model(silent=False) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namefile = "CrnkNic.mtnam" mt = flopy.mt3d.mt.Mt3dms.load( @@ -352,7 +352,7 @@ def test_mfnwt_CrnkNic(): success, buff = mt.run_model( silent=False, normal_msg="program completed." ) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return @@ -380,7 +380,7 @@ def test_mfnwt_LKT(): success = False if ismfnwt is not None: success, buff = mf.run_model(silent=False) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namefile = "lkt_mt.nam" mt = flopy.mt3d.mt.Mt3dms.load( @@ -403,7 +403,7 @@ def test_mfnwt_LKT(): success, buff = mt.run_model( silent=False, normal_msg="program completed." 
) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return @@ -425,7 +425,7 @@ def test_mfnwt_keat_uzf(): mf.write_input() if ismfnwt is not None: success, buff = mf.run_model(silent=True) - assert success, "{} did not run".format(mf.name) + assert success, f"{mf.name} did not run" namefile = "Keat_UZF_mt.nam" mt = flopy.mt3d.mt.Mt3dms.load( @@ -450,7 +450,7 @@ def test_mfnwt_keat_uzf(): success, buff = mt.run_model( silent=False, normal_msg="program completed." ) - assert success, "{} did not run".format(mt.name) + assert success, f"{mt.name} did not run" os.remove(os.path.join(cpth, ftlfile)) return diff --git a/autotest/t013_test.py b/autotest/t013_test.py index c3e5eea54..5c957d35f 100644 --- a/autotest/t013_test.py +++ b/autotest/t013_test.py @@ -30,45 +30,31 @@ def test_mt3d_create_withmfmodel(): # confirm that MT3D files exist assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, btn.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{btn.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, adv.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{adv.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, dsp.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{dsp.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, ssm.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{ssm.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, gcg.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{gcg.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, rct.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{rct.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, tob.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{tob.extension[0]}")) is True ) @@ -105,7 +91,7 @@ def test_mt3d_create_woutmfmodel(): wrn_msg = "mxss is None and modflowmodel is None." 
if len(w) > 0: - print("Number of warnings: {}".format(len(w))) + print(f"Number of warnings: {len(w)}") ipos = -1 for idx, wm in enumerate(w): print(wm.message) @@ -113,10 +99,10 @@ def test_mt3d_create_woutmfmodel(): ipos = idx break - assert ipos >= 0, "'{}' warning message not issued".format(wrn_msg) - assert w[ipos].category == UserWarning, "Warning category: {}".format( - w[0].category - ) + assert ipos >= 0, f"'{wrn_msg}' warning message not issued" + assert ( + w[ipos].category == UserWarning + ), f"Warning category: {w[0].category}" gcg = flopy.mt3d.Mt3dRct(mt) rct = flopy.mt3d.Mt3dGcg(mt) @@ -127,45 +113,31 @@ def test_mt3d_create_woutmfmodel(): # confirm that MT3D files exist assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, btn.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{btn.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, adv.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{adv.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, dsp.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{dsp.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, ssm.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{ssm.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, gcg.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{gcg.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, rct.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{rct.extension[0]}")) is True ) assert ( - os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, tob.extension[0])) - ) + os.path.isfile(os.path.join(model_ws, f"{mt.name}.{tob.extension[0]}")) is True ) @@ -185,7 +157,7 @@ def test_mt3d_pht3d(): # confirm that MT3D files exist assert os.path.isfile( - os.path.join(model_ws, "{}.{}".format(mt.name, phc.extension[0])) + os.path.join(model_ws, f"{mt.name}.{phc.extension[0]}") ) return diff --git a/autotest/t014_test.py b/autotest/t014_test.py index 910dcb1e7..dea10f40d 100644 --- a/autotest/t014_test.py +++ b/autotest/t014_test.py @@ -43,7 +43,7 @@ def load_str(mfnam, pth): assert success, "base model run did not terminate successfully" # load files - pth = os.path.join(cpth, "{}.str".format(m.name)) + pth = os.path.join(cpth, f"{m.name}.str") str2 = flopy.modflow.ModflowStr.load(pth, m) for name in str2.dtype.names: assert ( diff --git a/autotest/t016_test.py b/autotest/t016_test.py index d04bd7ef8..bfc4180a9 100644 --- a/autotest/t016_test.py +++ b/autotest/t016_test.py @@ -22,7 +22,7 @@ def test_usg_disu_load(): "..", "examples", "data", "mfusg_test", "01A_nestedgrid_nognc" ) fname = os.path.join(pthusgtest, "flow.disu") - assert os.path.isfile(fname), "disu file not found {}".format(fname) + assert os.path.isfile(fname), f"disu file not found {fname}" # Create the model m = flopy.modflow.Modflow(modelname="usgload", verbose=True) @@ -38,7 +38,7 @@ def test_usg_disu_load(): # Write the disu file disu.write_file() assert os.path.isfile( - os.path.join(model_ws, "{}.{}".format(m.name, m.disu.extension[0])) + os.path.join(model_ws, f"{m.name}.{m.disu.extension[0]}") ) # Load disu file @@ -64,7 +64,7 @@ def test_usg_sms_load(): "..", "examples", "data", "mfusg_test", "01A_nestedgrid_nognc" ) fname = 
os.path.join(pthusgtest, "flow.sms") - assert os.path.isfile(fname), "sms file not found {}".format(fname) + assert os.path.isfile(fname), f"sms file not found {fname}" # Create the model m = flopy.modflow.Modflow(modelname="usgload", verbose=True) @@ -80,7 +80,7 @@ def test_usg_sms_load(): # Write the sms file sms.write_file() assert os.path.isfile( - os.path.join(model_ws, "{}.{}".format(m.name, m.sms.extension[0])) + os.path.join(model_ws, f"{m.name}.{m.sms.extension[0]}") ) # Load sms file @@ -90,9 +90,7 @@ def test_usg_sms_load(): ): assert ( value1 == value2 - ), "key1 {}, value 1 {} != key2 {} value 2 {}".format( - key1, value1, key2, value2 - ) + ), f"key1 {key1}, value 1 {value1} != key2 {key2} value 2 {value2}" return @@ -131,7 +129,7 @@ def test_usg_model(): # try different complexity options; all should run successfully for complexity in ["simple", "moderate", "complex"]: - print("testing MFUSG with sms complexity: " + complexity) + print(f"testing MFUSG with sms complexity: {complexity}") sms = flopy.modflow.ModflowSms(mf, options=complexity) sms.write_file() if run: @@ -147,7 +145,7 @@ def test_usg_load_01B(): "..", "examples", "data", "mfusg_test", "01A_nestedgrid_nognc" ) fname = os.path.join(pthusgtest, "flow.nam") - assert os.path.isfile(fname), "nam file not found {}".format(fname) + assert os.path.isfile(fname), f"nam file not found {fname}" # Create the model m = flopy.modflow.Modflow(modelname="usgload_1b", verbose=True) @@ -172,7 +170,7 @@ def test_usg_load_45usg(): print("testing 3-layer unstructured mfusg model loading: 45usg.nam") pthusgtest = os.path.join("..", "examples", "data", "mfusg_test", "45usg") fname = os.path.join(pthusgtest, "45usg.nam") - assert os.path.isfile(fname), "nam file not found {}".format(fname) + assert os.path.isfile(fname), f"nam file not found {fname}" # Create the model m = flopy.modflow.Modflow(modelname="45usg", verbose=True) diff --git a/autotest/t017_test.py b/autotest/t017_test.py index 52fc58af3..342e72bac 100644 --- a/autotest/t017_test.py +++ b/autotest/t017_test.py @@ -36,9 +36,10 @@ def test_formattedfile_read(): ), "formatted head read using totim != head read using idx" ts = h.get_ts((0, 7, 5)) + expected = 944.487 assert np.isclose( - ts[0, 1], 944.487, 1e-6 - ), "time series value ({}) != {}".format(ts[0, 1], 944.487) + ts[0, 1], expected, 1e-6 + ), f"time series value ({ts[0, 1]}) != {expected}" h.close() # Check error when reading empty file @@ -59,7 +60,7 @@ def test_binaryfile_read(): assert isinstance(h, flopy.utils.HeadFile) times = h.get_times() - assert np.isclose(times[0], 10.0), "times[0] != {}".format(times[0]) + assert np.isclose(times[0], 10.0), f"times[0] != {times[0]}" kstpkper = h.get_kstpkper() assert kstpkper[0] == (0, 0), "kstpkper[0] != (0, 0)" @@ -75,9 +76,10 @@ def test_binaryfile_read(): ), "binary head read using totim != head read using idx" ts = h.get_ts((0, 7, 5)) + expected = 26.00697135925293 assert np.isclose( - ts[0, 1], 26.00697135925293 - ), "time series value ({}) != {}".format(ts[0, 1], -26.00697135925293) + ts[0, 1], expected + ), f"time series value ({ts[0, 1]}) != {expected}" h.close() # Check error when reading empty file @@ -160,11 +162,11 @@ def test_cellbudgetfile_position(): idx = 8767 ipos = v.get_position(idx) ival = 50235424 - assert ipos == ival, "position of index 8767 != {}".format(ival) + assert ipos == ival, f"position of index 8767 != {ival}" ipos = v.get_position(idx, header=True) ival = 50235372 - assert ipos == ival, "position of index 8767 header != {}".format(ival) + 
assert ipos == ival, f"position of index 8767 header != {ival}" cbcd = [] for i in range(idx, v.get_nrecords()): @@ -191,7 +193,7 @@ def test_cellbudgetfile_position(): try: v2.list_records() except: - assert False, "could not list records on {}".format(opth) + assert False, f"could not list records on {opth}" names = v2.get_unique_record_names(decode=True) @@ -201,7 +203,7 @@ def test_cellbudgetfile_position(): v2.close() for i, (d1, d2) in enumerate(zip(cbcd, cbcd2)): - msg = "{} data from slice is not identical".format(names[i].rstrip()) + msg = f"{names[i].rstrip()} data from slice is not identical" assert np.array_equal(d1, d2), msg # Check error when reading empty file diff --git a/autotest/t019_test.py b/autotest/t019_test.py index 50cccffca..daeb7d0e7 100644 --- a/autotest/t019_test.py +++ b/autotest/t019_test.py @@ -115,17 +115,15 @@ def test_hydmodfile_read(): for label in labels: data = h.get_data(obsname=label) - assert data.shape == (len(times),), "data shape is not ({},)".format( - len(times) - ) + assert data.shape == ( + len(times), + ), f"data shape is not ({len(times)},)" data = h.get_data() - assert data.shape == (len(times),), "data shape is not ({},)".format( - len(times) - ) + assert data.shape == (len(times),), f"data shape is not ({len(times)},)" assert ( len(data.dtype.names) == nitems + 1 - ), "data column length is not {}".format(len(nitems + 1)) + ), f"data column length is not {len(nitems + 1)}" try: import pandas as pd @@ -171,9 +169,9 @@ def test_mf6obsfile_read(): assert isinstance(h, flopy.utils.Mf6Obs) ntimes = h.get_ntimes() - assert ntimes == 3, "Not enough times in {} file...{}".format( - txt, os.path.basename(pth) - ) + assert ( + ntimes == 3 + ), f"Not enough times in {txt} file...{os.path.basename(pth)}" times = h.get_times() assert len(times) == 3, "Not enough times in {} file...{}".format( @@ -203,15 +201,15 @@ def test_mf6obsfile_read(): data = h.get_data(obsname=label) assert data.shape == ( len(times), - ), "data shape is not ({},)".format(len(times)) + ), f"data shape is not ({len(times)},)" data = h.get_data() - assert data.shape == (len(times),), "data shape is not ({},)".format( - len(times) - ) + assert data.shape == ( + len(times), + ), f"data shape is not ({len(times)},)" assert ( len(data.dtype.names) == nitems + 1 - ), "data column length is not {}".format(len(nitems + 1)) + ), f"data column length is not {len(nitems + 1)}" if pd is not None: for idx in range(ntimes): diff --git a/autotest/t020_test.py b/autotest/t020_test.py index 5b1907eb1..9d2f4c06c 100644 --- a/autotest/t020_test.py +++ b/autotest/t020_test.py @@ -31,9 +31,7 @@ def test_mfnwt_run(): exe = flopy.which(exe_name) if exe is None: - print( - "Specified executable {} does not exist in path".format(exe_name) - ) + print(f"Specified executable {exe_name} does not exist in path") return modelname = "watertable" @@ -128,7 +126,7 @@ def test_mfnwt_run(): # remove existing heads results, if necessary try: - os.remove(os.path.join(model_ws, "{0}.hds".format(modelname))) + os.remove(os.path.join(model_ws, f"{modelname}.hds")) except: pass # run existing model @@ -136,7 +134,7 @@ def test_mfnwt_run(): # Read the simulated MODFLOW-2005 model results # Create the headfile object - headfile = os.path.join(model_ws, "{0}.hds".format(modelname)) + headfile = os.path.join(model_ws, f"{modelname}.hds") headobj = flopy.utils.HeadFile(headfile, precision="single") times = headobj.get_times() head = headobj.get_data(totim=times[-1]) @@ -165,7 +163,7 @@ def test_mfnwt_run(): 
ax.set_xlabel("Horizontal distance, in m") ax.set_ylabel("Percent Error") - fig.savefig(os.path.join(model_ws, "{}.png".format(modelname))) + fig.savefig(os.path.join(model_ws, f"{modelname}.png")) return diff --git a/autotest/t023_test.py b/autotest/t023_test.py index 4b158b546..165baf1e4 100644 --- a/autotest/t023_test.py +++ b/autotest/t023_test.py @@ -63,7 +63,7 @@ def test_mt3d_multispecies(): ) # Load the MT3D model into mt2 and then write it out - fname = modelname + ".nam" + fname = f"{modelname}.nam" mt2 = flopy.mt3d.Mt3dms.load(fname, model_ws=testpth, verbose=True) # check obs I/O assert np.all(mt.btn.obs == mt2.btn.obs) diff --git a/autotest/t024_test.py b/autotest/t024_test.py index b05200172..fb68ec6af 100644 --- a/autotest/t024_test.py +++ b/autotest/t024_test.py @@ -180,7 +180,7 @@ def test_oc_check(): if __name__ == "__main__": - print("numpy version: {}".format(np.__version__)) + print(f"numpy version: {np.__version__}") for mfnam in testmodels: checker_on_load(mfnam) test_bcs_check() diff --git a/autotest/t025_test.py b/autotest/t025_test.py index 768b1dc94..2fafb6c93 100644 --- a/autotest/t025_test.py +++ b/autotest/t025_test.py @@ -50,19 +50,16 @@ def load_lak(mfnam, pth, run): try: success, buff = m.run_model(silent=True) except: - msg = "could not run base model {}".format( - os.path.splitext(mfnam)[0] - ) - print(msg) + print(f"could not run base model {os.path.splitext(mfnam)[0]}") pass msg = ( - "base model {} ".format(os.path.splitext(mfnam)[0]) - + "run did not terminate successfully" + f"base model {os.path.splitext(mfnam)[0]} " + "run did not terminate successfully" ) assert success, msg msg = ( - "base model {} ".format(os.path.splitext(mfnam)[0]) - + "run terminated successfully" + f"base model {os.path.splitext(mfnam)[0]} " + "run terminated successfully" ) print(msg) fn0 = os.path.join(lpth, mfnam) @@ -79,26 +76,21 @@ def load_lak(mfnam, pth, run): try: success, buff = m.run_model(silent=False) except: - msg = "could not run new model {}".format( - os.path.splitext(mfnam)[0] - ) - print(msg) + print(f"could not run new model {os.path.splitext(mfnam)[0]}") pass msg = ( - "new model {} ".format(os.path.splitext(mfnam)[0]) - + "run did not terminate successfully" + f"new model {os.path.splitext(mfnam)[0]} " + "run did not terminate successfully" ) assert success, msg msg = ( - "new model {} ".format(os.path.splitext(mfnam)[0]) - + "run terminated successfully" + f"new model {os.path.splitext(mfnam)[0]} " + "run terminated successfully" ) print(msg) fn1 = os.path.join(apth, mfnam) - fsum = os.path.join( - compth, "{}.budget.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.budget.out") if run: try: success = pymake.compare_budget( diff --git a/autotest/t026_test.py b/autotest/t026_test.py index e23070690..38b857b15 100644 --- a/autotest/t026_test.py +++ b/autotest/t026_test.py @@ -113,7 +113,7 @@ def test_seawat_henry(): if isseawat is not None: success, buff = mswt.run_model(silent=False) - assert success, "{} did not run".format(mswt.name) + assert success, f"{mswt.name} did not run" return @@ -181,7 +181,7 @@ def test_seawat2_henry(): if isseawat is not None: success, buff = m.run_model(silent=False) - assert success, "{} did not run".format(m.name) + assert success, f"{m.name} did not run" return diff --git a/autotest/t027_test.py b/autotest/t027_test.py index 6abe11da6..cd384044f 100644 --- a/autotest/t027_test.py +++ b/autotest/t027_test.py @@ -39,11 +39,11 @@ def test_load(): # load a real mnw2 
package from a steady state model (multiple wells) m2 = flopy.modflow.Modflow("br", model_ws=cpth) path = os.path.join("..", "examples", "data", "mnw2_examples") - mnw2_2 = flopy.modflow.ModflowMnw2.load(path + "/BadRiver_cal.mnw2", m2) + mnw2_2 = flopy.modflow.ModflowMnw2.load(f"{path}/BadRiver_cal.mnw2", m2) mnw2_2.write_file(os.path.join(cpth, "brtest.mnw2")) m3 = flopy.modflow.Modflow("br", model_ws=cpth) - mnw2_3 = flopy.modflow.ModflowMnw2.load(cpth + "/brtest.mnw2", m3) + mnw2_3 = flopy.modflow.ModflowMnw2.load(f"{cpth}/brtest.mnw2", m3) mnw2_2.node_data.sort(order="wellid") mnw2_3.node_data.sort(order="wellid") assert np.array_equal(mnw2_2.node_data, mnw2_3.node_data) @@ -73,10 +73,10 @@ def test_mnw1_load_write(): assert len(m.mnw1.stress_period_data[i]) == 17 assert len(np.unique(m.mnw1.stress_period_data[i]["mnw_no"])) == 15 assert len(set(m.mnw1.stress_period_data[i]["label"])) == 4 - shutil.copy(mnw1_path + "/mnw1.nam", cpth) - shutil.copy(mnw1_path + "/mnw1.dis", cpth) - shutil.copy(mnw1_path + "/mnw1.bas", cpth) - m.mnw1.fn_path = cpth + "/mnw1.mnw" + shutil.copy(f"{mnw1_path}/mnw1.nam", cpth) + shutil.copy(f"{mnw1_path}/mnw1.dis", cpth) + shutil.copy(f"{mnw1_path}/mnw1.bas", cpth) + m.mnw1.fn_path = f"{cpth}/mnw1.mnw" m.mnw1.write_file() m2 = flopy.modflow.Modflow.load( "mnw1.nam", @@ -467,13 +467,13 @@ def test_blank_lines(): wellids2 = list(spd["wellid"]) emsg = "incorrect keys returned from load mnw2 stress period data" for wellid, wellid2 in zip(wellids, wellids2): - emsg += "\n {} -- {}".format(wellid, wellid2) + emsg += f"\n {wellid} -- {wellid2}" assert wellids2 == wellids, emsg rates2 = list(spd["qdes"]) emsg = "incorrect qdes rates returned from load mnw2 stress period data" for rate, rate2 in zip(rates, rates2): - emsg += "\n {} -- {}".format(rate, rate2) + emsg += f"\n {rate} -- {rate2}" assert rates2 == rates, emsg return diff --git a/autotest/t028_test.py b/autotest/t028_test.py index debae13b8..b0fc5116d 100644 --- a/autotest/t028_test.py +++ b/autotest/t028_test.py @@ -64,7 +64,7 @@ def test_seawat_array_format(): subds = ["1_classic_case1"] for subd in subds: pth = os.path.join(pthtest, d, subd) - testpth = os.path.join(newpth, d + "-" + subd) + testpth = os.path.join(newpth, f"{d}-{subd}") if os.path.isdir(testpth): shutil.rmtree(testpth) os.mkdir(testpth) @@ -79,7 +79,7 @@ def test_seawat_array_format(): m.write_input() if isswtv4 is not None and runmodel: success, buff = m.run_model(silent=False) - assert success, "{} did not run".format(m.name) + assert success, f"{m.name} did not run" return @@ -92,7 +92,7 @@ def test_swtv4(): def run_swtv4(d, subd): # set up paths pth = os.path.join(pthtest, d, subd) - testpth = os.path.join(newpth, d + "-" + subd) + testpth = os.path.join(newpth, f"{d}-{subd}") if os.path.isdir(testpth): shutil.rmtree(testpth) os.mkdir(testpth) @@ -113,7 +113,7 @@ def run_swtv4(d, subd): # run the model if isswtv4 is not None and runmodel: success, buff = m.run_model(silent=False) - assert success, "{} did not run".format(m.name) + assert success, f"{m.name} did not run" if __name__ == "__main__": diff --git a/autotest/t029_test.py b/autotest/t029_test.py index 5d32c1663..be9e54b4a 100644 --- a/autotest/t029_test.py +++ b/autotest/t029_test.py @@ -26,14 +26,12 @@ def test_mfgrddis_MfGrdFile(): nodes = grb.nodes ia = grb.ia shape = ia.shape[0] - assert shape == nodes + 1, "ia size ({}) not equal to {}".format( - shape, nodes + 1 - ) + assert shape == nodes + 1, f"ia size ({shape}) not equal to {nodes + 1}" nnz = ia[-1] ja = grb.ja 
shape = ja.shape[0] - assert shape == nnz, "ja size ({}) not equal to {}".format(shape, nnz) + assert shape == nnz, f"ja size ({shape}) not equal to {nnz}" modelgrid = grb.modelgrid assert isinstance( @@ -53,34 +51,30 @@ def test_mfgrddis_modelgrid(): lc = modelgrid.plot() assert isinstance( lc, matplotlib.collections.LineCollection - ), "could not plot grid object created from {}".format(fn) + ), f"could not plot grid object created from {fn}" plt.close() extents = modelgrid.extent errmsg = ( - "extents {} of {} ".format(extents, fn) - + "does not equal (0.0, 8000.0, 0.0, 8000.0)" + f"extents {extents} of {fn} does not equal (0.0, 8000.0, 0.0, 8000.0)" ) assert extents == (0.0, 8000.0, 0.0, 8000.0), errmsg ncpl = modelgrid.ncol * modelgrid.nrow - assert modelgrid.ncpl == ncpl, "ncpl ({}) does not equal {}".format( - modelgrid.ncpl, ncpl - ) + assert ( + modelgrid.ncpl == ncpl + ), f"ncpl ({modelgrid.ncpl}) does not equal {ncpl}" nvert = modelgrid.nvert iverts = modelgrid.iverts maxvertex = max([max(sublist[1:]) for sublist in iverts]) - assert maxvertex + 1 == nvert, "nvert ({}) does not equal {}".format( - maxvertex + 1, nvert - ) - verts = modelgrid.verts assert ( - nvert == verts.shape[0] - ), "number of vertex (x, y) pairs ({}) ".format( - verts.shape[0] - ) + "does not equal {}".format( - nvert + maxvertex + 1 == nvert + ), f"nvert ({maxvertex + 1}) does not equal {nvert}" + verts = modelgrid.verts + assert nvert == verts.shape[0], ( + f"number of vertex (x, y) pairs ({verts.shape[0]}) " + f"does not equal {nvert}" ) @@ -91,19 +85,17 @@ def test_mfgrddisv_MfGrdFile(): nodes = grb.nodes ia = grb.ia shape = ia.shape[0] - assert shape == nodes + 1, "ia size ({}) not equal to {}".format( - shape, nodes + 1 - ) + assert shape == nodes + 1, f"ia size ({shape}) not equal to {nodes + 1}" nnz = ia[-1] ja = grb.ja shape = ja.shape[0] - assert shape == nnz, "ja size ({}) not equal to {}".format(shape, nnz) + assert shape == nnz, f"ja size ({shape}) not equal to {nnz}" mg = grb.modelgrid assert isinstance( mg, flopy.discretization.VertexGrid - ), "invalid grid type ({})".format(type(mg)) + ), f"invalid grid type ({type(mg)})" def test_mfgrddisv_modelgrid(): @@ -113,42 +105,37 @@ def test_mfgrddisv_modelgrid(): ) assert isinstance( mg, flopy.discretization.VertexGrid - ), "invalid grid type ({})".format(type(mg)) + ), f"invalid grid type ({type(mg)})" ncpl = 218 - assert mg.ncpl == ncpl, "ncpl ({}) does not equal {}".format(mg.ncpl, ncpl) + assert mg.ncpl == ncpl, f"ncpl ({mg.ncpl}) does not equal {ncpl}" lc = mg.plot() assert isinstance( lc, matplotlib.collections.LineCollection - ), "could not plot grid object created from {}".format(fn) + ), f"could not plot grid object created from {fn}" plt.close("all") extents = mg.extent extents0 = (0.0, 700.0, 0.0, 700.0) - errmsg = "extents {} of {} ".format( - extents, fn - ) + "does not equal {}".format(extents0) + errmsg = f"extents {extents} of {fn} does not equal {extents0}" assert extents == extents0, errmsg nvert = mg.nvert iverts = mg.iverts maxvertex = max([max(sublist[1:]) for sublist in iverts]) - assert maxvertex + 1 == nvert, "nvert ({}) does not equal {}".format( - maxvertex + 1, nvert - ) - verts = mg.verts assert ( - nvert == verts.shape[0] - ), "number of vertex (x, y) pairs ({}) ".format( - verts.shape[0] - ) + "does not equal {}".format( - nvert + maxvertex + 1 == nvert + ), f"nvert ({maxvertex + 1}) does not equal {nvert}" + verts = mg.verts + assert nvert == verts.shape[0], ( + f"number of vertex (x, y) pairs ({verts.shape[0]}) " + 
f"does not equal {nvert}" ) cellxy = np.column_stack((mg.xyzcellcenters[:2])) - errmsg = "shape of flow.disv centroids {} not equal to (218, 2).".format( - cellxy.shape + errmsg = ( + f"shape of flow.disv centroids {cellxy.shape} not equal to (218, 2)." ) assert cellxy.shape == (218, 2), errmsg return @@ -161,19 +148,17 @@ def test_mfgrddisu_MfGrdFile(): nodes = grb.nodes ia = grb.ia shape = ia.shape[0] - assert shape == nodes + 1, "ia size ({}) not equal to {}".format( - shape, nodes + 1 - ) + assert shape == nodes + 1, f"ia size ({shape}) not equal to {nodes + 1}" nnz = ia[-1] ja = grb.ja shape = ja.shape[0] - assert shape == nnz, "ja size ({}) not equal to {}".format(shape, nnz) + assert shape == nnz, f"ja size ({shape}) not equal to {nnz}" mg = grb.modelgrid assert isinstance( mg, flopy.discretization.UnstructuredGrid - ), "invalid grid type ({})".format(type(mg)) + ), f"invalid grid type ({type(mg)})" @raises(TypeError) @@ -191,34 +176,29 @@ def test_mfgrddisu_modelgrid(): ) assert isinstance( mg, flopy.discretization.UnstructuredGrid - ), "invalid grid type ({})".format(type(mg)) + ), f"invalid grid type ({type(mg)})" lc = mg.plot() assert isinstance( lc, matplotlib.collections.LineCollection - ), "could not plot grid object created from {}".format(fn) + ), f"could not plot grid object created from {fn}" plt.close("all") extents = mg.extent extents0 = (0.0, 10000.0, 0.0, 1.0) - errmsg = "extents {} of {} ".format( - extents, fn - ) + "does not equal {}".format(extents0) + errmsg = f"extents {extents} of {fn} does not equal {extents0}" assert extents == extents0, errmsg nvert = mg.nvert iverts = mg.iverts maxvertex = max([max(sublist[1:]) for sublist in iverts]) - assert maxvertex + 1 == nvert, "nvert ({}) does not equal {}".format( - maxvertex + 1, nvert - ) - verts = mg.verts assert ( - nvert == verts.shape[0] - ), "number of vertex (x, y) pairs ({}) ".format( - verts.shape[0] - ) + "does not equal {}".format( - nvert + maxvertex + 1 == nvert + ), f"nvert ({maxvertex + 1}) does not equal {nvert}" + verts = mg.verts + assert nvert == verts.shape[0], ( + f"number of vertex (x, y) pairs ({verts.shape[0]}) " + f"does not equal {nvert}" ) return diff --git a/autotest/t031_test.py b/autotest/t031_test.py index 4618ad2bd..955d11e4e 100644 --- a/autotest/t031_test.py +++ b/autotest/t031_test.py @@ -41,9 +41,9 @@ def test_mpsim(): exe_name="mp6", modflowmodel=m, model_ws=path, - dis_file=m.name + ".dis", - head_file=m.name + ".hed", - budget_file=m.name + ".bud", + dis_file=f"{m.name}.dis", + head_file=f"{m.name}.hed", + budget_file=f"{m.name}.bud", ) mpb = flopy.modpath.Modpath6Bas( @@ -135,7 +135,7 @@ def test_get_destination_data(): # test deprecation if shapefile: - m.dis.export(path + "/dis.shp") + m.dis.export(f"{path}/dis.shp") pthld = PathlineFile(os.path.join(path, "EXAMPLE-3.pathline")) epd = EndpointFile(os.path.join(path, "EXAMPLE-3.endpoint")) diff --git a/autotest/t035_test.py b/autotest/t035_test.py index 70d4ac8d8..6075fe898 100644 --- a/autotest/t035_test.py +++ b/autotest/t035_test.py @@ -51,10 +51,10 @@ def test_simplelgr_load_and_write(silent=True): # get the namefiles of the parent and child namefiles = lgr.get_namefiles() - msg = "get_namefiles returned {} items instead of 2".format(len(namefiles)) + msg = f"get_namefiles returned {len(namefiles)} items instead of 2" assert len(namefiles) == 2, msg tpth = os.path.dirname(namefiles[0]) - msg = "dir path is {} not {}".format(tpth, opth) + msg = f"dir path is {tpth} not {opth}" assert tpth == opth, msg # run the lgr model 
@@ -71,10 +71,10 @@ def test_simplelgr_load_and_write(silent=True): # get the namefiles of the parent and child namefiles = lgr.get_namefiles() - msg = "get_namefiles returned {} items instead of 2".format(len(namefiles)) + msg = f"get_namefiles returned {len(namefiles)} items instead of 2" assert len(namefiles) == 2, msg tpth = os.path.dirname(namefiles[0]) - msg = "dir path is {} not {}".format(tpth, npth) + msg = f"dir path is {tpth} not {npth}" assert tpth == npth, msg # write the lgr model in to the new path @@ -137,11 +137,11 @@ def singleModel( lRunSingle=False, ): if iChild > 0: - print("child model" + modelname) + print(f"child model {modelname}") iLUoffset = 100 * int(iChild) - print("increase Unit Numbers by " + str(iLUoffset)) + print(f"increase Unit Numbers by {iLUoffset}") else: - print("parent model " + modelname) + print(f"parent model {modelname}") iLUoffset = 0 if steady: nper = 1 @@ -294,7 +294,7 @@ def test_simple_lgrmodel_from_scratch(silent=True): nstp = [ats] tsmult = 1.07 steady = True - rundir = cpth + "b" + rundir = f"{cpth}b" lgrExe = exe_name # wel data diff --git a/autotest/t036_test.py b/autotest/t036_test.py index e58ae9220..d2b282b85 100644 --- a/autotest/t036_test.py +++ b/autotest/t036_test.py @@ -87,9 +87,7 @@ def test_uzf_unit_numbers(): # compare budget terms if run: - fsum = os.path.join( - compth, "{}.budget.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.budget.out") try: success = pymake.compare_budget( fn0, fn1, max_incpd=0.1, max_cumpd=0.1, outfile=fsum @@ -137,7 +135,7 @@ def test_unitnums_load_and_write(): msg = ( "modflow-2005 testsfr2_tab does not have " - + "1 layer, 7 rows, and 100 columns" + "1 layer, 7 rows, and 100 columns" ) v = (m.nlay, m.nrow, m.ncol, m.nper) assert v == (1, 7, 100, 50), msg @@ -162,9 +160,7 @@ def test_unitnums_load_and_write(): fn1 = os.path.join(apth, mfnam) if run: - fsum = os.path.join( - compth, "{}.budget.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.budget.out") try: success = pymake.compare_budget( fn0, fn1, max_incpd=0.1, max_cumpd=0.1, outfile=fsum diff --git a/autotest/t037_test.py b/autotest/t037_test.py index 323813e11..42e1f3511 100644 --- a/autotest/t037_test.py +++ b/autotest/t037_test.py @@ -68,9 +68,7 @@ def load_swi(mfnam, pth): fn1 = os.path.join(apth, mfnam) if run: - fsum = os.path.join( - compth, "{}.budget.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.budget.out") try: success = pymake.compare_budget( fn0, fn1, max_incpd=0.1, max_cumpd=0.1, outfile=fsum diff --git a/autotest/t038_test.py b/autotest/t038_test.py index c6dd517b9..0ed5587a8 100644 --- a/autotest/t038_test.py +++ b/autotest/t038_test.py @@ -31,7 +31,7 @@ def load_model(namfile, model_ws): m = flopy.modflow.Modflow.load( namfile, model_ws=model_ws, version="mfusg", verbose=True, check=False ) - assert m, "Could not load namefile {}".format(namfile) + assert m, f"Could not load namefile {namfile}" assert m.load_fail is False m.change_model_ws(tpth) m.write_input() diff --git a/autotest/t039_test.py b/autotest/t039_test.py index 1bb5d39ad..4eda87ac1 100644 --- a/autotest/t039_test.py +++ b/autotest/t039_test.py @@ -65,7 +65,7 @@ def read_zonebudget_file(fname): elif items[0] == "" and items[1] == "\n": continue - record = "{}_".format(flow_dir) + "_".join(items[0].strip().split()) + record = f"{flow_dir}_" + "_".join(items[0].strip().split()) if 
record.startswith(("FROM_", "TO_")): record = "_".join(record.split("_")[1:]) vals = [float(i) for i in items[1:-1]] @@ -121,9 +121,7 @@ def test_compare2zonebudget(rtol=1e-2): # a1[idxloc], # a2[idxloc]) # print(txt) - s = "Zonebudget arrays do not match at time {0} ({1}): {2}.".format( - time, name, mxdiff - ) + s = f"Zonebudget arrays do not match at time {time} ({name}): {mxdiff}." assert allclose, s return @@ -268,9 +266,7 @@ def test_zonbud_active_areas_zone_zero(rtol=1e-2): zbud = pd.read_csv(zbud_f) zbud.columns = [c.strip() for c in zbud.columns] zbud.columns = ["_".join(c.split()) for c in zbud.columns] - zbud.index = pd.Index( - ["ZONE_{}".format(z) for z in zbud.ZONE.values], name="name" - ) + zbud.index = pd.Index([f"ZONE_{z}" for z in zbud.ZONE.values], name="name") cols = [c for c in zbud.columns if "ZONE_" in c] zbud = zbud[cols] @@ -282,7 +278,7 @@ def test_zonbud_active_areas_zone_zero(rtol=1e-2): fpbud = fpbud[["name"] + [c for c in fpbud.columns if "ZONE" in c]] fpbud = fpbud.set_index("name").T fpbud = fpbud[[c for c in fpbud.columns if "ZONE" in c]] - fpbud = fpbud.loc[["ZONE_{}".format(z) for z in range(1, 4)]] + fpbud = fpbud.loc[[f"ZONE_{z}" for z in range(1, 4)]] # Test for equality allclose = np.allclose(zbud, fpbud, rtol) diff --git a/autotest/t040_test.py b/autotest/t040_test.py index 72ffea6f3..eef885a13 100644 --- a/autotest/t040_test.py +++ b/autotest/t040_test.py @@ -28,26 +28,26 @@ def build_model(): ml.run_model() hds_geo = flopy.utils.HeadFile( - os.path.join(model_ws, ml.name + ".swt_geostatic_stress.hds"), + os.path.join(model_ws, f"{ml.name}.swt_geostatic_stress.hds"), text="stress", ).get_alldata() hds_eff = flopy.utils.HeadFile( - os.path.join(model_ws, ml.name + ".swt_eff_stress.hds"), + os.path.join(model_ws, f"{ml.name}.swt_eff_stress.hds"), text="effective stress", ).get_alldata() hds_sub = flopy.utils.HeadFile( - os.path.join(model_ws, ml.name + ".swt_subsidence.hds"), + os.path.join(model_ws, f"{ml.name}.swt_subsidence.hds"), text="subsidence", ).get_alldata() hds_comp = flopy.utils.HeadFile( - os.path.join(model_ws, ml.name + ".swt_total_comp.hds"), + os.path.join(model_ws, f"{ml.name}.swt_total_comp.hds"), text="layer compaction", ).get_alldata() hds_precon = flopy.utils.HeadFile( - os.path.join(model_ws, ml.name + ".swt_precon_stress.hds"), + os.path.join(model_ws, f"{ml.name}.swt_precon_stress.hds"), text="preconsol stress", ).get_alldata() diff --git a/autotest/t041_test.py b/autotest/t041_test.py index 2ef739bb8..107858bad 100644 --- a/autotest/t041_test.py +++ b/autotest/t041_test.py @@ -261,7 +261,7 @@ def test_filenames(): modelname = "hob_simple" pkglst = ["dis", "bas6", "pcg", "lpf"] m = flopy.modflow.Modflow.load( - modelname + ".nam", + f"{modelname}.nam", model_ws=pth, check=False, load_only=pkglst, @@ -277,8 +277,8 @@ def test_filenames(): column=5, time_series_data=[[1.0, 54.4], [2.0, 55.2]], ) - f_in = modelname + "_custom_fname.hob" - f_out = modelname + "_custom_fname.hob.out" + f_in = f"{modelname}_custom_fname.hob" + f_out = f"{modelname}_custom_fname.hob.out" filenames = [f_in, f_out] hob = flopy.modflow.ModflowHob( m, @@ -292,9 +292,7 @@ def test_filenames(): # Write the model input files m.write_input() - s = "output filename ({}) does not match specified name".format( - m.get_output(unit=51) - ) + s = f"output filename ({m.get_output(unit=51)}) does not match specified name" assert m.get_output(unit=51) == f_out, s s = "specified HOB input file not found" assert os.path.isfile(os.path.join(pth, f_in)), s @@ -453,23 
+451,23 @@ def test_flwob_load(): ) # check variables were read properly - s = "nqfb loaded from {} read incorrectly".format(m.drob.fn_path) + s = f"nqfb loaded from {m.drob.fn_path} read incorrectly" assert drob.nqfb == m.drob.nqfb, s - s = "nqcfb loaded from {} read incorrectly".format(m.drob.fn_path) + s = f"nqcfb loaded from {m.drob.fn_path} read incorrectly" assert drob.nqcfb == m.drob.nqcfb, s - s = "nqtfb loaded from {} read incorrectly".format(m.drob.fn_path) + s = f"nqtfb loaded from {m.drob.fn_path} read incorrectly" assert drob.nqtfb == m.drob.nqtfb, s - s = "obsnam loaded from {} read incorrectly".format(m.drob.fn_path) + s = f"obsnam loaded from {m.drob.fn_path} read incorrectly" assert list([n for n in drob.obsnam]) == list( [n for n in m.drob.obsnam] ), s - s = "flwobs loaded from {} read incorrectly".format(m.drob.fn_path) + s = f"flwobs loaded from {m.drob.fn_path} read incorrectly" assert np.array_equal(drob.flwobs, m.drob.flwobs), s - s = "layer loaded from {} read incorrectly".format(m.drob.fn_path) + s = f"layer loaded from {m.drob.fn_path} read incorrectly" assert np.array_equal(drob.layer, m.drob.layer), s - s = "row loaded from {} read incorrectly".format(m.drob.fn_path) + s = f"row loaded from {m.drob.fn_path} read incorrectly" assert np.array_equal(drob.row, m.drob.row), s - s = "column loaded from {} read incorrectly".format(m.drob.fn_path) + s = f"column loaded from {m.drob.fn_path} read incorrectly" assert np.array_equal(drob.column, m.drob.column), s return diff --git a/autotest/t043_test.py b/autotest/t043_test.py index 4fd0b298c..7fe382435 100644 --- a/autotest/t043_test.py +++ b/autotest/t043_test.py @@ -72,10 +72,7 @@ def test_gage_load_and_write(): for f in files: pth0 = os.path.join(opth, f) pth1 = os.path.join(npth, f) - msg = ( - 'new and original gage file "{}" '.format(f) - + "are not binary equal." - ) + msg = f'new and original gage file "{f}" are not binary equal.' 
assert filecmp.cmp(pth0, pth1), msg except: raise ValueError("could not load new GAGE output files") diff --git a/autotest/t044_test.py b/autotest/t044_test.py index bbcdd0ee9..9f111c346 100644 --- a/autotest/t044_test.py +++ b/autotest/t044_test.py @@ -74,9 +74,7 @@ def load_and_write_pcgn(mfnam, pth): fn1 = os.path.join(apth, mfnam) if run: - fsum = os.path.join( - compth, "{}.head.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.head.out") success = False try: success = pymake.compare_heads(fn0, fn1, outfile=fsum, htol=0.005) @@ -86,9 +84,7 @@ def load_and_write_pcgn(mfnam, pth): assert success, "head comparison failure" - fsum = os.path.join( - compth, "{}.budget.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.budget.out") success = False try: success = pymake.compare_budget( diff --git a/autotest/t045_test.py b/autotest/t045_test.py index f4cafd8b4..d1f8adce6 100644 --- a/autotest/t045_test.py +++ b/autotest/t045_test.py @@ -69,9 +69,7 @@ def load_and_write_gmg(mfnam, pth): fn1 = os.path.join(apth, mfnam) if run: - fsum = os.path.join( - compth, "{}.head.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.head.out") success = False try: success = pymake.compare_heads(fn0, fn1, outfile=fsum) @@ -81,9 +79,7 @@ def load_and_write_gmg(mfnam, pth): assert success, "head comparison failure" - fsum = os.path.join( - compth, "{}.budget.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.budget.out") success = False try: success = pymake.compare_budget( diff --git a/autotest/t046_test.py b/autotest/t046_test.py index 4d720e460..edcb554fa 100644 --- a/autotest/t046_test.py +++ b/autotest/t046_test.py @@ -87,9 +87,7 @@ def load_and_write(mfnam, pth): if run: # compare heads - fsum = os.path.join( - compth, "{}.head.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.head.out") success = False try: success = pymake.compare_heads(fn0, fn1, outfile=fsum) @@ -100,9 +98,7 @@ def load_and_write(mfnam, pth): assert success, "head comparison failure" # compare heads - fsum = os.path.join( - compth, "{}.ddn.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.ddn.out") success = False try: success = pymake.compare_heads( @@ -115,9 +111,7 @@ def load_and_write(mfnam, pth): assert success, "head comparison failure" # compare budgets - fsum = os.path.join( - compth, "{}.budget.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.budget.out") success = False try: success = pymake.compare_budget( diff --git a/autotest/t047_test.py b/autotest/t047_test.py index 8576c08a8..39ad0caed 100644 --- a/autotest/t047_test.py +++ b/autotest/t047_test.py @@ -19,7 +19,7 @@ def get_namefile_entries(fpth): try: f = open(fpth, "r") except: - print("could not open...{}".format(fpth)) + print(f"could not open...{fpth}") return None dtype = [ ("ftype", "|S12"), @@ -77,7 +77,7 @@ def test_gage(): m.write_input() # check that the gage output units entries are in the name file - fpth = os.path.join(cpth, "{}.nam".format(mnam)) + fpth = os.path.join(cpth, f"{mnam}.nam") entries = get_namefile_entries(fpth) for idx, g in enumerate(gages): if g[0] < 0: @@ -91,7 +91,7 @@ def test_gage(): found = True iun = iut break - assert found, "{} not in name file 
entries".format(iu) + assert found, f"{iu} not in name file entries" return @@ -124,7 +124,7 @@ def test_gage_files(): m.write_input() # check that the gage output file entries are in the name file - fpth = os.path.join(cpth, "{}.nam".format(mnam)) + fpth = os.path.join(cpth, f"{mnam}.nam") entries = get_namefile_entries(fpth) for idx, f in enumerate(files): found = False @@ -136,11 +136,11 @@ def test_gage_files(): found = True iun = entries[jdx]["unit"] break - assert found, "{} not in name file entries".format(f) + assert found, f"{f} not in name file entries" iu = abs(gages[idx][1]) - assert iu == iun, "{} unit not equal to {} ".format( - f, iu - ) + "- name file unit = {}".format(iun) + assert ( + iu == iun + ), f"{f} unit not equal to {iu} - name file unit = {iun}" return @@ -173,7 +173,7 @@ def test_gage_filenames0(): m.write_input() # check that the gage output units entries are in the name file - fpth = os.path.join(cpth, "{}.nam".format(mnam)) + fpth = os.path.join(cpth, f"{mnam}.nam") entries = get_namefile_entries(fpth) for idx, g in enumerate(gages): if g[0] < 0: @@ -187,7 +187,7 @@ def test_gage_filenames0(): found = True iun = iut break - assert found, "{} not in name file entries".format(iu) + assert found, f"{iu} not in name file entries" return @@ -220,7 +220,7 @@ def test_gage_filenames(): m.write_input() # check that the gage output file entries are in the name file - fpth = os.path.join(cpth, "{}.nam".format(mnam)) + fpth = os.path.join(cpth, f"{mnam}.nam") entries = get_namefile_entries(fpth) for idx, f in enumerate(filenames[1:]): found = False @@ -232,11 +232,11 @@ def test_gage_filenames(): found = True iun = entries[jdx]["unit"] break - assert found, "{} not in name file entries".format(f) + assert found, f"{f} not in name file entries" iu = abs(gages[idx][1]) - assert iu == iun, "{} unit not equal to {} ".format( - f, iu - ) + "- name file unit = {}".format(iun) + assert ( + iu == iun + ), f"{f} unit not equal to {iu} - name file unit = {iun}" return diff --git a/autotest/t048_test.py b/autotest/t048_test.py index 849114e8a..8c5296c60 100644 --- a/autotest/t048_test.py +++ b/autotest/t048_test.py @@ -76,9 +76,7 @@ def load_and_write_fhb(mfnam, pth): fn1 = os.path.join(apth, mfnam) if run: - fsum = os.path.join( - compth, "{}.head.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.head.out") success = False try: success = pymake.compare_heads(fn0, fn1, outfile=fsum) @@ -88,9 +86,7 @@ def load_and_write_fhb(mfnam, pth): assert success, "head comparison failure" - fsum = os.path.join( - compth, "{}.budget.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(compth, f"{os.path.splitext(mfnam)[0]}.budget.out") success = False try: success = pymake.compare_budget( diff --git a/autotest/t049_test.py b/autotest/t049_test.py index 12ee2e245..47578172d 100644 --- a/autotest/t049_test.py +++ b/autotest/t049_test.py @@ -153,9 +153,7 @@ def test_pathline_plot(): lpth = pth nampath = os.path.join(lpth, mfnam) - assert os.path.exists(nampath), "namefile {} doesn't exist.".format( - nampath - ) + assert os.path.exists(nampath), f"namefile {nampath} doesn't exist." 
# load the modflow files for model map m = flopy.modflow.Modflow.load( mfnam, model_ws=lpth, verbose=True, forgive=False, exe_name=mf2005_exe @@ -177,9 +175,7 @@ def test_pathline_plot(): # determine version ver = pthobj.version - assert ver == 6, "{} is not a MODPATH version 6 pathline file".format( - pthfile - ) + assert ver == 6, f"{pthfile} is not a MODPATH version 6 pathline file" # get all pathline data plines = pthobj.get_alldata() @@ -202,7 +198,7 @@ def test_pathline_plot(): plt.savefig(fpth) plt.close() except: - assert False, "could not save plot as {}".format(fpth) + assert False, f"could not save plot as {fpth}" mm = flopy.plot.PlotMapView(model=m) try: @@ -222,7 +218,7 @@ def test_pathline_plot(): plt.savefig(fpth) plt.close() except: - assert False, "could not save plot as {}".format(fpth) + assert False, f"could not save plot as {fpth}" mm = flopy.plot.PlotMapView(model=m) try: @@ -242,7 +238,7 @@ def test_pathline_plot(): plt.savefig(fpth) plt.close() except: - assert False, "could not save plot as {}".format(fpth) + assert False, f"could not save plot as {fpth}" return @@ -265,9 +261,9 @@ def test_pathline_plot_xc(): exe_name=mpth_exe, modflowmodel=ml, model_ws=os.path.join(".", "temp"), - dis_file=ml.name + ".DIS", - head_file=ml.name + ".hed", - budget_file=ml.name + ".bud", + dis_file=f"{ml.name}.DIS", + head_file=f"{ml.name}.hed", + budget_file=f"{ml.name}.bud", ) mpb = flopy.modpath.Modpath6Bas( @@ -326,7 +322,7 @@ def test_mp5_load(): # determine version ver = pthobj.version - assert ver == 5, "{} is not a MODPATH version 5 pathline file".format(fpth) + assert ver == 5, f"{fpth} is not a MODPATH version 5 pathline file" # read all of the pathline and endpoint data plines = pthobj.get_alldata() @@ -347,17 +343,11 @@ def test_mp5_load(): try: mm.plot_pathline(p, colors=colors[n], layer="all") except: - assert False, ( - "could not plot pathline {} ".format(n + 1) - + 'with layer="all"' - ) + assert False, f'could not plot pathline {n + 1} with layer="all"' try: mm.plot_endpoint(e) except: - assert False, ( - "could not plot endpoint {} ".format(n + 1) - + 'with layer="all"' - ) + assert False, f'could not plot endpoint {n + 1} with layer="all"' # plot the grid and ibound array try: @@ -371,7 +361,7 @@ def test_mp5_load(): plt.savefig(fpth, dpi=300) plt.close() except: - assert False, "could not save plot as {}".format(fpth) + assert False, f"could not save plot as {fpth}" return @@ -405,8 +395,8 @@ def test_mp6_timeseries_load(): def eval_timeseries(file): ts = flopy.utils.TimeseriesFile(file) msg = ( - "{} ".format(os.path.basename(file)) - + "is not an instance of flopy.utils.TimeseriesFile" + f"{os.path.basename(file)} " + "is not an instance of flopy.utils.TimeseriesFile" ) assert isinstance(ts, flopy.utils.TimeseriesFile), msg @@ -415,9 +405,7 @@ def eval_timeseries(file): tsd = ts.get_alldata() except: pass - msg = "could not load data using get_alldata() from {}.".format( - os.path.basename(file) - ) + msg = f"could not load data using get_alldata() from {os.path.basename(file)}." assert len(tsd) > 0, msg # get the data for the last particleid @@ -428,7 +416,7 @@ def eval_timeseries(file): pass msg = ( "could not get maximum particleid using get_maxid() from " - + "{}.".format(os.path.basename(file)) + f"{os.path.basename(file)}." ) assert partid is not None, msg @@ -437,11 +425,8 @@ def eval_timeseries(file): except: pass msg = ( - "could not load data for particleid {} ".format(partid) - + "using get_data() from " - + "{}. 
".format(os.path.basename(file)) - + "Maximum partid = " - + "{}.".format(ts.get_maxid()) + f"could not load data for particleid {partid} using get_data() from " + f"{os.path.basename(file)}. Maximum partid = {ts.get_maxid()}." ) assert tsd.shape[0] > 0, msg @@ -452,7 +437,7 @@ def eval_timeseries(file): pass msg = ( "could not get maximum time using get_maxtime() from " - + "{}.".format(os.path.basename(file)) + f"{os.path.basename(file)}." ) assert timemax is not None, msg @@ -461,11 +446,8 @@ def eval_timeseries(file): except: pass msg = ( - "could not load data for totim>={} ".format(timemax) - + "using get_alldata() from " - + "{}. ".format(os.path.basename(file)) - + "Maximum totim = " - + "{}.".format(ts.get_maxtime()) + f"could not load data for totim>={timemax} using get_alldata() from " + f"{os.path.basename(file)}. Maximum totim = {ts.get_maxtime()}." ) assert len(tsd) > 0, msg @@ -476,7 +458,7 @@ def eval_timeseries(file): pass msg = ( "could not get maximum time using get_maxtime() from " - + "{}.".format(os.path.basename(file)) + f"{os.path.basename(file)}." ) assert timemax is not None, msg @@ -485,11 +467,8 @@ def eval_timeseries(file): except: pass msg = ( - "could not load data for totim<={} ".format(timemax) - + "using get_alldata() from " - + "{}. ".format(os.path.basename(file)) - + "Maximum totim = " - + "{}.".format(ts.get_maxtime()) + f"could not load data for totim<={timemax} using get_alldata() from " + f"{os.path.basename(file)}. Maximum totim = {ts.get_maxtime()}." ) assert len(tsd) > 0, msg diff --git a/autotest/t050_test.py b/autotest/t050_test.py index d844c95b0..2fa3909d9 100644 --- a/autotest/t050_test.py +++ b/autotest/t050_test.py @@ -175,7 +175,7 @@ def test_vtk_export_packages(): # totalbytes = os.path.getsize(filetocheck) # assert(totalbytes==1019857) nlines = count_lines_in_file(filetocheck) - assert nlines == 8491, "nlines ({}) not equal to 8491".format(nlines) + assert nlines == 8491, f"nlines ({nlines}) not equal to 8491" # upw with point scalar output output_dir = os.path.join(cpth, "UPW") @@ -184,7 +184,7 @@ def test_vtk_export_packages(): # totalbytes1 = os.path.getsize(filetocheck) # assert(totalbytes1==2559173) nlines1 = count_lines_in_file(filetocheck) - assert nlines1 == 21215, "nlines ({}) not equal to 21215".format(nlines) + assert nlines1 == 21215, f"nlines ({nlines}) not equal to 21215" # bas with smoothing on output_dir = os.path.join(cpth, "BAS") @@ -461,25 +461,25 @@ def test_vtk_vector(): # with point scalars and binary vtk.export_vector( - m, q, output_dir + "_bin", "discharge", point_scalars=True, binary=True + m, q, f"{output_dir}_bin", "discharge", point_scalars=True, binary=True ) - filetocheck = os.path.join(output_dir + "_bin", filenametocheck) + filetocheck = os.path.join(f"{output_dir}_bin", filenametocheck) # totalbytes1 = os.path.getsize(filetocheck) # assert(totalbytes1==942413) # nlines1 = count_lines_in_file(filetocheck, binary=True) # assert(nlines1==3824) - assert os.path.exists(filetocheck), "file (0) does not exist: {}".format( + assert os.path.exists( filetocheck - ) + ), f"file (0) does not exist: {filetocheck}" # with values directly given at vertices q = pp.get_specific_discharge(vectors, m, head, position="vertices") nancount = np.count_nonzero(np.isnan(q[0])) - assert nancount == 472, "nancount != 472 ({})".format(nancount) + assert nancount == 472, f"nancount != 472 ({nancount})" overall = np.nansum(q[0]) + np.nansum(q[1]) + np.nansum(q[2]) assert np.allclose( overall, -15.849639024891047 - ), "vertices 
overall = {}".format(overall) + ), f"vertices overall = {overall}" output_dir = os.path.join(cpth, "freyberg_vector") filenametocheck = "discharge_verts.vtu" vtk.export_vector(m, q, output_dir, "discharge_verts") @@ -487,20 +487,20 @@ def test_vtk_vector(): # totalbytes2 = os.path.getsize(filetocheck) # assert(totalbytes2==1990047) nlines2 = count_lines_in_file(filetocheck) - assert nlines2 == 10598, "nlines != 10598 ({})".format(nlines2) + assert nlines2 == 10598, f"nlines != 10598 ({nlines2})" # with values directly given at vertices and binary vtk.export_vector( - m, q, output_dir + "_bin", "discharge_verts", binary=True + m, q, f"{output_dir}_bin", "discharge_verts", binary=True ) - filetocheck = os.path.join(output_dir + "_bin", filenametocheck) + filetocheck = os.path.join(f"{output_dir}_bin", filenametocheck) # totalbytes3 = os.path.getsize(filetocheck) # assert(totalbytes3==891486) # nlines3 = count_lines_in_file(filetocheck, binary=True) # assert(nlines3==3012) - assert os.path.exists(filetocheck), "file (1) does not exist: {}".format( + assert os.path.exists( filetocheck - ) + ), f"file (1) does not exist: {filetocheck}" return @@ -528,19 +528,19 @@ def test_vtk_vti(): # totalbytes = os.path.getsize(filetocheck) # assert(totalbytes==1075) nlines = count_lines_in_file(filetocheck) - assert nlines == 17, "nlines ({}) not equal to 17".format(nlines) + assert nlines == 17, f"nlines ({nlines}) not equal to 17" # with point scalar - dis.export(output_dir + "_points", fmt="vtk", point_scalars=True) - filetocheck = os.path.join(output_dir + "_points", filenametocheck) + dis.export(f"{output_dir}_points", fmt="vtk", point_scalars=True) + filetocheck = os.path.join(f"{output_dir}_points", filenametocheck) # totalbytes1 = os.path.getsize(filetocheck) # assert(totalbytes1==2474) nlines1 = count_lines_in_file(filetocheck) - assert nlines1 == 29, "nlines1 ({}) not equal to 29".format(nlines1) + assert nlines1 == 29, f"nlines1 ({nlines1}) not equal to 29" # with binary - dis.export(output_dir + "_bin", fmt="vtk", binary=True) - filetocheck = os.path.join(output_dir + "_bin", filenametocheck) + dis.export(f"{output_dir}_bin", fmt="vtk", binary=True) + filetocheck = os.path.join(f"{output_dir}_bin", filenametocheck) # totalbytes2 = os.path.getsize(filetocheck) # assert(totalbytes2==1144) # nlines2 = count_lines_in_file(filetocheck, binary=True) @@ -554,7 +554,7 @@ def test_vtk_vti(): # totalbytes3 = os.path.getsize(filetocheck) # assert(totalbytes3==1606) nlines3 = count_lines_in_file(filetocheck) - assert nlines3 == 37, "nlines3 ({}) not equal to 37".format(nlines3) + assert nlines3 == 37, f"nlines3 ({nlines3}) not equal to 37" # force .vtu filenametocheck = "DIS.vtu" @@ -563,7 +563,7 @@ def test_vtk_vti(): # totalbytes4 = os.path.getsize(filetocheck) # assert(totalbytes4==5723) nlines4 = count_lines_in_file(filetocheck) - assert nlines4 == 125, "nlines4 ({}) not equal to 125".format(nlines4) + assert nlines4 == 125, f"nlines4 ({nlines4}) not equal to 125" # vector filenametocheck = "vect.vti" @@ -578,9 +578,9 @@ def test_vtk_vti(): # vector with point scalars and binary vtk.export_vector( - m, v, output_dir + "_bin", "vect", point_scalars=True, binary=True + m, v, f"{output_dir}_bin", "vect", point_scalars=True, binary=True ) - filetocheck = os.path.join(output_dir + "_bin", filenametocheck) + filetocheck = os.path.join(f"{output_dir}_bin", filenametocheck) # totalbytes6 = os.path.getsize(filetocheck) # assert(totalbytes6==2666) # nlines6 = count_lines_in_file(filetocheck, binary=True) @@ 
-607,16 +607,16 @@ def test_vtk_vtr(): assert nlines == 87 # with point scalar - m.export(output_dir + "_points", fmt="vtk", point_scalars=True) - filetocheck = os.path.join(output_dir + "_points", filenametocheck) + m.export(f"{output_dir}_points", fmt="vtk", point_scalars=True) + filetocheck = os.path.join(f"{output_dir}_points", filenametocheck) # totalbytes1 = os.path.getsize(filetocheck) # assert(totalbytes1==182168) nlines1 = count_lines_in_file(filetocheck) assert nlines1 == 121 # with binary - m.export(output_dir + "_bin", fmt="vtk", binary=True) - filetocheck = os.path.join(output_dir + "_bin", filenametocheck) + m.export(f"{output_dir}_bin", fmt="vtk", binary=True) + filetocheck = os.path.join(f"{output_dir}_bin", filenametocheck) # totalbytes2 = os.path.getsize(filetocheck) # assert(totalbytes2==47874) # nlines2 = count_lines_in_file(filetocheck, binary=True) @@ -764,28 +764,28 @@ def test_vtk_export_true2d_nonregxy(): m.run_model(silent=True) # export and check head with point scalar - hdsfile = os.path.join(output_dir, name + ".hds") + hdsfile = os.path.join(output_dir, f"{name}.hds") hds = bf.HeadFile(hdsfile) head = hds.get_data() vtk.export_array( - m, head, output_dir, name + "_head", point_scalars=True, true2d=True + m, head, output_dir, f"{name}_head", point_scalars=True, true2d=True ) - filetocheck = os.path.join(output_dir, name + "_head.vtr") + filetocheck = os.path.join(output_dir, f"{name}_head.vtr") # totalbytes = os.path.getsize(filetocheck) # assert(totalbytes==4997) nlines = count_lines_in_file(filetocheck) assert nlines == 59 # export and check specific discharge given at vertices - cbcfile = os.path.join(output_dir, name + ".cbc") + cbcfile = os.path.join(output_dir, f"{name}.cbc") cbc = bf.CellBudgetFile(cbcfile) keys = ["FLOW RIGHT FACE", "FLOW FRONT FACE"] vectors = [cbc.get_data(text=t)[0] for t in keys] q = pp.get_specific_discharge(vectors, m, position="vertices") vtk.export_vector( - m, q, output_dir, name + "_q", point_scalars=True, true2d=True + m, q, output_dir, f"{name}_q", point_scalars=True, true2d=True ) - filetocheck = os.path.join(output_dir, name + "_q.vtr") + filetocheck = os.path.join(output_dir, f"{name}_q.vtr") # totalbytes1 = os.path.getsize(filetocheck) # assert(totalbytes1==5772) nlines1 = count_lines_in_file(filetocheck) @@ -827,45 +827,45 @@ def test_vtk_export_true2d_nonregxz(): m.run_model(silent=True) # export and check head - hdsfile = os.path.join(output_dir, name + ".hds") + hdsfile = os.path.join(output_dir, f"{name}.hds") hds = bf.HeadFile(hdsfile) head = hds.get_data() - vtk.export_array(m, head, output_dir, name + "_head", true2d=True) - filetocheck = os.path.join(output_dir, name + "_head.vtu") + vtk.export_array(m, head, output_dir, f"{name}_head", true2d=True) + filetocheck = os.path.join(output_dir, f"{name}_head.vtu") # totalbytes = os.path.getsize(filetocheck) # assert(totalbytes==4217) nlines = count_lines_in_file(filetocheck) assert nlines == 105 # export and check head with point scalar - hdsfile = os.path.join(output_dir, name + ".hds") + hdsfile = os.path.join(output_dir, f"{name}.hds") hds = bf.HeadFile(hdsfile) head = hds.get_data() vtk.export_array( m, head, output_dir, - name + "_head_points", + f"{name}_head_points", point_scalars=True, true2d=True, ) - filetocheck = os.path.join(output_dir, name + "_head_points.vtu") + filetocheck = os.path.join(output_dir, f"{name}_head_points.vtu") # totalbytes1 = os.path.getsize(filetocheck) # assert(totalbytes1==6155) nlines1 = count_lines_in_file(filetocheck) assert 
nlines1 == 129 # export and check specific discharge given at vertices - cbcfile = os.path.join(output_dir, name + ".cbc") + cbcfile = os.path.join(output_dir, f"{name}.cbc") cbc = bf.CellBudgetFile(cbcfile) keys = ["FLOW RIGHT FACE", "FLOW LOWER FACE"] vectors = [cbc.get_data(text=t)[0] for t in keys] vectors.insert(1, None) q = pp.get_specific_discharge(vectors, m, position="vertices") vtk.export_vector( - m, q, output_dir, name + "_q", point_scalars=True, true2d=True + m, q, output_dir, f"{name}_q", point_scalars=True, true2d=True ) - filetocheck = os.path.join(output_dir, name + "_q.vtu") + filetocheck = os.path.join(output_dir, f"{name}_q.vtu") # totalbytes2 = os.path.getsize(filetocheck) # assert(totalbytes2==7036) nlines2 = count_lines_in_file(filetocheck) @@ -907,45 +907,45 @@ def test_vtk_export_true2d_nonregyz(): m.run_model(silent=True) # export and check head - hdsfile = os.path.join(output_dir, name + ".hds") + hdsfile = os.path.join(output_dir, f"{name}.hds") hds = bf.HeadFile(hdsfile) head = hds.get_data() - vtk.export_array(m, head, output_dir, name + "_head", true2d=True) - filetocheck = os.path.join(output_dir, name + "_head.vtu") + vtk.export_array(m, head, output_dir, f"{name}_head", true2d=True) + filetocheck = os.path.join(output_dir, f"{name}_head.vtu") # totalbytes = os.path.getsize(filetocheck) # assert(totalbytes==4217) nlines = count_lines_in_file(filetocheck) assert nlines == 105 # export and check head with point scalar - hdsfile = os.path.join(output_dir, name + ".hds") + hdsfile = os.path.join(output_dir, f"{name}.hds") hds = bf.HeadFile(hdsfile) head = hds.get_data() vtk.export_array( m, head, output_dir, - name + "_head_points", + f"{name}_head_points", point_scalars=True, true2d=True, ) - filetocheck = os.path.join(output_dir, name + "_head_points.vtu") + filetocheck = os.path.join(output_dir, f"{name}_head_points.vtu") # totalbytes1 = os.path.getsize(filetocheck) # assert(totalbytes1==6155) nlines1 = count_lines_in_file(filetocheck) assert nlines1 == 129 # export and check specific discharge given at vertices - cbcfile = os.path.join(output_dir, name + ".cbc") + cbcfile = os.path.join(output_dir, f"{name}.cbc") cbc = bf.CellBudgetFile(cbcfile) keys = ["FLOW FRONT FACE", "FLOW LOWER FACE"] vectors = [cbc.get_data(text=t)[0] for t in keys] vectors.insert(0, None) q = pp.get_specific_discharge(vectors, m, position="vertices") vtk.export_vector( - m, q, output_dir, name + "_q", point_scalars=True, true2d=True + m, q, output_dir, f"{name}_q", point_scalars=True, true2d=True ) - filetocheck = os.path.join(output_dir, name + "_q.vtu") + filetocheck = os.path.join(output_dir, f"{name}_q.vtu") # totalbytes2 = os.path.getsize(filetocheck) # assert(totalbytes2==7032) nlines2 = count_lines_in_file(filetocheck) diff --git a/autotest/t051_test.py b/autotest/t051_test.py index 7f84cc52f..7a5c8a289 100644 --- a/autotest/t051_test.py +++ b/autotest/t051_test.py @@ -40,7 +40,7 @@ def test_mfcbc(): spd = {(0, 0): ["save head", "save budget"]} oc = flopy.modflow.ModflowOc(m, stress_period_data=spd) t = oc.get_budgetunit() - assert t == [100, 101], "budget units are {}".format(t) + " not [100, 101]" + assert t == [100, 101], f"budget units are {t} not [100, 101]" nlay = 3 nrow = 3 @@ -61,8 +61,8 @@ def test_mfcbc(): oc.reset_budgetunit(budgetunit=1053, fname="big.bin") msg = ( - "wel ipakcb ({}) ".format(wel.ipakcb) - + "not set correctly to 1053 using oc.resetbudgetunit()" + f"wel ipakcb ({wel.ipakcb}) " + "not set correctly to 1053 using oc.resetbudgetunit()" ) assert 
wel.ipakcb == 1053, msg diff --git a/autotest/t052_test.py b/autotest/t052_test.py index 4949f7b5a..33da8b3d4 100644 --- a/autotest/t052_test.py +++ b/autotest/t052_test.py @@ -67,11 +67,11 @@ def test_binary_well(): if run: success, buff = ml.run_model(silent=False) assert success, "could not run MODFLOW-2005 model" - fn0 = os.path.join(cpth, mfnam + ".nam") + fn0 = os.path.join(cpth, f"{mfnam}.nam") # load the model m = flopy.modflow.Modflow.load( - mfnam + ".nam", model_ws=cpth, verbose=True, exe_name=exe_name + f"{mfnam}.nam", model_ws=cpth, verbose=True, exe_name=exe_name ) wl = m.wel.stress_period_data[0] @@ -100,13 +100,11 @@ def test_binary_well(): if run: success, buff = m.run_model(silent=False) assert success, "could not run the new MODFLOW-2005 model" - fn1 = os.path.join(pth, mfnam + ".nam") + fn1 = os.path.join(pth, f"{mfnam}.nam") # compare the files if run: - fsum = os.path.join( - cpth, "{}.head.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(cpth, f"{os.path.splitext(mfnam)[0]}.head.out") success = False try: success = pymake.compare_heads(fn0, fn1, outfile=fsum) @@ -115,9 +113,7 @@ def test_binary_well(): assert success, "head comparison failure" - fsum = os.path.join( - cpth, "{}.budget.out".format(os.path.splitext(mfnam)[0]) - ) + fsum = os.path.join(cpth, f"{os.path.splitext(mfnam)[0]}.budget.out") success = False try: success = pymake.compare_budget( diff --git a/autotest/t054_test_mfnwt.py b/autotest/t054_test_mfnwt.py index 784a74cdd..988a26ff1 100644 --- a/autotest/t054_test_mfnwt.py +++ b/autotest/t054_test_mfnwt.py @@ -64,7 +64,7 @@ def mfnwt_model(namfile, model_ws): check=False, exe_name=mfnwt_exe, ) - assert m, "Could not load namefile {}".format(namfile) + assert m, f"Could not load namefile {namfile}" assert m.load_fail is False # convert to MODFLOW-NWT model m.set_version("mfnwt") @@ -139,7 +139,7 @@ def mfnwt_model(namfile, model_ws): check=False, exe_name=mfnwt_exe, ) - assert m, "Could not load namefile {}".format(namfile) + assert m, f"Could not load namefile {namfile}" assert m.load_fail is False # change workspace and write MODFLOW-NWT model @@ -155,7 +155,7 @@ def mfnwt_model(namfile, model_ws): fn1 = os.path.join(pthf, namfile) if run: - fsum = os.path.join(pth, "{}.head.out".format(tdir)) + fsum = os.path.join(pth, f"{tdir}.head.out") success = False try: success = pymake.compare_heads(fn0, fn1, outfile=fsum) @@ -165,7 +165,7 @@ def mfnwt_model(namfile, model_ws): assert success, "head comparison failure" - fsum = os.path.join(pth, "{}.budget.out".format(tdir)) + fsum = os.path.join(pth, f"{tdir}.budget.out") success = False try: success = pymake.compare_budget( diff --git a/autotest/t057_test_mp7.py b/autotest/t057_test_mp7.py index 37400d0f9..1c9d28672 100644 --- a/autotest/t057_test_mp7.py +++ b/autotest/t057_test_mp7.py @@ -114,11 +114,8 @@ def test_pathline_output(): # check maxid msg = ( - "pathline maxid ({}) ".format(maxid0) - + "in {} ".format(os.path.basename(fpth0)) - + "are not equal to the " - + "pathline maxid ({}) ".format(maxid1) - + "in {}".format(os.path.basename(fpth1)) + f"pathline maxid ({maxid0}) in {os.path.basename(fpth0)} are not " + f"equal to the pathline maxid ({maxid1}) in {os.path.basename(fpth1)}" ) assert maxid0 == maxid1, msg @@ -160,11 +157,8 @@ def test_endpoint_output(): # check maxid msg = ( - "endpoint maxid ({}) ".format(maxid0) - + "in {} ".format(os.path.basename(fpth0)) - + "are not equal to the " - + "endpoint maxid ({}) ".format(maxid1) - + "in {}".format(os.path.basename(fpth1)) + 
f"endpoint maxid ({maxid0}) in {os.path.basename(fpth0)} are not " + f"equal to the endpoint maxid ({maxid1}) in {os.path.basename(fpth1)}" ) assert maxid0 == maxid1, msg @@ -198,9 +192,8 @@ def test_endpoint_output(): ) d = np.rec.fromarrays((e0[name] - e1[name] for name in names), dtype=dtype) msg = ( - "endpoints in {} ".format(os.path.basename(fpth0)) - + "are not equal (within 1e-5) to the " - + "endpoints in {}".format(os.path.basename(fpth1)) + f"endpoints in {os.path.basename(fpth0)} are not equal (within 1e-5) " + f"to the endpoints in {os.path.basename(fpth1)}" ) # assert not np.allclose(t0, t1), msg @@ -264,7 +257,7 @@ def build_mf2005(): # create modpath files exe_name = exe_names["mp7"] mp = flopy.modpath.Modpath7( - modelname=nm + "_mp", flowmodel=m, exe_name=exe_name, model_ws=ws + modelname=f"{nm}_mp", flowmodel=m, exe_name=exe_name, model_ws=ws ) mpbas = flopy.modpath.Modpath7Bas( mp, porosity=0.1, defaultiface=defaultiface @@ -292,7 +285,7 @@ def build_mf2005(): # run modpath if run: success, buff = mp.run_model() - assert success, "mp7 model ({}) did not run".format(mp.name) + assert success, f"mp7 model ({mp.name}) did not run" return @@ -318,7 +311,7 @@ def build_mf6(): ) # Create the Flopy groundwater flow (gwf) model object - model_nam_file = "{}.nam".format(nm) + model_nam_file = f"{nm}.nam" gwf = flopy.mf6.ModflowGwf( sim, modelname=nm, model_nam_file=model_nam_file, save_flows=True ) @@ -369,9 +362,9 @@ def build_mf6(): rd.append([(0, i, ncol - 1), riv_h, riv_c, riv_z]) flopy.mf6.modflow.mfgwfriv.ModflowGwfriv(gwf, stress_period_data={0: rd}) # Create the output control package - headfile = "{}.hds".format(nm) + headfile = f"{nm}.hds" head_record = [headfile] - budgetfile = "{}.cbb".format(nm) + budgetfile = f"{nm}.cbb" budget_record = [budgetfile] saverecord = [("HEAD", "ALL"), ("BUDGET", "ALL")] oc = flopy.mf6.modflow.mfgwfoc.ModflowGwfoc( @@ -393,7 +386,7 @@ def build_mf6(): # create modpath files exe_name = exe_names["mp7"] mp = flopy.modpath.Modpath7( - modelname=nm + "_mp", flowmodel=gwf, exe_name=exe_name, model_ws=ws + modelname=f"{nm}_mp", flowmodel=gwf, exe_name=exe_name, model_ws=ws ) mpbas = flopy.modpath.Modpath7Bas( mp, porosity=0.1, defaultiface=defaultiface6 @@ -421,7 +414,7 @@ def build_mf6(): # run modpath if run: success, buff = mp.run_model() - assert success, "mp7 model ({}) did not run".format(mp.name) + assert success, f"mp7 model ({mp.name}) did not run" return diff --git a/autotest/t058_test_mp7.py b/autotest/t058_test_mp7.py index 0ec020389..0b2ebda23 100644 --- a/autotest/t058_test_mp7.py +++ b/autotest/t058_test_mp7.py @@ -63,14 +63,14 @@ def test_mf6(): def test_default_modpath(): - mpnam = nm + "_mp_default" + mpnam = f"{nm}_mp_default" pg = flopy.modpath.ParticleGroup(particlegroupname="DEFAULT") build_modpath(mpnam, pg) return def test_faceparticles_is1(): - mpnam = nm + "_mp_face_t1node" + mpnam = f"{nm}_mp_face_t1node" locs = [] localx = [] localy = [] @@ -84,7 +84,7 @@ def test_faceparticles_is1(): p = flopy.modpath.ParticleData( locs, structured=False, drape=0, localx=localx, localy=localy, localz=1 ) - fpth = mpnam + ".sloc" + fpth = f"{mpnam}.sloc" pg = flopy.modpath.ParticleGroup( particlegroupname="T1NODEPG", particledata=p, filename=fpth ) @@ -93,7 +93,7 @@ def test_faceparticles_is1(): def test_facenode_is3(): - mpnam = nm + "_mp_face_t3node" + mpnam = f"{nm}_mp_face_t3node" locs = [] for i in range(nrow): for j in range(ncol): @@ -115,7 +115,7 @@ def test_facenode_is3(): columndivisions6=3, ) p = 
flopy.modpath.NodeParticleData(subdivisiondata=sd, nodes=locs) - fpth = mpnam + ".sloc" + fpth = f"{mpnam}.sloc" pg = flopy.modpath.ParticleGroupNodeTemplate( particlegroupname="T3NODEPG", particledata=p, filename=fpth ) @@ -124,7 +124,7 @@ def test_facenode_is3(): def test_facenode_is3a(): - mpnam = nm + "_mp_face_t3anode" + mpnam = f"{nm}_mp_face_t3anode" locsa = [] for i in range(11): for j in range(ncol): @@ -153,7 +153,7 @@ def test_facenode_is3a(): p = flopy.modpath.NodeParticleData( subdivisiondata=[sd, sd], nodes=[locsa, locsb] ) - fpth = mpnam + ".sloc" + fpth = f"{mpnam}.sloc" pg = flopy.modpath.ParticleGroupNodeTemplate( particlegroupname="T3ANODEPG", particledata=p, filename=fpth ) @@ -162,7 +162,7 @@ def test_facenode_is3a(): def test_facenode_is2a(): - mpnam = nm + "_mp_face_t2anode" + mpnam = f"{nm}_mp_face_t2anode" locsa = [[0, 0, 0, 0, 10, ncol - 1]] locsb = [[0, 11, 0, 0, nrow - 1, ncol - 1]] sd = flopy.modpath.FaceDataType( @@ -183,7 +183,7 @@ def test_facenode_is2a(): p = flopy.modpath.LRCParticleData( subdivisiondata=[sd, sd], lrcregions=[locsa, locsb] ) - fpth = mpnam + ".sloc" + fpth = f"{mpnam}.sloc" pg = flopy.modpath.ParticleGroupNodeTemplate( particlegroupname="T2ANODEPG", particledata=p, filename=fpth ) @@ -192,7 +192,7 @@ def test_facenode_is2a(): def test_cellparticles_is1(): - mpnam = nm + "_mp_cell_t1node" + mpnam = f"{nm}_mp_cell_t1node" locs = [] for k in range(nlay): for i in range(nrow): @@ -202,7 +202,7 @@ def test_cellparticles_is1(): p = flopy.modpath.ParticleData( locs, structured=False, drape=0, localx=0.5, localy=0.5, localz=0.5 ) - fpth = mpnam + ".sloc" + fpth = f"{mpnam}.sloc" pg = flopy.modpath.ParticleGroup( particlegroupname="T1NODEPG", particledata=p, filename=fpth ) @@ -211,7 +211,7 @@ def test_cellparticles_is1(): def test_cellparticleskij_is1(): - mpnam = nm + "_mp_cell_t1kij" + mpnam = f"{nm}_mp_cell_t1kij" locs = [] for k in range(nlay): for i in range(nrow): @@ -220,7 +220,7 @@ def test_cellparticleskij_is1(): p = flopy.modpath.ParticleData( locs, structured=True, drape=0, localx=0.5, localy=0.5, localz=0.5 ) - fpth = mpnam + ".sloc" + fpth = f"{mpnam}.sloc" pg = flopy.modpath.ParticleGroup( particlegroupname="T1KIJPG", particledata=p, filename=fpth ) @@ -229,7 +229,7 @@ def test_cellparticleskij_is1(): def test_cellnode_is3(): - mpnam = nm + "_mp_cell_t3node" + mpnam = f"{nm}_mp_cell_t3node" locs = [] for k in range(nlay): for i in range(nrow): @@ -243,7 +243,7 @@ def test_cellnode_is3(): layercelldivisions=1, ) p = flopy.modpath.NodeParticleData(subdivisiondata=sd, nodes=locs) - fpth = mpnam + ".sloc" + fpth = f"{mpnam}.sloc" pg = flopy.modpath.ParticleGroupNodeTemplate( particlegroupname="T3CELLPG", particledata=p, filename=fpth ) @@ -252,7 +252,7 @@ def test_cellnode_is3(): def test_cellnode_is3a(): - mpnam = nm + "_mp_cell_t3anode" + mpnam = f"{nm}_mp_cell_t3anode" locsa = [] for k in range(1): for i in range(nrow): @@ -280,7 +280,7 @@ def test_cellnode_is3a(): p = flopy.modpath.NodeParticleData( subdivisiondata=[sd, sd, sd], nodes=[locsa, locsb, locsc] ) - fpth = mpnam + ".sloc" + fpth = f"{mpnam}.sloc" pg = flopy.modpath.ParticleGroupNodeTemplate( particlegroupname="T3ACELLPG", particledata=p, filename=fpth ) @@ -289,7 +289,7 @@ def test_cellnode_is3a(): def test_cellnode_is2a(): - mpnam = nm + "_mp_cell_t2anode" + mpnam = f"{nm}_mp_cell_t2anode" locsa = [ [0, 0, 0, 0, nrow - 1, ncol - 1], [1, 0, 0, 1, nrow - 1, ncol - 1], @@ -304,7 +304,7 @@ def test_cellnode_is2a(): p = flopy.modpath.LRCParticleData( subdivisiondata=[sd, sd], 
lrcregions=[locsa, locsb] ) - fpth = mpnam + ".sloc" + fpth = f"{mpnam}.sloc" pg = flopy.modpath.ParticleGroupLRCTemplate( particlegroupname="T2ACELLPG", particledata=p, filename=fpth ) @@ -374,31 +374,25 @@ def endpoint_compare(fpth0, epf): # check maxid msg = ( - "endpoint maxid ({}) ".format(maxid0) - + "in {} ".format(os.path.basename(fpth0)) - + "are not equal to the " - + "endpoint maxid ({}) ".format(maxid1) - + "in {}".format(os.path.basename(fpth1)) + f"endpoint maxid ({maxid0}) in {os.path.basename(fpth0)} " + f"are not equal to the endpoint maxid ({maxid1}) " + f"in {os.path.basename(fpth1)}" ) assert maxid0 == maxid1, msg # check maxtravel msg = ( - "endpoint maxtraveltime ({}) ".format(maxtravel0) - + "in {} ".format(os.path.basename(fpth0)) - + "are not equal to the " - + "endpoint maxtraveltime ({}) ".format(maxtravel1) - + "in {}".format(os.path.basename(fpth1)) + f"endpoint maxtraveltime ({maxtravel0}) " + f"in {os.path.basename(fpth0)} are not equal to the endpoint " + f"maxtraveltime ({maxtravel1}) in {os.path.basename(fpth1)}" ) assert maxtravel0 == maxtravel1, msg # check maxtimes msg = ( - "endpoint maxtime ({}) ".format(maxtime0) - + "in {} ".format(os.path.basename(fpth0)) - + "are not equal to the " - + "endpoint maxtime ({}) ".format(maxtime1) - + "in {}".format(os.path.basename(fpth1)) + f"endpoint maxtime ({maxtime0}) in {os.path.basename(fpth0)} " + f"are not equal to the endpoint maxtime ({maxtime1}) " + f"in {os.path.basename(fpth1)}" ) assert maxtime0 == maxtime1, msg @@ -406,10 +400,9 @@ def endpoint_compare(fpth0, epf): t1 = np.rec.fromarrays((e1[name] for name in names), dtype=dtype) for name in names: msg = ( - "endpoints in {} ".format(os.path.basename(fpth0)) - + "are not equal (within 1e-5) to the " - + "endpoints in {} ".format(os.path.basename(fpth1)) - + "for column {}.".format(name) + f"endpoints in {os.path.basename(fpth0)} are not equal " + f"(within 1e-5) to the endpoints in {os.path.basename(fpth1)} " + f"for column {name}." 
) assert np.allclose(t0[name], t1[name]), msg @@ -435,7 +428,7 @@ def build_mf6(): ) # Create the Flopy groundwater flow (gwf) model object - model_nam_file = "{}.nam".format(nm) + model_nam_file = f"{nm}.nam" gwf = flopy.mf6.ModflowGwf( sim, modelname=nm, model_nam_file=model_nam_file, save_flows=True ) @@ -479,9 +472,9 @@ def build_mf6(): rd.append([(0, i, ncol - 1), riv_h, riv_c, riv_z]) flopy.mf6.modflow.mfgwfriv.ModflowGwfriv(gwf, stress_period_data={0: rd}) # Create the output control package - headfile = "{}.hds".format(nm) + headfile = f"{nm}.hds" head_record = [headfile] - budgetfile = "{}.cbb".format(nm) + budgetfile = f"{nm}.cbb" budget_record = [budgetfile] saverecord = [("HEAD", "ALL"), ("BUDGET", "ALL")] oc = flopy.mf6.modflow.mfgwfoc.ModflowGwfoc( @@ -530,7 +523,7 @@ def build_modpath(mpn, particlegroups): # run modpath if run: success, buff = mp.run_model() - assert success, "mp7 model ({}) did not run".format(mp.name) + assert success, f"mp7 model ({mp.name}) did not run" return diff --git a/autotest/t059_test_mp7.py b/autotest/t059_test_mp7.py index 9e2834f34..0570e759c 100644 --- a/autotest/t059_test_mp7.py +++ b/autotest/t059_test_mp7.py @@ -42,7 +42,7 @@ def test_mf6(): def test_forward(): - mpnam = nm + "_mp_forward" + mpnam = f"{nm}_mp_forward" exe_name = exe_names["mp7"] # load the MODFLOW 6 model @@ -66,7 +66,7 @@ def test_forward(): def test_backward(): - mpnam = nm + "_mp_backward" + mpnam = f"{nm}_mp_backward" exe_name = exe_names["mp7"] # load the MODFLOW 6 model @@ -108,7 +108,7 @@ def build_mf6(): ) # Create the Flopy groundwater flow (gwf) model object - model_nam_file = "{}.nam".format(nm) + model_nam_file = f"{nm}.nam" gwf = flopy.mf6.ModflowGwf( sim, modelname=nm, model_nam_file=model_nam_file, save_flows=True ) @@ -152,9 +152,9 @@ def build_mf6(): rd.append([(0, i, ncol - 1), riv_h, riv_c, riv_z]) flopy.mf6.modflow.mfgwfriv.ModflowGwfriv(gwf, stress_period_data={0: rd}) # Create the output control package - headfile = "{}.hds".format(nm) + headfile = f"{nm}.hds" head_record = [headfile] - budgetfile = "{}.cbb".format(nm) + budgetfile = f"{nm}.cbb" budget_record = [budgetfile] saverecord = [("HEAD", "ALL"), ("BUDGET", "ALL")] oc = flopy.mf6.modflow.mfgwfoc.ModflowGwfoc( @@ -181,7 +181,7 @@ def build_modpath(mp): # run modpath if run: success, buff = mp.run_model() - assert success, "mp7 model ({}) did not run".format(mp.name) + assert success, f"mp7 model ({mp.name}) did not run" return diff --git a/autotest/t060_test_lkt.py b/autotest/t060_test_lkt.py index fc4f82000..dd6d32745 100644 --- a/autotest/t060_test_lkt.py +++ b/autotest/t060_test_lkt.py @@ -17324,7 +17324,7 @@ def test_lkt_with_multispecies(): mt.write_input() # Make sure the just written files are loadable - namfile = modelname + ".nam" + namfile = f"{modelname}.nam" mf = flopy.modflow.Modflow.load( namfile, model_ws=tpth, @@ -17332,7 +17332,7 @@ def test_lkt_with_multispecies(): verbose=True, exe_name=mfnwt_exe, ) - namfile = modelname + ".mtnam" + namfile = f"{modelname}.mtnam" mt = flopy.mt3d.mt.Mt3dms.load( namfile, model_ws=tpth, diff --git a/autotest/t061_test_gridgen.py b/autotest/t061_test_gridgen.py index d04fe4a1d..fce063f6d 100644 --- a/autotest/t061_test_gridgen.py +++ b/autotest/t061_test_gridgen.py @@ -191,10 +191,7 @@ def test_gridgen(): points = [(4750.0, 5250.0)] cells = g.intersect(points, "point", 0) n = cells["nodenumber"][0] - msg = ( - "gridgen point intersect did not identify the correct " - "cell {} <> {}".format(n, 308) - ) + msg = f"gridgen point intersect did not 
identify the correct cell {n} <> 308" assert n == 308, msg # test the gridgen line intersection diff --git a/autotest/t062_test_intersect.py b/autotest/t062_test_intersect.py index 1a8f64c22..41d43feef 100644 --- a/autotest/t062_test_intersect.py +++ b/autotest/t062_test_intersect.py @@ -151,20 +151,10 @@ def test_intersection(): pass else: # should be forgiving x,y out of grid raise e - print( - "x={},y={} in dis is in row {} and col {}, so...".format( - x, y, row, col - ) - ) + print(f"x={x},y={y} in dis is in row {row} and col {col}, so...") cell2d_dis = row * ml_dis.modelgrid.ncol + col - print( - "x={},y={} in dis is in cell2d-number {}".format(x, y, cell2d_dis) - ) - print( - "x={},y={} in disv is in cell2d-number {}".format( - x, y, cell2d_disv - ) - ) + print(f"x={x},y={y} in dis is in cell2d-number {cell2d_dis}") + print(f"x={x},y={y} in disv is in cell2d-number {cell2d_disv}") if not forgive: assert cell2d_dis == cell2d_disv diff --git a/autotest/t063_test_lgrutil.py b/autotest/t063_test_lgrutil.py index 3d038d820..a2757c555 100644 --- a/autotest/t063_test_lgrutil.py +++ b/autotest/t063_test_lgrutil.py @@ -62,7 +62,7 @@ def test_lgrutil(): assert topc.shape == (9, 9) assert botmc.shape == (2, 9, 9) assert topc.min() == topc.max() == 100.0 - errmsg = "{} /= {}".format(botmc[:, 0, 0], np.array(botmp[:2])) + errmsg = f"{botmc[:, 0, 0]} /= {np.array(botmp[:2])}" assert np.allclose(botmc[:, 0, 0], np.array(botmp[:2])), errmsg # exchange data @@ -78,7 +78,7 @@ def test_lgrutil(): 0.0, 354.33819375782156, ] - errmsg = "{} /= {}".format(ans1, exchange_data[0]) + errmsg = f"{ans1} /= {exchange_data[0]}" assert exchange_data[0] == ans1, errmsg ans2 = [ @@ -91,7 +91,7 @@ def test_lgrutil(): 180.0, 100.0, ] - errmsg = "{} /= {}".format(ans2, exchange_data[-1]) + errmsg = f"{ans2} /= {exchange_data[-1]}" assert exchange_data[-1] == ans2, errmsg errmsg = "exchanges should be 71 horizontal plus 81 vertical" diff --git a/autotest/t064_test_performance.py b/autotest/t064_test_performance.py index 2d325eb32..291f2ff02 100644 --- a/autotest/t064_test_performance.py +++ b/autotest/t064_test_performance.py @@ -82,10 +82,8 @@ def test_init_time(self): target = 0.3 # seconds assert ( mfp.init_time < target - ), "model init took {:.2f}s, should take {:.1f}s".format( - mfp.init_time, target - ) - print("setting up model took {:.2f}s".format(mfp.init_time)) + ), f"model init took {mfp.init_time:.2f}s, should take {target:.1f}s" + print(f"setting up model took {mfp.init_time:.2f}s") def test_0_write_time(self): """test write time""" @@ -100,10 +98,10 @@ def test_0_write_time(self): mfp.m.write_input() t1 = time.time() - t0 if assert_time: - assert t1 < target, "model write took {:.2f}s, ".format( - t1 - ) + "should take {:.1f}s".format(target) - print("writing input took {:.2f}s".format(t1)) + assert ( + t1 < target + ), f"model write took {t1:.2f}s, should take {target:.1f}s" + print(f"writing input took {t1:.2f}s") def test_9_load_time(self): """test model load time""" @@ -112,13 +110,13 @@ def test_9_load_time(self): target = 3 t0 = time.time() m = fm.Modflow.load( - "{}.nam".format(mfp.modelname), model_ws=mfp.model_ws, check=False + f"{mfp.modelname}.nam", model_ws=mfp.model_ws, check=False ) t1 = time.time() - t0 assert ( t1 < target - ), "model load took {:.2f}s, should take {:.1f}s".format(t1, target) - print("loading the model took {:.2f}s".format(t1)) + ), f"model load took {t1:.2f}s, should take {target:.1f}s" + print(f"loading the model took {t1:.2f}s") @classmethod def teardown_class(cls): 
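The t064 hunks above also show that format specifications transfer into f-strings unchanged: the spec simply follows the expression after a colon inside the braces, so "{:.2f}s".format(t1) becomes f"{t1:.2f}s". A short sketch of the equivalence, with hypothetical timing values:

    t1, target = 2.3456, 3.0  # hypothetical timings, in seconds
    old = "model load took {:.2f}s, should take {:.1f}s".format(t1, target)
    new = f"model load took {t1:.2f}s, should take {target:.1f}s"
    assert old == new  # both render "model load took 2.35s, should take 3.0s"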
diff --git a/autotest/t065_test_gridintersect.py b/autotest/t065_test_gridintersect.py index 3bb10540d..63d2f6596 100644 --- a/autotest/t065_test_gridintersect.py +++ b/autotest/t065_test_gridintersect.py @@ -131,7 +131,7 @@ def plot_vertex_grid(tgr): def plot_ix_polygon_result(rec, ax): for i, ishp in enumerate(rec.ixshapes): - ppi = PolygonPatch(ishp, facecolor="C{}".format(i % 10)) + ppi = PolygonPatch(ishp, facecolor=f"C{i % 10}") ax.add_patch(ppi) @@ -139,9 +139,9 @@ def plot_ix_linestring_result(rec, ax): for i, ishp in enumerate(rec.ixshapes): if ishp.type == "MultiLineString": for part in ishp: - ax.plot(part.xy[0], part.xy[1], ls="-", c="C{}".format(i % 10)) + ax.plot(part.xy[0], part.xy[1], ls="-", c=f"C{i % 10}") else: - ax.plot(ishp.xy[0], ishp.xy[1], ls="-", c="C{}".format(i % 10)) + ax.plot(ishp.xy[0], ishp.xy[1], ls="-", c=f"C{i % 10}") def plot_ix_point_result(rec, ax): diff --git a/autotest/t067_test_ulstrd.py b/autotest/t067_test_ulstrd.py index dacf0ac49..7ff0ea3ae 100644 --- a/autotest/t067_test_ulstrd.py +++ b/autotest/t067_test_ulstrd.py @@ -59,27 +59,25 @@ def test_ulstrd(): # rewrite ghb fname = os.path.join(ws, "original.ghb") with open(fname, "w") as f: - f.write("{} {}\n".format(ghbra.shape[0], 0)) + f.write(f"{ghbra.shape[0]} 0\n") for kper in range(nper): - f.write("{} {}\n".format(ghbra.shape[0], 0)) + f.write(f"{ghbra.shape[0]} 0\n") f.write("open/close original.ghb.dat\n") # write ghb list sfacghb = 5 fname = os.path.join(ws, "original.ghb.dat") with open(fname, "w") as f: - f.write("sfac {}\n".format(sfacghb)) + f.write(f"sfac {sfacghb}\n") for k, i, j, stage, cond in ghbra: - f.write( - "{} {} {} {} {}\n".format(k + 1, i + 1, j + 1, stage, cond) - ) + f.write(f"{k + 1} {i + 1} {j + 1} {stage} {cond}\n") # rewrite drn fname = os.path.join(ws, "original.drn") with open(fname, "w") as f: - f.write("{} {}\n".format(drnra.shape[0], 0)) + f.write(f"{drnra.shape[0]} 0\n") for kper in range(nper): - f.write("{} {}\n".format(drnra.shape[0], 0)) + f.write(f"{drnra.shape[0]} 0\n") f.write("external 71\n") # write drn list @@ -87,18 +85,16 @@ def test_ulstrd(): fname = os.path.join(ws, "original.drn.dat") with open(fname, "w") as f: for kper in range(nper): - f.write("sfac {}\n".format(sfacdrn)) + f.write(f"sfac {sfacdrn}\n") for k, i, j, stage, cond in drnra: - f.write( - "{} {} {} {} {}\n".format(k + 1, i + 1, j + 1, stage, cond) - ) + f.write(f"{k + 1} {i + 1} {j + 1} {stage} {cond}\n") # rewrite wel fname = os.path.join(ws, "original.wel") with open(fname, "w") as f: - f.write("{} {}\n".format(drnra.shape[0], 0)) + f.write(f"{drnra.shape[0]} 0\n") for kper in range(nper): - f.write("{} {}\n".format(drnra.shape[0], 0)) + f.write(f"{drnra.shape[0]} 0\n") f.write("external 72 (binary)\n") # create the wells, but use an all float dtype to write a binary file diff --git a/autotest/t070_test_quasi3layers.py b/autotest/t070_test_quasi3layers.py index 32888eae3..3fa8bfc00 100644 --- a/autotest/t070_test_quasi3layers.py +++ b/autotest/t070_test_quasi3layers.py @@ -89,12 +89,10 @@ def test_plotting_with_quasi3d_layers(): assert success, "test_plotting_with_quasi3d_layers() failed" # read output - hf = flopy.utils.HeadFile( - os.path.join(mf.model_ws, "{}.hds".format(mf.name)) - ) + hf = flopy.utils.HeadFile(os.path.join(mf.model_ws, f"{mf.name}.hds")) head = hf.get_data(totim=1.0) cbb = flopy.utils.CellBudgetFile( - os.path.join(mf.model_ws, "{}.cbc".format(mf.name)) + os.path.join(mf.model_ws, f"{mf.name}.cbc") ) frf = cbb.get_data(text="FLOW RIGHT FACE", totim=1.0)[0] fff 
= cbb.get_data(text="FLOW FRONT FACE", totim=1.0)[0] diff --git a/autotest/t072_test_spedis.py b/autotest/t072_test_spedis.py index da6b395e8..69e54116e 100644 --- a/autotest/t072_test_spedis.py +++ b/autotest/t072_test_spedis.py @@ -23,12 +23,12 @@ postproc_test_ws = os.path.join(".", "temp", "t072") modelws_mf2005 = os.path.join(postproc_test_ws, modelname_mf2005) modelws_mf6 = os.path.join(postproc_test_ws, modelname_mf6) -cbcfile_mf2005 = os.path.join(modelws_mf2005, modelname_mf2005 + ".cbc") -cbcfile_mf6 = os.path.join(modelws_mf6, modelname_mf6 + ".cbc") -hdsfile_mf2005 = os.path.join(modelws_mf2005, modelname_mf2005 + ".hds") -hdsfile_mf6 = os.path.join(modelws_mf6, modelname_mf6 + ".hds") -namfile_mf2005 = os.path.join(modelws_mf2005, modelname_mf2005 + ".nam") -namfile_mf6 = os.path.join(modelws_mf6, modelname_mf6 + ".nam") +cbcfile_mf2005 = os.path.join(modelws_mf2005, f"{modelname_mf2005}.cbc") +cbcfile_mf6 = os.path.join(modelws_mf6, f"{modelname_mf6}.cbc") +hdsfile_mf2005 = os.path.join(modelws_mf2005, f"{modelname_mf2005}.hds") +hdsfile_mf6 = os.path.join(modelws_mf6, f"{modelname_mf6}.hds") +namfile_mf2005 = os.path.join(modelws_mf2005, f"{modelname_mf2005}.nam") +namfile_mf6 = os.path.join(modelws_mf6, f"{modelname_mf6}.nam") # model domain, grid definition and properties Lx = 100.0 @@ -200,7 +200,7 @@ def build_model_mf6(): gwf = flopy.mf6.ModflowGwf( sim, modelname=modelname_mf6, - model_nam_file="{}.nam".format(modelname_mf6), + model_nam_file=f"{modelname_mf6}.nam", ) gwf.name_file.save_flows = True @@ -294,8 +294,8 @@ def build_model_mf6(): oc = flopy.mf6.ModflowGwfoc( gwf, pname="oc", - budget_filerecord="{}.cbc".format(modelname_mf6), - head_filerecord="{}.hds".format(modelname_mf6), + budget_filerecord=f"{modelname_mf6}.cbc", + head_filerecord=f"{modelname_mf6}.hds", headprintrecord=[("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL")], saverecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], printrecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], diff --git a/autotest/t073_test_cvfd.py b/autotest/t073_test_cvfd.py index 784cea902..133d8a469 100644 --- a/autotest/t073_test_cvfd.py +++ b/autotest/t073_test_cvfd.py @@ -69,7 +69,7 @@ def test_tocvfd3(): # spot check information for cell 28 (zero based) answer = [28, 250.0, 150.0, 7, 38, 142, 143, 45, 46, 44, 38] for i, j in zip(cell2d[28], answer): - assert i == j, "{} not equal {}".format(i, j) + assert i == j, f"{i} not equal {j}" if __name__ == "__main__": diff --git a/autotest/t075_ugridtests.py b/autotest/t075_ugridtests.py index 8ed64558b..51a3499cd 100644 --- a/autotest/t075_ugridtests.py +++ b/autotest/t075_ugridtests.py @@ -76,9 +76,9 @@ def test_unstructured_minimal_grid(): [(2.0, 1), (2.0, 0.0)], [(2.0, 0), (1.0, 0.0)], ] - assert g.grid_lines == grid_lines, "\n{} \n /= \n{}".format( - g.grid_lines, grid_lines - ) + assert ( + g.grid_lines == grid_lines + ), f"\n{g.grid_lines} \n /= \n{grid_lines}" assert g.extent == (0, 2, 0, 1) xv, yv, zv = g.xyzvertices assert xv == [[0, 1, 1, 0], [1, 2, 2, 1]] @@ -142,9 +142,9 @@ def test_unstructured_complete_grid(): ], } assert isinstance(g.grid_lines, dict) - assert g.grid_lines == grid_lines, "\n{} \n /= \n{}".format( - g.grid_lines, grid_lines - ) + assert ( + g.grid_lines == grid_lines + ), f"\n{g.grid_lines} \n /= \n{grid_lines}" assert g.extent == (0, 2, 0, 1) xv, yv, zv = g.xyzvertices assert xv == [[0, 1, 1, 0], [1, 2, 2, 1]] @@ -159,9 +159,9 @@ def test_loading_argus_meshes(): fnames = [fname for fname in os.listdir(datapth) if fname.endswith(".exp")] for fname in fnames: 
fname = os.path.join(datapth, fname) - print("Loading Argus mesh ({}) into UnstructuredGrid".format(fname)) + print(f"Loading Argus mesh ({fname}) into UnstructuredGrid") g = UnstructuredGrid.from_argus_export(fname) - print(" Number of nodes: {}".format(g.nnodes)) + print(f" Number of nodes: {g.nnodes}") def test_create_unstructured_grid_from_verts(): @@ -170,7 +170,7 @@ def test_create_unstructured_grid_from_verts(): # simple functions to load vertices and incidence lists def load_verts(fname): - print("Loading vertices from: {}".format(fname)) + print(f"Loading vertices from: {fname}") verts = np.genfromtxt( fname, dtype=[int, float, float], names=["iv", "x", "y"] ) @@ -178,7 +178,7 @@ def load_verts(fname): return verts def load_iverts(fname): - print("Loading iverts from: {}".format(fname)) + print(f"Loading iverts from: {fname}") f = open(fname, "r") iverts = [] xc = [] diff --git a/autotest/t078_lake_connections.py b/autotest/t078_lake_connections.py index 61f24c414..aa4b433bc 100644 --- a/autotest/t078_lake_connections.py +++ b/autotest/t078_lake_connections.py @@ -21,12 +21,12 @@ def __export_ascii_grid(modelgrid, file_path, v, nodata=0.0): xcenters = modelgrid.xcellcenters[0, :] cellsize = xcenters[1] - xcenters[0] with open(file_path, "w") as f: - f.write("NCOLS {}\n".format(shape[1])) - f.write("NROWS {}\n".format(shape[0])) - f.write("XLLCENTER {}\n".format(modelgrid.xoffset + 0.5 * cellsize)) - f.write("YLLCENTER {}\n".format(modelgrid.yoffset + 0.5 * cellsize)) - f.write("CELLSIZE {}\n".format(cellsize)) - f.write("NODATA_VALUE {}\n".format(nodata)) + f.write(f"NCOLS {shape[1]}\n") + f.write(f"NROWS {shape[0]}\n") + f.write(f"XLLCENTER {modelgrid.xoffset + 0.5 * cellsize}\n") + f.write(f"YLLCENTER {modelgrid.yoffset + 0.5 * cellsize}\n") + f.write(f"CELLSIZE {cellsize}\n") + f.write(f"NODATA_VALUE {nodata}\n") np.savetxt(f, v, fmt="%.4f") return @@ -254,9 +254,7 @@ def test_lake(): assert ( pakdata_dict[0] == 54 - ), "number of lake connections ({}) not equal " "to 54.".format( - pakdata_dict[0] - ) + ), f"number of lake connections ({pakdata_dict[0]}) not equal to 54." assert len(connectiondata) == 54, ( "number of lake connectiondata entries ({}) not equal " @@ -290,7 +288,7 @@ def test_lake(): sim.write_simulation() success = sim.run_simulation(silent=False) - assert success, "could not run {} with lake".format(sim.name) + assert success, f"could not run {sim.name} with lake" return @@ -462,9 +460,7 @@ def test_embedded_lak_ex01(): assert ( pakdata_dict[0] == 57 - ), "number of lake connections ({}) not equal " "to 57.".format( - pakdata_dict[0] - ) + ), f"number of lake connections ({pakdata_dict[0]}) not equal to 57." 
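
# The asserts above fold implicit literal concatenation ("..." "...") plus
# .format() into a single f-string. Equivalence sketch with a placeholder
# connection count:
n = 54
old = "number of lake connections ({}) not equal " "to 54.".format(n)
new = f"number of lake connections ({n}) not equal to 54."
assert old == new
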
assert len(connectiondata) == 57, ( "number of lake connectiondata entries ({}) not equal " @@ -498,7 +494,7 @@ def test_embedded_lak_ex01(): sim.write_simulation() success = sim.run_simulation(silent=False) - assert success, "could not run {}".format(sim.name) + assert success, f"could not run {sim.name}" def test_embedded_lak_prudic(): @@ -592,13 +588,10 @@ def test_embedded_lak_prudic(): match = np.allclose(cd[jdx], cdbase[jdx]) if not match: print( - "connection data do match for connection {} " - "for lake {}".format(idx, cd[0]) + f"connection data do match for connection {idx} for lake {cd[0]}" ) break - assert match, "connection data do not match for connection {}".format( - jdx - ) + assert match, f"connection data do not match for connection {jdx}" # evaluate the revised idomain, only layer 1 has been adjusted idomain0_test = idomain[0, :, :].copy() @@ -677,11 +670,9 @@ def test_embedded_lak_prudic_mixed(): if lakeno == 0: assert ( bedleak == "none" - ), "bedleak for lake 0 " "is not 'none' ({})".format(bedleak) + ), f"bedleak for lake 0 is not 'none' ({bedleak})" else: - assert ( - bedleak == 1.0 - ), "bedleak for lake 1 " "is not 1.0 ({})".format(bedleak) + assert bedleak == 1.0, f"bedleak for lake 1 is not 1.0 ({bedleak})" return diff --git a/autotest/t501_test.py b/autotest/t501_test.py index 22d89b014..620bb8c0b 100644 --- a/autotest/t501_test.py +++ b/autotest/t501_test.py @@ -126,11 +126,11 @@ def test_mf6(): ] exts_sim = ["gwfgwf", "ims", "tdis"] for ext in exts_model: - fname = os.path.join(out_dir, "model.{}".format(ext)) - assert os.path.isfile(fname), fname + " not found" + fname = os.path.join(out_dir, f"model.{ext}") + assert os.path.isfile(fname), f"{fname} not found" for ext in exts_sim: - fname = os.path.join(out_dir, "sim.{}".format(ext)) - assert os.path.isfile(fname), fname + " not found" + fname = os.path.join(out_dir, f"sim.{ext}") + assert os.path.isfile(fname), f"{fname} not found" return diff --git a/autotest/t502_test.py b/autotest/t502_test.py index 3bcf15ee2..11afce5cf 100644 --- a/autotest/t502_test.py +++ b/autotest/t502_test.py @@ -20,7 +20,7 @@ def test_create_and_run_model(): exe_name = "mf6" # set up simulation - tdis_name = "{}.tdis".format(sim_name) + tdis_name = f"{sim_name}.tdis" sim = MFSimulation( sim_name=sim_name, version="mf6", exe_name=exe_name, sim_ws=out_dir ) @@ -31,7 +31,7 @@ def test_create_and_run_model(): # create model instance model = mfgwf.ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) # create solution and add the model @@ -62,7 +62,7 @@ def test_create_and_run_model(): delc=500.0, top=100.0, botm=50.0, - filename="{}.dis".format(model_name), + filename=f"{model_name}.dis", ) ic_package = mfgwfic.ModflowGwfic( model, @@ -78,7 +78,7 @@ def test_create_and_run_model(): 100.0, 100.0, ], - filename="{}.ic".format(model_name), + filename=f"{model_name}.ic", ) npf_package = mfgwfnpf.ModflowGwfnpf( model, save_flows=True, icelltype=1, k=100.0 @@ -118,8 +118,8 @@ def test_create_and_run_model(): ) oc_package = mfgwfoc.ModflowGwfoc( model, - budget_filerecord=["{}.cbc".format(model_name)], - head_filerecord=["{}.hds".format(model_name)], + budget_filerecord=[f"{model_name}.cbc"], + head_filerecord=[f"{model_name}.hds"], saverecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], printrecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], ) diff --git a/autotest/t503_test.py b/autotest/t503_test.py index bf00a5f9e..a9e7a95c6 100644 --- 
a/autotest/t503_test.py +++ b/autotest/t503_test.py @@ -20,7 +20,7 @@ def download_mf6_examples(): # create folder for mf6 distribution download cpth = os.getcwd() dstpth = os.path.join("temp", dirname) - print("create...{}".format(dstpth)) + print(f"create...{dstpth}") if not os.path.exists(dstpth): os.makedirs(dstpth) os.chdir(dstpth) @@ -71,7 +71,7 @@ def download_mf6_examples(): if useModel: for file_name in fileList: if file_name.lower() == "mfsim.nam": - print("Found directory: {}".format(dirName)) + print(f"Found directory: {dirName}") src_folders.append(dirName) src_folders = sorted(src_folders) @@ -80,7 +80,7 @@ def download_mf6_examples(): dirBase = src.partition("{0}mf6examples{0}".format(os.path.sep))[2] dst = os.path.join(out_dir, dirBase) - print("copying {} -> {}".format(src, dst)) + print(f"copying {src} -> {dst}") folders.append(dst) shutil.copytree(src, dst) folders = sorted(folders) @@ -95,11 +95,11 @@ def download_mf6_examples(): def runmodel(folder): f = os.path.basename(os.path.normpath(folder)) print("\n\n") - print("**** RUNNING TEST: {} ****".format(f)) + print(f"**** RUNNING TEST: {f} ****") print("\n") # load the model into a flopy simulation - print("loading {}".format(f)) + print(f"loading {f}") sim = flopy.mf6.MFSimulation.load(f, "mf6", exe_name, folder) assert isinstance(sim, flopy.mf6.MFSimulation) @@ -112,7 +112,7 @@ def runmodel(folder): f for f in os.listdir(folder) if f.lower().endswith(".hds") ] - folder2 = folder + "-RERUN" + folder2 = f"{folder}-RERUN" sim.simulation_data.mfpath.set_sim_path(folder2) sim.write_simulation() success, buff = sim.run_simulation() diff --git a/autotest/t504_test.py b/autotest/t504_test.py index 8b721af7a..4cbc2c42d 100644 --- a/autotest/t504_test.py +++ b/autotest/t504_test.py @@ -83,9 +83,9 @@ def test001a_tharmonic(): assert os.path.exists(data_path) # model export test model = sim.get_model(model_name) - model.export("{}/tharmonic.nc".format(model.model_ws)) - model.export("{}/tharmonic.shp".format(model.model_ws)) - model.dis.botm.export("{}/botm.shp".format(model.model_ws)) + model.export(f"{model.model_ws}/tharmonic.nc") + model.export(f"{model.model_ws}/tharmonic.shp") + model.dis.botm.export(f"{model.model_ws}/botm.shp") mg = model.modelgrid @@ -234,7 +234,7 @@ def test003_gwfs_disv(): model = sim.get_model(model_name) if shapefile: - model.export("{}/{}.shp".format(pth, test_ex_name)) + model.export(f"{pth}/{test_ex_name}.shp") # change some settings chd_head_left = model.get_package("CHD_LEFT") @@ -763,9 +763,7 @@ def test006_2models_mvr(): assert ( package in model.package_type_dict or package in sim.package_type_dict - ) == ( - package in load_only or "{}6".format(package) in load_only - ) + ) == (package in load_only or f"{package}6" in load_only) assert (len(sim._exchange_files) > 0) == ( "gwf6-gwf6" in load_only or "gwf-gwf" in load_only ) @@ -882,7 +880,7 @@ def test001e_uzf_3lay(): model = sim.get_model() for package in model_package_check: assert (package in model.package_type_dict) == ( - package in load_only or "{}6".format(package) in load_only + package in load_only or f"{package}6" in load_only ) if run: # test running a runnable load_only case @@ -1244,8 +1242,8 @@ def test_mf6_output_add_observation(): # remove sfr_obs and add a new sfr obs sfr = gwf.sfr - obs_file = "{}.sfr.obs".format(model_name) - csv_file = obs_file + ".csv" + obs_file = f"{model_name}.sfr.obs" + csv_file = f"{obs_file}.csv" obs_dict = { csv_file: [ ("l08_stage", "stage", (8,)), diff --git a/autotest/t505_test.py 
b/autotest/t505_test.py index dc9402ae5..8eb9e14ec 100644 --- a/autotest/t505_test.py +++ b/autotest/t505_test.py @@ -99,8 +99,8 @@ def np001(): bad_model = ModflowGwf( test_sim, modelname=model_name, - model_nam_file="{}.nam".format(model_name), - **kwargs + model_nam_file=f"{model_name}.nam", + **kwargs, ) except FlopyException: ex = True @@ -111,9 +111,9 @@ def np001(): good_model = ModflowGwf( test_sim, modelname=model_name, - model_nam_file="{}.nam".format(model_name), + model_nam_file=f"{model_name}.nam", model_rel_path="model_folder", - **kwargs + **kwargs, ) # create simulation @@ -153,7 +153,7 @@ def np001(): ims_package = ModflowIms( sim, pname="my_ims_file", - filename="{}.ims".format(test_ex_name), + filename=f"{test_ex_name}.ims", print_option="ALL", complexity="SIMPLE", outer_hclose=0.00001, @@ -168,7 +168,7 @@ def np001(): ) model = ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) # test case insensitive lookup assert sim.get_model(model_name.upper()) is not None @@ -189,7 +189,7 @@ def np001(): delc=100.0, top=60.0, botm=50.0, - filename="{}.dis".format(model_name), + filename=f"{model_name}.dis", pname="mydispkg", ) # specifying dis package twice with the same name should automatically @@ -206,13 +206,13 @@ def np001(): delc=500.0, top=top, botm=botm, - filename="{}.dis".format(model_name), + filename=f"{model_name}.dis", pname="mydispkg", ) top_data = dis_package.top.get_data() assert top_data[0, 0] == 100.0 ic_package = flopy.mf6.ModflowGwfic( - model, strt="initial_heads.txt", filename="{}.ic".format(model_name) + model, strt="initial_heads.txt", filename=f"{model_name}.ic" ) npf_package = ModflowGwfnpf( model, @@ -648,7 +648,7 @@ def np002(): sim, time_units="DAYS", nper=2, perioddata=tdis_rc ) model = ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) ims_package = ModflowIms( sim, @@ -701,7 +701,7 @@ def np002(): top=top, botm=botm, idomain=2, - filename="{}.dis".format(model_name), + filename=f"{model_name}.dis", ) assert sim.simulation_data.max_columns_of_data == 22 sim.simulation_data.max_columns_of_data = dis_package.ncol.get_data() @@ -718,9 +718,7 @@ def np002(): 100.0, 100.0, ] - ic_package = ModflowGwfic( - model, strt=ic_vals, filename="{}.ic".format(model_name) - ) + ic_package = ModflowGwfic(model, strt=ic_vals, filename=f"{model_name}.ic") ic_package.strt.store_as_external_file("initial_heads.txt") npf_package = ModflowGwfnpf(model, save_flows=True, icelltype=1, k=100.0) npf_package.k.store_as_external_file("k.bin", binary=True) @@ -903,7 +901,7 @@ def test021_twri(): sim, time_units="SECONDS", nper=1, perioddata=tdis_rc ) model = ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) ims_package = ModflowIms( sim, @@ -963,7 +961,7 @@ def test021_twri(): delc=5000.0, top=top, botm=[-200, -300, -450], - filename="{}.dis".format(model_name), + filename=f"{model_name}.dis", ) strt = [ {"filename": "strt.txt", "factor": 1.0, "data": 0.0}, @@ -975,9 +973,7 @@ def test021_twri(): }, 2.0, ] - ic_package = ModflowGwfic( - model, strt=strt, filename="{}.ic".format(model_name) - ) + ic_package = ModflowGwfic(model, strt=strt, filename=f"{model_name}.ic") npf_package = ModflowGwfnpf( model, save_flows=True, @@ -1016,9 +1012,7 @@ def test021_twri(): stress_period_data = [] 
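
# f-string replacement fields evaluate arbitrary expressions (indexing,
# arithmetic, attribute access), which is what lets the multi-argument
# .format() calls in these hunks collapse into inline fields. Sketch with
# placeholder names; Pkg stands in for a flopy package object and is not
# part of the flopy API:
class Pkg:
    package_type = "dis"

col = 3
assert f"name_{col}" == "name_{}".format(col) == "name_3"
assert f"new_name.{Pkg.package_type}" == "new_name.dis"
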
drn_heads = [0.0, 0.0, 10.0, 20.0, 30.0, 50.0, 70.0, 90.0, 100.0] for col, head in zip(range(1, 10), drn_heads): - stress_period_data.append( - ((0, 7, col), head, 1.0, "name_{}".format(col)) - ) + stress_period_data.append(((0, 7, col), head, 1.0, f"name_{col}")) drn_package = ModflowGwfdrn( model, print_input=True, @@ -1125,7 +1119,7 @@ def test005_advgw_tidal(): sim, time_units="DAYS", nper=4, perioddata=tdis_rc ) model = ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) ims_package = ModflowIms( sim, @@ -1153,11 +1147,9 @@ def test005_advgw_tidal(): delc=500.0, top=50.0, botm=[5.0, -10.0, {"factor": 1.0, "data": bot_data}], - filename="{}.dis".format(model_name), - ) - ic_package = ModflowGwfic( - model, strt=50.0, filename="{}.ic".format(model_name) + filename=f"{model_name}.dis", ) + ic_package = ModflowGwfic(model, strt=50.0, filename=f"{model_name}.ic") npf_package = ModflowGwfnpf( model, save_flows=True, @@ -1641,9 +1633,7 @@ def test005_advgw_tidal(): package_type_dict = {} for package in model.packagelist: if not package.package_type in package_type_dict: - assert package.filename == "new_name.{}".format( - package.package_type - ) + assert package.filename == f"new_name.{package.package_type}" package_type_dict[package.package_type] = 1 sim.write_simulation() name_file = os.path.join(run_folder, "new_name.nam") @@ -1655,8 +1645,9 @@ def test005_advgw_tidal(): package_type_dict = {} for package in model.packagelist: if not package.package_type in package_type_dict: - assert package.filename == "all_files_same_name.{}".format( - package.package_type + assert ( + package.filename + == f"all_files_same_name.{package.package_type}" ) package_type_dict[package.package_type] = 1 assert sim._tdis_file.filename == "all_files_same_name.tdis" @@ -1731,7 +1722,7 @@ def test004_bcfss(): sim, time_units="DAYS", nper=2, perioddata=tdis_rc ) model = ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) ims_package = ModflowIms( sim, @@ -1759,11 +1750,9 @@ def test004_bcfss(): delc=500.0, top=150.0, botm=[50.0, -50.0], - filename="{}.dis".format(model_name), - ) - ic_package = ModflowGwfic( - model, strt=0.0, filename="{}.ic".format(model_name) + filename=f"{model_name}.dis", ) + ic_package = ModflowGwfic(model, strt=0.0, filename=f"{model_name}.ic") wetdry_data = [] for row in range(0, 10): if row == 2 or row == 7: @@ -1888,7 +1877,7 @@ def test035_fhb(): sim, time_units="DAYS", nper=3, perioddata=tdis_rc ) model = ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) ims_package = ModflowIms( sim, @@ -1916,11 +1905,9 @@ def test035_fhb(): delc=1000.0, top=50.0, botm=-200.0, - filename="{}.dis".format(model_name), - ) - ic_package = ModflowGwfic( - model, strt=0.0, filename="{}.ic".format(model_name) + filename=f"{model_name}.dis", ) + ic_package = ModflowGwfic(model, strt=0.0, filename=f"{model_name}.ic") npf_package = ModflowGwfnpf( model, perched=True, icelltype=0, k=20.0, k33=1.0 ) @@ -2033,7 +2020,7 @@ def test006_gwf3_disv(): sim, time_units="DAYS", nper=1, perioddata=tdis_rc ) model = ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) ims_package = ModflowIms( sim, @@ -2062,7 +2049,7 @@ def 
test006_gwf3_disv(): idomain=1, vertices=vertices, cell2d=c2drecarray, - filename="{}.disv".format(model_name), + filename=f"{model_name}.disv", ) strt_list = [ 1, @@ -2188,7 +2175,7 @@ def test006_gwf3_disv(): 0, ] ic_package = ModflowGwfic( - model, strt=strt_list, filename="{}.ic".format(model_name) + model, strt=strt_list, filename=f"{model_name}.ic" ) k = {"filename": "k.bin", "factor": 1.0, "data": 1.0, "binary": "True"} npf_package = ModflowGwfnpf( @@ -2324,12 +2311,12 @@ def test006_2models_gnc(): model_1 = ModflowGwf( sim, modelname=model_name_1, - model_nam_file="{}.nam".format(model_name_1), + model_nam_file=f"{model_name_1}.nam", ) model_2 = ModflowGwf( sim, modelname=model_name_2, - model_nam_file="{}.nam".format(model_name_2), + model_nam_file=f"{model_name_2}.nam", ) ims_package = ModflowIms( sim, @@ -2408,7 +2395,7 @@ def test006_2models_gnc(): delc=100.0, top=0.0, botm=-100.0, - filename="{}.dis".format(model_name_1), + filename=f"{model_name_1}.dis", ) dis_package_2 = ModflowGwfdis( model_2, @@ -2420,7 +2407,7 @@ def test006_2models_gnc(): delc=33.33, top=0.0, botm=-100.0, - filename="{}.dis".format(model_name_2), + filename=f"{model_name_2}.dis", ) strt_list = [ @@ -2475,10 +2462,10 @@ def test006_2models_gnc(): 0.0, ] ic_package_1 = ModflowGwfic( - model_1, strt=strt_list, filename="{}.ic".format(model_name_1) + model_1, strt=strt_list, filename=f"{model_name_1}.ic" ) ic_package_2 = ModflowGwfic( - model_2, strt=1.0, filename="{}.ic".format(model_name_2) + model_2, strt=1.0, filename=f"{model_name_2}.ic" ) npf_package_1 = ModflowGwfnpf( model_1, save_flows=True, perched=True, icelltype=0, k=1.0, k33=1.0 @@ -2656,7 +2643,7 @@ def test050_circle_island(): sim, time_units="DAYS", nper=1, perioddata=tdis_rc ) model = ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) ims_package = ModflowIms( sim, @@ -2683,11 +2670,9 @@ def test050_circle_island(): idomain=1, vertices=vertices, cell2d=c2drecarray, - filename="{}.disv".format(model_name), - ) - ic_package = ModflowGwfic( - model, strt=0.0, filename="{}.ic".format(model_name) + filename=f"{model_name}.disv", ) + ic_package = ModflowGwfic(model, strt=0.0, filename=f"{model_name}.ic") npf_package = ModflowGwfnpf( model, save_flows=True, icelltype=0, k=10.0, k33=0.2 ) @@ -2765,7 +2750,7 @@ def test028_sfr(): filename="simulation.tdis", ) model = ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) model.name_file.save_flows.set_data(True) ims_package = ModflowIms( @@ -2806,12 +2791,12 @@ def test028_sfr(): top=top, botm=botm, idomain=idomain, - filename="{}.dis".format(model_name), + filename=f"{model_name}.dis", ) strt = testutils.read_std_array(os.path.join(pth, "strt.txt"), "float") strt_int = ["internal", "factor", 1.0, "iprn", 0, strt] ic_package = ModflowGwfic( - model, strt=strt_int, filename="{}.ic".format(model_name) + model, strt=strt_int, filename=f"{model_name}.ic" ) k_vals = testutils.read_std_array(os.path.join(pth, "k.txt"), "float") @@ -3059,7 +3044,7 @@ def test_transport(): ) # create gwf model - gwfname = "gwf_" + name + gwfname = f"gwf_{name}" newtonoptions = ["NEWTON", "UNDER_RELAXATION"] gwf = flopy.mf6.ModflowGwf( sim, @@ -3082,7 +3067,7 @@ def test_transport(): scaling_method="NONE", reordering_method="NONE", relaxation_factor=relax, - filename="{}.ims".format(gwfname), + filename=f"{gwfname}.ims", ) 
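
# Name building by "+" concatenation becomes an f-string in this hunk; all
# three spellings produce the same string. Placeholder model name for
# illustration:
name = "transport"
assert "gwf_" + name == "gwf_{}".format(name) == f"gwf_{name}" == "gwf_transport"
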
sim.register_ims_package(imsgwf, [gwf.name]) @@ -3131,20 +3116,20 @@ def test_transport(): # output control oc = flopy.mf6.ModflowGwfoc( gwf, - budget_filerecord="{}.cbc".format(gwfname), - head_filerecord="{}.hds".format(gwfname), + budget_filerecord=f"{gwfname}.cbc", + head_filerecord=f"{gwfname}.hds", headprintrecord=[("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL")], saverecord=[("HEAD", "ALL")], printrecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], ) # create gwt model - gwtname = "gwt_" + name + gwtname = f"gwt_{name}" gwt = flopy.mf6.MFModel( sim, model_type="gwt6", modelname=gwtname, - model_nam_file="{}.nam".format(gwtname), + model_nam_file=f"{gwtname}.nam", ) # create iterative model solution and register the gwt model with it @@ -3161,7 +3146,7 @@ def test_transport(): scaling_method="NONE", reordering_method="NONE", relaxation_factor=relax, - filename="{}.ims".format(gwtname), + filename=f"{gwtname}.ims", ) sim.register_ims_package(imsgwt, [gwt.name]) @@ -3175,7 +3160,7 @@ def test_transport(): top=top, botm=botm, idomain=1, - filename="{}.dis".format(gwtname), + filename=f"{gwtname}.dis", ) # initial conditions @@ -3183,25 +3168,25 @@ def test_transport(): # advection adv = flopy.mf6.ModflowGwtadv( - gwt, scheme="UPSTREAM", filename="{}.adv".format(gwtname) + gwt, scheme="UPSTREAM", filename=f"{gwtname}.adv" ) # mass storage and transfer mst = flopy.mf6.ModflowGwtmst( - gwt, porosity=sy[idx], filename="{}.mst".format(gwtname) + gwt, porosity=sy[idx], filename=f"{gwtname}.mst" ) # sources sourcerecarray = [("WEL-1", "AUX", "CONCENTRATION")] ssm = flopy.mf6.ModflowGwtssm( - gwt, sources=sourcerecarray, filename="{}.ssm".format(gwtname) + gwt, sources=sourcerecarray, filename=f"{gwtname}.ssm" ) # output control oc = flopy.mf6.ModflowGwtoc( gwt, - budget_filerecord="{}.cbc".format(gwtname), - concentration_filerecord="{}.ucn".format(gwtname), + budget_filerecord=f"{gwtname}.cbc", + concentration_filerecord=f"{gwtname}.ucn", concentrationprintrecord=[ ("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL") ], @@ -3215,7 +3200,7 @@ def test_transport(): exgtype="GWF6-GWT6", exgmnamea=gwfname, exgmnameb=gwtname, - filename="{}.gwfgwt".format(name), + filename=f"{name}.gwfgwt", ) # write MODFLOW 6 files diff --git a/autotest/t506_test.py b/autotest/t506_test.py index 786b9e255..c40e61df6 100644 --- a/autotest/t506_test.py +++ b/autotest/t506_test.py @@ -132,8 +132,8 @@ def test_mf6disv(): gwf, xt3doptions=True, save_specific_discharge=True ) chd = flopy.mf6.ModflowGwfchd(gwf, stress_period_data=chdspd) - budget_file = name + ".bud" - head_file = name + ".hds" + budget_file = f"{name}.bud" + head_file = f"{name}.hds" oc = flopy.mf6.ModflowGwfoc( gwf, budget_filerecord=budget_file, @@ -174,7 +174,7 @@ def test_mf6disv(): vmin=vmin, vmax=vmax, ) - ax.set_title("Layer {}".format(ilay + 1)) + ax.set_title(f"Layer {ilay + 1}") pmv.plot_vector(spdis["qx"], spdis["qy"], color="white") fname = "results.png" fname = os.path.join(ws, fname) @@ -244,8 +244,8 @@ def test_mf6disu(): gwf, xt3doptions=True, save_specific_discharge=True ) chd = flopy.mf6.ModflowGwfchd(gwf, stress_period_data=chdspd) - budget_file = name + ".bud" - head_file = name + ".hds" + budget_file = f"{name}.bud" + head_file = f"{name}.hds" oc = flopy.mf6.ModflowGwfoc( gwf, budget_filerecord=budget_file, @@ -293,7 +293,7 @@ def test_mf6disu(): vmin=vmin, vmax=vmax, ) - ax.set_title("Layer {}".format(ilay + 1)) + ax.set_title(f"Layer {ilay + 1}") pmv.plot_vector(spdis["qx"], spdis["qy"], color="white") fname = "results.png" fname = 
os.path.join(ws, fname) @@ -402,7 +402,7 @@ def test_mfusg(): m.run_model() # head is returned as a list of head arrays for each layer - head_file = os.path.join(ws, name + ".hds") + head_file = os.path.join(ws, f"{name}.hds") head = flopy.utils.HeadUFile(head_file).get_data() if matplotlib is not None: @@ -418,7 +418,7 @@ def test_mfusg(): pmv.contour_array( head[ilay], levels=[0.2, 0.4, 0.6, 0.8], linewidths=3.0 ) - ax.set_title("Layer {}".format(ilay + 1)) + ax.set_title(f"Layer {ilay + 1}") # pmv.plot_specific_discharge(spdis, color='white') fname = "results.png" fname = os.path.join(ws, fname) @@ -463,7 +463,7 @@ def test_mfusg(): # also test load of unstructured LPF with keywords lpf2 = flopy.modflow.ModflowLpf.load( - os.path.join(ws, name + ".lpf"), m, check=False + os.path.join(ws, f"{name}.lpf"), m, check=False ) msg = "NOCVCORRECTION and NOVFC should be in lpf options but at least one is not." assert ( diff --git a/autotest/t550_test.py b/autotest/t550_test.py index df01078a5..d8c2d57b1 100644 --- a/autotest/t550_test.py +++ b/autotest/t550_test.py @@ -78,7 +78,7 @@ def test_mf6_grid_shp_export(): sim, pname="tdis", time_units="DAYS", nper=nper, perioddata=perioddata ) gwf = fp6.ModflowGwf( - sim, modelname=mf6name, model_nam_file="{}.nam".format(mf6name) + sim, modelname=mf6name, model_nam_file=f"{mf6name}.nam" ) dis6 = fp6.ModflowGwfdis( gwf, pname="dis", nlay=nlay, nrow=nrow, ncol=ncol, top=top, botm=botm @@ -102,23 +102,23 @@ def cellid(k, i, j, nrow, ncol): rch6 = fp6.ModflowGwfrcha(gwf, recharge=rech) if shapefile: # rch6.export('{}/mf6.shp'.format(tmpdir)) - m.export("{}/mfnwt.shp".format(tmpdir)) - gwf.export("{}/mf6.shp".format(tmpdir)) + m.export(f"{tmpdir}/mfnwt.shp") + gwf.export(f"{tmpdir}/mf6.shp") riv6spdarrays = dict(riv6.stress_period_data.masked_4D_arrays_itr()) rivspdarrays = dict(riv.stress_period_data.masked_4D_arrays_itr()) for k, v in rivspdarrays.items(): assert ( np.abs(np.nansum(v) - np.nansum(riv6spdarrays[k])) < 1e-6 - ), "variable {} is not equal".format(k) + ), f"variable {k} is not equal" pass if shapefile is None: return # skip remainder # check that the two shapefiles are the same - ra = shp2recarray("{}/mfnwt.shp".format(tmpdir)) - ra6 = shp2recarray("{}/mf6.shp".format(tmpdir)) + ra = shp2recarray(f"{tmpdir}/mfnwt.shp") + ra6 = shp2recarray(f"{tmpdir}/mf6.shp") # check first and last exported cells assert ra.geometry[0] == ra6.geometry[0] @@ -130,10 +130,7 @@ def cellid(k, i, j, nrow, ncol): ] assert len(different_fields) == 0 for l in np.arange(m.nlay) + 1: - assert ( - np.sum(np.abs(ra["rech_{}".format(l)] - ra6["rechar{}".format(l)])) - < 1e-6 - ) + assert np.sum(np.abs(ra[f"rech_{l}"] - ra6[f"rechar{l}"])) < 1e-6 common_fields = set(ra.dtype.names).intersection(ra6.dtype.names) common_fields.remove("geometry") # array values @@ -172,7 +169,7 @@ def test_huge_shapefile(): botm=botm, ) if shapefile: - m.export("{}/huge.shp".format(tmpdir)) + m.export(f"{tmpdir}/huge.shp") if __name__ == "__main__": diff --git a/examples/Testing/flopy3_CrossSectionExample.py b/examples/Testing/flopy3_CrossSectionExample.py index 22265a822..1d8367e55 100644 --- a/examples/Testing/flopy3_CrossSectionExample.py +++ b/examples/Testing/flopy3_CrossSectionExample.py @@ -34,11 +34,9 @@ files = ['freyberg.hds', 'freyberg.cbc'] for f in files: if os.path.isfile(os.path.join(modelpth, f)): - msg = 'Output file located: {}'.format(f) - print (msg) + print (f'Output file located: {f}') else: - errmsg = 'Error. 
Output file cannot be found: {}'.format(f) - print (errmsg) + print (f'Error. Output file cannot be found: {f}') fname = os.path.join(modelpth, 'freyberg.hds') diff --git a/examples/Testing/testunitcbc.py b/examples/Testing/testunitcbc.py index 72c63870e..c30981181 100644 --- a/examples/Testing/testunitcbc.py +++ b/examples/Testing/testunitcbc.py @@ -59,12 +59,12 @@ fig = plt.figure(figsize=(10,10)) ax = fig.add_subplot(1, 1, 1, aspect='equal') -hds = bf.HeadFile(os.path.join('data', modelname+'.hds')) +hds = bf.HeadFile(os.path.join('data', f"{modelname}.hds")) times = hds.get_times() head = hds.get_data(totim=times[-1]) levels = np.linspace(0, 10, 11) -cbb = bf.CellBudgetFile(os.path.join('data', modelname+'.cbc')) +cbb = bf.CellBudgetFile(os.path.join('data', f"{modelname}.cbc")) kstpkper_list = cbb.get_kstpkper() frf = cbb.get_data(text='FLOW RIGHT FACE', totim=times[-1])[0] fff = cbb.get_data(text='FLOW FRONT FACE', totim=times[-1])[0] diff --git a/examples/Tutorials/modflow/tutorial01_mf.py b/examples/Tutorials/modflow/tutorial01_mf.py index fd4255b92..11cda47e4 100644 --- a/examples/Tutorials/modflow/tutorial01_mf.py +++ b/examples/Tutorials/modflow/tutorial01_mf.py @@ -149,7 +149,7 @@ # Extract the heads -hds = bf.HeadFile(modelname + ".hds") +hds = bf.HeadFile(f"{modelname}.hds") head = hds.get_data(totim=1.0) # Contour the heads @@ -165,13 +165,13 @@ # plot head contours, and plot vectors: # Extract the heads -hds = bf.HeadFile(modelname + ".hds") +hds = bf.HeadFile(f"{modelname}.hds") times = hds.get_times() head = hds.get_data(totim=times[-1]) # Extract the cell-by-cell flows -cbb = bf.CellBudgetFile(modelname + ".cbc") +cbb = bf.CellBudgetFile(f"{modelname}.cbc") kstpkper_list = cbb.get_kstpkper() frf = cbb.get_data(text="FLOW RIGHT FACE", totim=times[-1])[0] fff = cbb.get_data(text="FLOW FRONT FACE", totim=times[-1])[0] diff --git a/examples/Tutorials/modflow/tutorial02_mf.py b/examples/Tutorials/modflow/tutorial02_mf.py index ab2cfd44f..be9f421b0 100644 --- a/examples/Tutorials/modflow/tutorial02_mf.py +++ b/examples/Tutorials/modflow/tutorial02_mf.py @@ -214,9 +214,9 @@ import flopy.utils.binaryfile as bf # Create the headfile and budget file objects -headobj = bf.HeadFile(modelname + ".hds") +headobj = bf.HeadFile(f"{modelname}.hds") times = headobj.get_times() -cbb = bf.CellBudgetFile(modelname + ".cbc") +cbb = bf.CellBudgetFile(f"{modelname}.cbc") # Setup contour parameters levels = np.linspace(0, 10, 11) @@ -247,7 +247,7 @@ # Create a map for this time ax = fig.add_subplot(len(mytimes), 1, iplot + 1, aspect="equal") - ax.set_title("stress period " + str(iplot + 1)) + ax.set_title(f"stress period {iplot + 1}") pmv = flopy.plot.PlotMapView(model=mf, layer=0, ax=ax) qm = pmv.plot_ibound() @@ -281,7 +281,7 @@ ts = headobj.get_ts(idx) fig = plt.figure(figsize=(6, 6)) ax = fig.add_subplot(1, 1, 1) -ttl = "Head at cell ({0},{1},{2})".format(idx[0] + 1, idx[1] + 1, idx[2] + 1) +ttl = f"Head at cell ({idx[0] + 1},{idx[1] + 1},{idx[2] + 1})" ax.set_title(ttl) ax.set_xlabel("time") ax.set_ylabel("head") diff --git a/examples/Tutorials/modflow6/tutorial01_mf6.py b/examples/Tutorials/modflow6/tutorial01_mf6.py index 3de3fca37..17a0d9c35 100644 --- a/examples/Tutorials/modflow6/tutorial01_mf6.py +++ b/examples/Tutorials/modflow6/tutorial01_mf6.py @@ -79,7 +79,7 @@ # Create the Flopy groundwater flow (gwf) model object -model_nam_file = "{}.nam".format(name) +model_nam_file = f"{name}.nam" gwf = flopy.mf6.ModflowGwf( sim, modelname=name, @@ -169,9 +169,9 @@ # Save heads and budget 
output to binary files and print heads to the model # listing file at the end of the stress period. -headfile = "{}.hds".format(name) +headfile = f"{name}.hds" head_filerecord = [headfile] -budgetfile = "{}.cbb".format(name) +budgetfile = f"{name}.cbb" budget_filerecord = [budgetfile] saverecord = [("HEAD", "ALL"), ("BUDGET", "ALL")] printrecord = [("HEAD", "LAST")] @@ -270,7 +270,7 @@ cb = plt.colorbar(pa, shrink=0.5, ax=ax) # second subplot ax = axes[1] -ax.set_title("Model Layer {}".format(Nlay)) +ax.set_title(f"Model Layer {Nlay}") modelmap = flopy.plot.PlotMapView(model=gwf, ax=ax, layer=Nlay - 1) linecollection = modelmap.plot_grid(lw=0.5, color="0.5") pa = modelmap.plot_array(h, vmin=vmin, vmax=vmax) @@ -326,7 +326,7 @@ # into the function because it contains the ia array that defines # the location of the diagonal position in the `FLOW-JA-FACE` array. -grb_file = "{}.dis.grb".format(name) +grb_file = f"{name}.dis.grb" residual = flopy.mf6.utils.get_residuals(flowja, grb_file=grb_file) # ### Plot a Map of the flow error in Layer 10 diff --git a/examples/Tutorials/modflow6data/tutorial01_mf6_data.py b/examples/Tutorials/modflow6data/tutorial01_mf6_data.py index d13a827fd..b153adf72 100644 --- a/examples/Tutorials/modflow6data/tutorial01_mf6_data.py +++ b/examples/Tutorials/modflow6data/tutorial01_mf6_data.py @@ -51,8 +51,8 @@ flopy.mf6.ModflowGwfic(gwf) flopy.mf6.ModflowGwfnpf(gwf, save_specific_discharge=True) flopy.mf6.ModflowGwfchd(gwf, stress_period_data=[[(0, 0, 0), 1.0], [(2, 3, 4), 0.0]]) -budget_file = name + ".bud" -head_file = name + ".hds" +budget_file = f"{name}.bud" +head_file = f"{name}.hds" flopy.mf6.ModflowGwfoc( gwf, budget_filerecord=budget_file, diff --git a/examples/Tutorials/modflow6data/tutorial02_mf6_data.py b/examples/Tutorials/modflow6data/tutorial02_mf6_data.py index aa375f2b4..df0bcb0cf 100644 --- a/examples/Tutorials/modflow6data/tutorial02_mf6_data.py +++ b/examples/Tutorials/modflow6data/tutorial02_mf6_data.py @@ -49,7 +49,7 @@ sim, time_units="DAYS", nper=4, perioddata=tdis_rc ) # create the flopy groundwater flow (gwf) model object -model_nam_file = "{}.nam".format(name) +model_nam_file = f"{name}.nam" gwf = flopy.mf6.ModflowGwf(sim, modelname=name, model_nam_file=model_nam_file) # create the flopy iterative model solver (ims) package object ims = flopy.mf6.modflow.mfims.ModflowIms(sim, pname="ims", complexity="SIMPLE") diff --git a/examples/Tutorials/modflow6data/tutorial03_mf6_data.py b/examples/Tutorials/modflow6data/tutorial03_mf6_data.py index 0a8d3a199..c2f7a3bf7 100644 --- a/examples/Tutorials/modflow6data/tutorial03_mf6_data.py +++ b/examples/Tutorials/modflow6data/tutorial03_mf6_data.py @@ -52,7 +52,7 @@ sim, time_units="DAYS", nper=4, perioddata=tdis_rc ) # create the Flopy groundwater flow (gwf) model object -model_nam_file = "{}.nam".format(name) +model_nam_file = f"{name}.nam" gwf = flopy.mf6.ModflowGwf(sim, modelname=name, model_nam_file=model_nam_file) # create the flopy iterative model solver (ims) package object ims = flopy.mf6.modflow.mfims.ModflowIms(sim, pname="ims", complexity="SIMPLE") diff --git a/examples/Tutorials/modflow6data/tutorial04_mf6_data.py b/examples/Tutorials/modflow6data/tutorial04_mf6_data.py index bd16af957..e785dc770 100644 --- a/examples/Tutorials/modflow6data/tutorial04_mf6_data.py +++ b/examples/Tutorials/modflow6data/tutorial04_mf6_data.py @@ -51,7 +51,7 @@ sim, time_units="DAYS", nper=4, perioddata=tdis_rc ) # create the Flopy groundwater flow (gwf) model object -model_nam_file = "{}.nam".format(name) 
+model_nam_file = f"{name}.nam" gwf = flopy.mf6.ModflowGwf(sim, modelname=name, model_nam_file=model_nam_file) # create the flopy iterative model solver (ims) package object ims = flopy.mf6.modflow.mfims.ModflowIms(sim, pname="ims", complexity="SIMPLE") diff --git a/examples/Tutorials/modflow6data/tutorial05_mf6_data.py b/examples/Tutorials/modflow6data/tutorial05_mf6_data.py index 18e5bb0d2..94460f974 100644 --- a/examples/Tutorials/modflow6data/tutorial05_mf6_data.py +++ b/examples/Tutorials/modflow6data/tutorial05_mf6_data.py @@ -72,7 +72,7 @@ ) # create the flopy groundwater flow (gwf) model object -model_nam_file = "{}.nam".format(name) +model_nam_file = f"{name}.nam" gwf = flopy.mf6.ModflowGwf(sim, modelname=name, model_nam_file=model_nam_file) # create the flopy iterative model solver (ims) package object # (both pname and complexity are scalar data) diff --git a/examples/Tutorials/modflow6data/tutorial06_mf6_data.py b/examples/Tutorials/modflow6data/tutorial06_mf6_data.py index 736f0d316..49de9d600 100644 --- a/examples/Tutorials/modflow6data/tutorial06_mf6_data.py +++ b/examples/Tutorials/modflow6data/tutorial06_mf6_data.py @@ -86,7 +86,7 @@ perioddata=[(1.0, 1, 1.0), (1.0, 1, 1.0)], ) # create the Flopy groundwater flow (gwf) model object -model_nam_file = "{}.nam".format(name) +model_nam_file = f"{name}.nam" gwf = flopy.mf6.ModflowGwf(sim, modelname=name, model_nam_file=model_nam_file) # create the flopy iterative model solver (ims) package object ims = flopy.mf6.modflow.mfims.ModflowIms(sim, pname="ims", complexity="SIMPLE") diff --git a/examples/Tutorials/modflow6data/tutorial07_mf6_data.py b/examples/Tutorials/modflow6data/tutorial07_mf6_data.py index ca2263bda..1608d91cb 100644 --- a/examples/Tutorials/modflow6data/tutorial07_mf6_data.py +++ b/examples/Tutorials/modflow6data/tutorial07_mf6_data.py @@ -80,7 +80,7 @@ perioddata=[(1.0, 1, 1.0), (1.0, 1, 1.0)], ) # create the Flopy groundwater flow (gwf) model object -model_nam_file = "{}.nam".format(name) +model_nam_file = f"{name}.nam" gwf = flopy.mf6.ModflowGwf(sim, modelname=name, model_nam_file=model_nam_file) # create the flopy iterative model solver (ims) package object ims = flopy.mf6.modflow.mfims.ModflowIms(sim, pname="ims", complexity="SIMPLE") diff --git a/examples/Tutorials/modflow6data/tutorial08_mf6_data.py b/examples/Tutorials/modflow6data/tutorial08_mf6_data.py index f0797ad6a..956868a70 100644 --- a/examples/Tutorials/modflow6data/tutorial08_mf6_data.py +++ b/examples/Tutorials/modflow6data/tutorial08_mf6_data.py @@ -67,7 +67,7 @@ perioddata=[(1.0, 1, 1.0), (1.0, 1, 1.0)], ) # create the Flopy groundwater flow (gwf) model object -model_nam_file = "{}.nam".format(name) +model_nam_file = f"{name}.nam" gwf = flopy.mf6.ModflowGwf(sim, modelname=name, model_nam_file=model_nam_file) # create the flopy iterative model solver (ims) package object ims = flopy.mf6.modflow.mfims.ModflowIms(sim, pname="ims", complexity="SIMPLE") diff --git a/examples/common/setup_pmv_demo.py b/examples/common/setup_pmv_demo.py index ad71ef9ec..cc9151c5f 100644 --- a/examples/common/setup_pmv_demo.py +++ b/examples/common/setup_pmv_demo.py @@ -124,7 +124,7 @@ def run(): # create gwf model gwf = flopy.mf6.ModflowGwf( - sim, modelname=model_name, model_nam_file="{}.nam".format(model_name) + sim, modelname=model_name, model_nam_file=f"{model_name}.nam" ) gwf.name_file.save_flows = True @@ -201,8 +201,8 @@ def run(): oc = flopy.mf6.ModflowGwfoc( gwf, pname="oc", - budget_filerecord="{}.cbb".format(model_name), - 
head_filerecord="{}.hds".format(model_name), + budget_filerecord=f"{model_name}.cbb", + head_filerecord=f"{model_name}.hds", headprintrecord=[("COLUMNS", 10, "WIDTH", 15, "DIGITS", 6, "GENERAL")], saverecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], printrecord=[("HEAD", "ALL"), ("BUDGET", "ALL")], @@ -211,8 +211,8 @@ def run(): sim.write_simulation() sim.run_simulation() - mp_namea = model_name + "a_mp" - mp_nameb = model_name + "b_mp" + mp_namea = f"{model_name}a_mp" + mp_nameb = f"{model_name}b_mp" pcoord = np.array( [ @@ -248,7 +248,7 @@ def run(): ) # create backward particle group - fpth = mp_namea + ".sloc" + fpth = f"{mp_namea}.sloc" pga = flopy.modpath.ParticleGroup( particlegroupname="BACKWARD1", particledata=pa, filename=fpth ) @@ -270,7 +270,7 @@ def run(): ) pb = flopy.modpath.NodeParticleData(subdivisiondata=facedata, nodes=nodew) # create forward particle group - fpth = mp_nameb + ".sloc" + fpth = f"{mp_nameb}.sloc" pgb = flopy.modpath.ParticleGroupNodeTemplate( particlegroupname="BACKWARD2", particledata=pb, filename=fpth ) @@ -386,7 +386,7 @@ def run(): rchspd[1] = [[(0, 0, j), rrate, 0.0] for j in rcol] def build_mf6gwf(sim_folder): - print("Building mf6gwf model...{}".format(sim_folder)) + print(f"Building mf6gwf model...{sim_folder}") ws = os.path.join("data", "mf6-gwt-keating") name = "flow" sim_ws = os.path.join(ws, 'mf6gwf') @@ -461,8 +461,8 @@ def build_mf6gwf(sim_folder): pname="RCH-1", ) - head_filerecord = "{}.hds".format(name) - budget_filerecord = "{}.bud".format(name) + head_filerecord = f"{name}.hds" + budget_filerecord = f"{name}.bud" flopy.mf6.ModflowGwfoc( gwf, head_filerecord=head_filerecord, @@ -473,7 +473,7 @@ def build_mf6gwf(sim_folder): def build_mf6gwt(sim_folder): - print("Building mf6gwt model...{}".format(sim_folder)) + print(f"Building mf6gwt model...{sim_folder}") ws = os.path.join("data", "mf6-gwt-keating") name = "trans" sim_ws = os.path.join(ws, 'mf6gwt') @@ -521,7 +521,7 @@ def build_mf6gwt(sim_folder): gwt, xt3d_off=True, alh=alpha_l, ath1=alpha_th, atv=alpha_tv ) pd = [ - ("GWFHEAD", "../mf6gwf/flow.hds".format()), + ("GWFHEAD", "../mf6gwf/flow.hds"), ("GWFBUDGET", "../mf6gwf/flow.bud"), ] flopy.mf6.ModflowGwtfmi( @@ -545,8 +545,8 @@ def build_mf6gwt(sim_folder): } flopy.mf6.ModflowGwtoc( gwt, - budget_filerecord="{}.cbc".format(name), - concentration_filerecord="{}.ucn".format(name), + budget_filerecord=f"{name}.cbc", + concentration_filerecord=f"{name}.ucn", concentrationprintrecord=[ ("COLUMNS", ncol, "WIDTH", 15, "DIGITS", 6, "GENERAL") ], @@ -560,7 +560,7 @@ def build_mf6gwt(sim_folder): ], ) obs_data = { - "{}.obs.csv".format(name): [ + f"{name}.obs.csv": [ ("obs1", "CONCENTRATION", obs1), ("obs2", "CONCENTRATION", obs2), ], diff --git a/examples/scripts/flopy_henry.py b/examples/scripts/flopy_henry.py index ab1ef65aa..e7d5d3737 100644 --- a/examples/scripts/flopy_henry.py +++ b/examples/scripts/flopy_henry.py @@ -122,8 +122,8 @@ def run(): # Try to delete the output files, to prevent accidental use of older files try: os.remove(os.path.join(workspace, "MT3D001.UCN")) - os.remove(os.path.join(workspace, modelname + ".hds")) - os.remove(os.path.join(workspace, modelname + ".cbc")) + os.remove(os.path.join(workspace, f"{modelname}.hds")) + os.remove(os.path.join(workspace, f"{modelname}.cbc")) except: pass @@ -176,7 +176,7 @@ def run(): width=0.0025, ) - outfig = os.path.join(workspace, "henry_flows.{0}".format(fext)) + outfig = os.path.join(workspace, f"henry_flows.{fext}") fig.savefig(outfig, dpi=300) print("created...", outfig) @@ 
-194,7 +194,7 @@ def run(): ) ax.set_title("Simulated Heads") - outfig = os.path.join(workspace, "henry_heads.{0}".format(fext)) + outfig = os.path.join(workspace, f"henry_heads.{fext}") fig.savefig(outfig, dpi=300) print("created...", outfig) diff --git a/examples/scripts/flopy_lake_example.py b/examples/scripts/flopy_lake_example.py index 799d536a3..c7c6a8509 100644 --- a/examples/scripts/flopy_lake_example.py +++ b/examples/scripts/flopy_lake_example.py @@ -87,11 +87,11 @@ def run(): # create external ibound array and starting head files files = [] - hfile = "{}_strt.ref".format(name) + hfile = f"{name}_strt.ref" np.savetxt(hfile, start) hfiles = [] for kdx in range(Nlay): - file = "{}_ib{:02d}.ref".format(name, kdx + 1) + file = f"{name}_ib{kdx + 1:02d}.ref" files.append(file) hfiles.append(hfile) np.savetxt(file, ibound[kdx, :, :], fmt="%5d") @@ -118,14 +118,14 @@ def run(): # specifying, in this case, the step number and period number for which we want to retrieve data. # A three-dimensional array is returned of size `nlay, nrow, ncol`. Matplotlib contouring functions # are used to make contours of the layers or a cross-section. - hds = flopy.utils.HeadFile(os.path.join(workspace, name + ".hds")) + hds = flopy.utils.HeadFile(os.path.join(workspace, f"{name}.hds")) h = hds.get_data(kstpkper=(0, 0)) x = y = np.linspace(0, L, N) c = plt.contour(x, y, h[0], np.arange(90, 100.1, 0.2)) plt.clabel(c, fmt="%2.1f") plt.axis("scaled") - outfig = os.path.join(workspace, "lake1.{0}".format(fext)) + outfig = os.path.join(workspace, f"lake1.{fext}") fig = plt.gcf() fig.savefig(outfig, dpi=300) print("created...", outfig) @@ -135,7 +135,7 @@ def run(): plt.clabel(c, fmt="%1.1f") plt.axis("scaled") - outfig = os.path.join(workspace, "lake2.{0}".format(fext)) + outfig = os.path.join(workspace, f"lake2.{fext}") fig = plt.gcf() fig.savefig(outfig, dpi=300) print("created...", outfig) @@ -144,7 +144,7 @@ def run(): c = plt.contour(x, z, h[:, 50, :], np.arange(90, 100.1, 0.2)) plt.axis("scaled") - outfig = os.path.join(workspace, "lake3.{0}".format(fext)) + outfig = os.path.join(workspace, f"lake3.{fext}") fig = plt.gcf() fig.savefig(outfig, dpi=300) print("created...", outfig) diff --git a/examples/scripts/flopy_swi2_ex1.py b/examples/scripts/flopy_swi2_ex1.py index 2d838ae33..472b0d570 100755 --- a/examples/scripts/flopy_swi2_ex1.py +++ b/examples/scripts/flopy_swi2_ex1.py @@ -46,7 +46,7 @@ def run(): if os.path.isdir(f): continue if ".py" != os.path.splitext(f)[1].lower(): - print(" removing...{}".format(os.path.basename(f))) + print(f" removing...{os.path.basename(f)}") os.remove(os.path.join(workspace, f)) return 1 @@ -121,12 +121,12 @@ def run(): # run the model m = ml.run_model(silent=False) # read model heads - headfile = os.path.join(workspace, "{}.hds".format(modelname)) + headfile = os.path.join(workspace, f"{modelname}.hds") hdobj = flopy.utils.HeadFile(headfile) head = hdobj.get_alldata() head = np.array(head) # read model zeta - zetafile = os.path.join(workspace, "{}.zta".format(modelname)) + zetafile = os.path.join(workspace, f"{modelname}.zta") zobj = flopy.utils.CellBudgetFile(zetafile) zkstpkper = zobj.get_kstpkper() zeta = [] @@ -224,7 +224,7 @@ def run(): ax.text( x[ipos], -37.75, - "{0} days".format(((i + 1) * 100)), + f"{(i + 1) * 100} days", size=5, ha="left", va="center", @@ -260,7 +260,7 @@ def run(): ax.set_ylabel("Elevation, in meters") ax.set_xlabel("Horizontal distance, in meters") - outfig = os.path.join(workspace, "Figure06_swi2ex1.{0}".format(fext)) + outfig = 
os.path.join(workspace, f"Figure06_swi2ex1.{fext}") fig.savefig(outfig, dpi=300) print("created...", outfig) diff --git a/examples/scripts/flopy_swi2_ex2.py b/examples/scripts/flopy_swi2_ex2.py index 795db92d2..0ba92a9d3 100755 --- a/examples/scripts/flopy_swi2_ex2.py +++ b/examples/scripts/flopy_swi2_ex2.py @@ -60,7 +60,7 @@ def run(): if os.path.isdir(fpth): continue if ".py" != os.path.splitext(f)[1].lower(): - print(" removing...{}".format(os.path.basename(f))) + print(f" removing...{os.path.basename(f)}") try: os.remove(fpth) except: @@ -146,7 +146,7 @@ def run(): if not skipRuns: m = ml.run_model(silent=False) # read stratified results - zetafile = os.path.join(dirs[0], "{}.zta".format(modelname)) + zetafile = os.path.join(dirs[0], f"{modelname}.zta") zobj = flopy.utils.CellBudgetFile(zetafile) zkstpkper = zobj.get_kstpkper() zeta = zobj.get_data(kstpkper=zkstpkper[-1], text="ZETASRF 1")[0] @@ -195,7 +195,7 @@ def run(): if not skipRuns: m = ml.run_model(silent=False) # read vd model data - zetafile = os.path.join(dirs[0], "{}.zta".format(modelname)) + zetafile = os.path.join(dirs[0], f"{modelname}.zta") zobj = flopy.utils.CellBudgetFile(zetafile) zkstpkper = zobj.get_kstpkper() zetavd = zobj.get_data(kstpkper=zkstpkper[-1], text="ZETASRF 1")[0] @@ -468,7 +468,7 @@ def run(): ax.set_xlabel("Horizontal distance, in meters") ax.set_ylabel("Elevation, in meters") - outfig = os.path.join(workspace, "Figure07_swi2ex2.{0}".format(fext)) + outfig = os.path.join(workspace, f"Figure07_swi2ex2.{fext}") xsf.savefig(outfig, dpi=300) print("created...", outfig) diff --git a/examples/scripts/flopy_swi2_ex3.py b/examples/scripts/flopy_swi2_ex3.py index 92feff4a0..ac0bba330 100755 --- a/examples/scripts/flopy_swi2_ex3.py +++ b/examples/scripts/flopy_swi2_ex3.py @@ -42,7 +42,7 @@ def MergeData(ndim, zdata, tb): def LegBar(ax, x0, y0, t0, dx, dy, dt, cc): for c in cc: ax.plot([x0, x0 + dx], [y0, y0], color=c, linewidth=4) - ctxt = "{0:=3d} years".format(t0) + ctxt = f"{t0:=3d} years" ax.text(x0 + 2.0 * dx, y0 + dy / 2.0, ctxt, size=5) y0 += dy t0 += dt @@ -74,7 +74,7 @@ def run(): if os.path.isdir(fpth): continue if ".py" != os.path.splitext(f)[1].lower(): - print(" removing...{}".format(os.path.basename(f))) + print(f" removing...{os.path.basename(f)}") try: os.remove(fpth) except: @@ -146,11 +146,11 @@ def run(): # run the model m = ml.run_model(silent=True) - headfile = os.path.join(workspace, "{}.hds".format(modelname)) + headfile = os.path.join(workspace, f"{modelname}.hds") hdobj = flopy.utils.HeadFile(headfile) head = hdobj.get_data(totim=3.65000e05) - zetafile = os.path.join(workspace, "{}.zta".format(modelname)) + zetafile = os.path.join(workspace, f"{modelname}.zta") zobj = flopy.utils.CellBudgetFile(zetafile) zkstpkper = zobj.get_kstpkper() zeta = [] @@ -308,7 +308,7 @@ def run(): ax.set_ylabel("Elevation, in meters") ax.set_xlim(-250.0, 2500.0) - outfig = os.path.join(workspace, "Figure08_swi2ex3.{0}".format(fext)) + outfig = os.path.join(workspace, f"Figure08_swi2ex3.{fext}") fig.savefig(outfig, dpi=300) print("created...", outfig) diff --git a/examples/scripts/flopy_swi2_ex4.py b/examples/scripts/flopy_swi2_ex4.py index 4ee418263..3fac5ca8e 100644 --- a/examples/scripts/flopy_swi2_ex4.py +++ b/examples/scripts/flopy_swi2_ex4.py @@ -25,7 +25,7 @@ def LegBar(ax, x0, y0, t0, dx, dy, dt, cc): for c in cc: ax.plot([x0, x0 + dx], [y0, y0], color=c, linewidth=4) - ctxt = "{0:=3d} years".format(t0) + ctxt = f"{t0:=3d} years" ax.text(x0 + 2.0 * dx, y0 + dy / 2.0, ctxt, size=5) y0 += dy t0 += dt 
@@ -60,7 +60,7 @@ def run(): if os.path.isdir(fpth): continue if ".py" != os.path.splitext(f)[1].lower(): - print(" removing...{}".format(os.path.basename(f))) + print(f" removing...{os.path.basename(f)}") try: os.remove(fpth) except: @@ -297,7 +297,7 @@ def run(): # Load the simulation 1 `ZETA` data and `ZETA` observations. # read base model zeta zfile = flopy.utils.CellBudgetFile( - os.path.join(workspace, modelname + ".zta") + os.path.join(workspace, f"{modelname}.zta") ) kstpkper = zfile.get_kstpkper() zeta = [] @@ -306,13 +306,13 @@ def run(): zeta = np.array(zeta) # read swi obs zobs = np.genfromtxt( - os.path.join(workspace, modelname + ".zobs.out"), names=True + os.path.join(workspace, f"{modelname}.zobs.out"), names=True ) # Load the simulation 2 `ZETA` data and `ZETA` observations. # read saltwater well model zeta zfile2 = flopy.utils.CellBudgetFile( - os.path.join(workspace, modelname2 + ".zta") + os.path.join(workspace, f"{modelname2}.zta") ) kstpkper = zfile2.get_kstpkper() zeta2 = [] @@ -321,7 +321,7 @@ def run(): zeta2 = np.array(zeta2) # read swi obs zobs2 = np.genfromtxt( - os.path.join(workspace, modelname2 + ".zobs.out"), names=True + os.path.join(workspace, f"{modelname2}.zobs.out"), names=True ) # Create arrays for the x-coordinates and the output years @@ -365,7 +365,7 @@ def run(): drawstyle="steps-mid", linewidth=0.5, color=cc[idx], - label="{:2d} years".format(years[idx]), + label=f"{years[idx]:2d} years", ) # layer 2 ax.plot( @@ -423,7 +423,7 @@ def run(): drawstyle="steps-mid", linewidth=0.5, color=cc[idx - 5], - label="{:2d} years".format(years[idx]), + label=f"{years[idx]:2d} years", ) # layer 2 ax.plot( @@ -481,7 +481,7 @@ def run(): drawstyle="steps-mid", linewidth=0.5, color=cc[idx - 5], - label="{:2d} years".format(years[idx]), + label=f"{years[idx]:2d} years", ) # layer 2 ax.plot( @@ -580,7 +580,7 @@ def run(): size="7", ) - outfig = os.path.join(workspace, "Figure09_swi2ex4.{0}".format(fext)) + outfig = os.path.join(workspace, f"Figure09_swi2ex4.{fext}") fig.savefig(outfig, dpi=300) print("created...", outfig) diff --git a/examples/scripts/flopy_swi2_ex5.py b/examples/scripts/flopy_swi2_ex5.py index 87d6d0d7c..aeeee9c58 100755 --- a/examples/scripts/flopy_swi2_ex5.py +++ b/examples/scripts/flopy_swi2_ex5.py @@ -61,7 +61,7 @@ def run(silent=False): if os.path.isdir(fpth): continue if ".py" != os.path.splitext(f)[1].lower(): - print(" removing...{}".format(os.path.basename(f))) + print(f" removing...{os.path.basename(f)}") try: os.remove(fpth) except: @@ -144,7 +144,7 @@ def run(silent=False): for k in range(0, nlay): if zall[k] > -20.0 and zall[k + 1] <= -20: nwell = k + 1 - print("nlay={} dz={} nwell={}".format(nlay, dz, nwell)) + print(f"nlay={nlay} dz={dz} nwell={nwell}") wellQ = -2400.0 wellbtm = -20.0 wellQpm = wellQ / abs(wellbtm) @@ -243,7 +243,7 @@ def run(silent=False): get_stp = [364, 729, 1094, 1459, 364, 729, 1094, 1459] get_per = [0, 0, 0, 0, 1, 1, 1, 1] nswi_times = len(get_per) - zetafile = os.path.join(dirs[0], "{}.zta".format(modelname)) + zetafile = os.path.join(dirs[0], f"{modelname}.zta") zobj = flopy.utils.CellBudgetFile(zetafile) zeta = [] for kk in zip(get_stp, get_per): @@ -309,7 +309,7 @@ def run(silent=False): for k in range(0, nlay_swt): if bot[k, 0, 0] >= -20.0: nwell = k + 1 - print("nlay_swt={} dz={} nwell={}".format(nlay_swt, dz, nwell)) + print(f"nlay_swt={nlay_swt} dz={dz} nwell={nwell}") well_data = {} ssm_data = {} wellQ = -2400.0 @@ -531,8 +531,8 @@ def run(silent=False): for icol in range(0, ncol): for klay in range(0, 
nlay): # top and bottom of layer - ztop = float("{0:10.3e}".format(zall[klay])) - zbot = float("{0:10.3e}".format(zall[klay + 1])) + ztop = float(f"{zall[klay]:10.3e}") + zbot = float(f"{zall[klay + 1]:10.3e}") # fresh-salt zeta surface zt = zeta[itime, klay, 0, icol] if (ztop - zt) > eps: @@ -609,9 +609,9 @@ def run(silent=False): ax = axes.flatten()[isp] iyr = itime + 1 if iyr > 1: - ctxt = "{} years".format(iyr) + ctxt = f"{iyr} years" else: - ctxt = "{} year".format(iyr) + ctxt = f"{iyr} year" ax.text( 0.95, 0.925, @@ -622,7 +622,7 @@ def run(silent=False): size="8", ) - outfig = os.path.join(workspace, "Figure11_swi2ex5.{0}".format(fext)) + outfig = os.path.join(workspace, f"Figure11_swi2ex5.{fext}") xsf.savefig(outfig, dpi=300) print("created...", outfig) diff --git a/flopy/discretization/grid.py b/flopy/discretization/grid.py index 7dbc0d3c1..1bc0cb448 100644 --- a/flopy/discretization/grid.py +++ b/flopy/discretization/grid.py @@ -193,16 +193,16 @@ def __repr__(self): and self.angrot is not None ): items += [ - "xll:" + str(self.xoffset), - "yll:" + str(self.yoffset), - "rotation:" + str(self.angrot), + f"xll:{self.xoffset!s}", + f"yll:{self.yoffset!s}", + f"rotation:{self.angrot!s}", ] if self.proj4 is not None: - items.append("proj4_str:" + str(self.proj4)) + items.append(f"proj4_str:{self.proj4}") if self.units is not None: - items.append("units:" + str(self.units)) + items.append(f"units:{self.units}") if self.lenuni is not None: - items.append("lenuni:" + str(self.lenuni)) + items.append(f"lenuni:{self.lenuni}") return "; ".join(items) @property @@ -259,7 +259,7 @@ def proj4(self): else: proj4 = self._proj4 elif self.epsg is not None: - proj4 = "epsg:{}".format(self.epsg) + proj4 = f"epsg:{self.epsg}" return proj4 @proj4.setter diff --git a/flopy/discretization/structuredgrid.py b/flopy/discretization/structuredgrid.py index 14ca47a9c..8d300a273 100644 --- a/flopy/discretization/structuredgrid.py +++ b/flopy/discretization/structuredgrid.py @@ -1534,7 +1534,7 @@ def get_plottable_layer_array(self, a, layer): plotarray = plotarray[layer, :, :] else: raise Exception("Array to plot must be of dimension 1, 2, or 3") - msg = "{} /= {}".format(plotarray.shape, required_shape) + msg = f"{plotarray.shape} /= {required_shape}" assert plotarray.shape == required_shape, msg return plotarray @@ -1615,11 +1615,10 @@ def from_binary_grid_file(cls, file_path, verbose=False): grb_obj = MfGrdFile(file_path, verbose=verbose) if grb_obj.grid_type != "DIS": - err_msg = ( - "Binary grid file ({}) ".format(os.path.basename(file_path)) - + "is not a structured (DIS) grid." + raise ValueError( + f"Binary grid file ({os.path.basename(file_path)}) " + "is not a structured (DIS) grid." 
) - raise ValueError(err_msg) idomain = grb_obj.idomain xorigin = grb_obj.xorigin diff --git a/flopy/discretization/unstructuredgrid.py b/flopy/discretization/unstructuredgrid.py index 3916479ec..1955a8840 100644 --- a/flopy/discretization/unstructuredgrid.py +++ b/flopy/discretization/unstructuredgrid.py @@ -128,14 +128,13 @@ def __init__( if iverts is not None: if self.grid_varies_by_layer: - msg = "Length of iverts must equal grid nodes ({} {})".format( - len(iverts), self.nnodes + msg = ( + "Length of iverts must equal grid nodes " + f"({len(iverts)} {self.nnodes})" ) assert len(iverts) == self.nnodes, msg else: - msg = "Length of iverts must equal ncpl ({} {})".format( - len(iverts), self.ncpl - ) + msg = f"Length of iverts must equal ncpl ({len(iverts)} {self.ncpl})" assert np.all([cpl == len(iverts) for cpl in self.ncpl]), msg return @@ -809,11 +808,10 @@ def from_binary_grid_file(cls, file_path, verbose=False): grb_obj = MfGrdFile(file_path, verbose=verbose) if grb_obj.grid_type != "DISU": - err_msg = ( - "Binary grid file ({}) ".format(os.path.basename(file_path)) - + "is not a vertex (DISU) grid." + raise ValueError( + f"Binary grid file ({os.path.basename(file_path)}) " + "is not a vertex (DISU) grid." ) - raise ValueError(err_msg) iverts = grb_obj.iverts if iverts is not None: @@ -842,8 +840,7 @@ def from_binary_grid_file(cls, file_path, verbose=False): angrot=angrot, ) else: - err_msg = ( - "{} binary grid file".format(os.path.basename(file_path)) - + " does not include vertex data" + raise TypeError( + f"{os.path.basename(file_path)} binary grid file " + "does not include vertex data" ) - raise TypeError(err_msg) diff --git a/flopy/discretization/vertexgrid.py b/flopy/discretization/vertexgrid.py index b4623028f..40e855553 100644 --- a/flopy/discretization/vertexgrid.py +++ b/flopy/discretization/vertexgrid.py @@ -314,9 +314,7 @@ def get_cell_vertices(self, cellid): """ while cellid >= self.ncpl: if cellid > self.nnodes: - err = "cellid {} out of index for size {}".format( - cellid, self.nnodes - ) + err = f"cellid {cellid} out of index for size {self.nnodes}" raise IndexError(err) cellid -= self.ncpl @@ -491,7 +489,7 @@ def get_plottable_layer_array(self, a, layer): plotarray = plotarray[layer, :] else: raise Exception("Array to plot must be of dimension 1 or 2") - msg = "{} /= {}".format(plotarray.shape[0], required_shape) + msg = f"{plotarray.shape[0]} /= {required_shape}" assert plotarray.shape == required_shape, msg return plotarray @@ -518,11 +516,10 @@ def from_binary_grid_file(cls, file_path, verbose=False): grb_obj = MfGrdFile(file_path, verbose=verbose) if grb_obj.grid_type != "DISV": - err_msg = ( - "Binary grid file ({}) ".format(os.path.basename(file_path)) - + "is not a vertex (DISV) grid." + raise ValueError( + f"Binary grid file ({os.path.basename(file_path)}) " + "is not a vertex (DISV) grid." 
) - raise ValueError(err_msg) idomain = grb_obj.idomain xorigin = grb_obj.xorigin diff --git a/flopy/export/metadata.py b/flopy/export/metadata.py index 1ff420cee..d73646c29 100644 --- a/flopy/export/metadata.py +++ b/flopy/export/metadata.py @@ -40,9 +40,7 @@ def __init__(self, sciencebase_id, model): self.model_grid = model.modelgrid self.model_time = model.modeltime self.sciencebase_url = ( - "https://www.sciencebase.gov/catalog/item/{}".format( - sciencebase_id - ) + f"https://www.sciencebase.gov/catalog/item/{sciencebase_id}" ) self.sb = self.get_sciencebase_metadata(sciencebase_id) if self.sb is None: @@ -140,7 +138,7 @@ def creator(self): @property def creator_url(self): urlname = "-".join(self.creator.get("name").replace(".", "").split()) - url = "https://www.usgs.gov/staff-profiles/" + urlname.lower() + url = f"https://www.usgs.gov/staff-profiles/{urlname.lower()}" # check if it exists txt = get_url_text(url) if txt is not None: @@ -208,7 +206,7 @@ def time_coverage(self): strt = pd.Timestamp(self.model_time.start_datetime) mlen = self.model_time.perlen.sum() tunits = self.model_time.time_units - tc["duration"] = "{} {}".format(mlen, tunits) + tc["duration"] = f"{mlen} {tunits}" end = strt + pd.Timedelta(mlen, unit="d") tc["end"] = str(end) return tc diff --git a/flopy/export/netcdf.py b/flopy/export/netcdf.py index 7292f1d24..d919e9ad8 100644 --- a/flopy/export/netcdf.py +++ b/flopy/export/netcdf.py @@ -76,9 +76,7 @@ def log(self, phrase): pass t = datetime.now() if phrase in self.items.keys(): - s = "{} finished: {}, took: {}\n".format( - t, phrase, t - self.items[phrase] - ) + s = f"{t} finished: {phrase}, took: {t - self.items[phrase]}\n" if self.echo: print(s) if self.filename: @@ -158,7 +156,7 @@ def __init__( prj=None, logger=None, forgive=False, - **kwargs + **kwargs, ): assert output_filename.lower().endswith(".nc") @@ -191,7 +189,7 @@ def __init__( # self.dimension_names = ('layer', 'ncpl') else: raise Exception( - "Grid type {} not supported.".format(self.model_grid.grid_type) + f"Grid type {self.model_grid.grid_type} not supported." ) self.shape = self.model_grid.shape @@ -207,14 +205,14 @@ def __init__( self.start_datetime = self._dt_str( dateutil.parser.parse(self.model_time.start_datetime) ) - self.logger.warn("start datetime:{0}".format(str(self.start_datetime))) + self.logger.warn(f"start datetime:{self.start_datetime!s}") proj4_str = self.model_grid.proj4 if proj4_str is None: proj4_str = "epsg:4326" self.log( "Warning: model has no coordinate reference system specified. 
" - "Using default proj4 string: {}".format(proj4_str) + f"Using default proj4 string: {proj4_str}" ) self.proj4_str = proj4_str self.grid_units = self.model_grid.units @@ -253,9 +251,7 @@ def __add__(self, other): ) else: raise Exception( - "NetCdf.__add__(): unrecognized other:{0}".format( - str(type(other)) - ) + f"NetCdf.__add__(): unrecognized other:{type(other)}" ) return new_net @@ -273,9 +269,7 @@ def __sub__(self, other): ) else: raise Exception( - "NetCdf.__sub__(): unrecognized other:{0}".format( - str(type(other)) - ) + f"NetCdf.__sub__(): unrecognized other:{type(other)}" ) return new_net @@ -293,9 +287,7 @@ def __mul__(self, other): ) else: raise Exception( - "NetCdf.__mul__(): unrecognized other:{0}".format( - str(type(other)) - ) + f"NetCdf.__mul__(): unrecognized other:{type(other)}" ) return new_net @@ -318,9 +310,7 @@ def __truediv__(self, other): ) else: raise Exception( - "NetCdf.__sub__(): unrecognized other:{0}".format( - str(type(other)) - ) + f"NetCdf.__sub__(): unrecognized other:{type(other)}" ) return new_net @@ -355,7 +345,7 @@ def append(self, other, suffix="_1"): vname_norm = self.normalize_name(vname) assert ( vname_norm in self.nc.variables.keys() - ), "dict var not in self.vars:{0}-->".format(vname) + ",".join( + ), f"dict var not in self.vars:{vname}-->" + ",".join( self.nc.variables.keys() ) @@ -400,10 +390,10 @@ def zeros_like( for vname in other.var_attr_dict.keys(): if new_net.nc.variables.get(vname) is not None: new_net.logger.warn( - "variable {0} already defined, skipping".format(vname) + f"variable {vname} already defined, skipping" ) continue - new_net.log("adding variable {0}".format(vname)) + new_net.log(f"adding variable {vname}") var = other.nc.variables[vname] data = var[:] try: @@ -420,7 +410,7 @@ def zeros_like( dimensions=var.dimensions, ) new_var[:] = new_data - new_net.log("adding variable {0}".format(vname)) + new_net.log(f"adding variable {vname}") global_attrs = {} for attr in other.nc.ncattrs(): if attr not in new_net.nc.ncattrs(): @@ -438,7 +428,7 @@ def empty_like( ) while os.path.exists(output_filename): - print("{}...already exists".format(output_filename)) + print(f"{output_filename}...already exists") output_filename = ( str(time.mktime(datetime.now().timetuple())) + ".nc" ) @@ -494,14 +484,12 @@ def difference( try: import netCDF4 except Exception as e: - mess = "error import netCDF4: {0}".format(str(e)) + mess = f"error import netCDF4: {e!s}" self.logger.warn(mess) raise Exception(mess) if isinstance(other, str): - assert os.path.exists( - other - ), "filename 'other' not found:{0}".format(other) + assert os.path.exists(other), f"filename 'other' not found:{other}" other = netCDF4.Dataset(other, "r") assert isinstance(other, netCDF4.Dataset) @@ -522,12 +510,11 @@ def difference( other_dimens = other.dimensions for d in self_dimens.keys(): if d not in other_dimens: - self.logger.warn("missing dimension in other:{0}".format(d)) + self.logger.warn(f"missing dimension in other:{d}") return if len(self_dimens[d]) != len(other_dimens[d]): self.logger.warn( - "dimension not consistent: " - "{0}:{1}".format(self_dimens[d], other_dimens[d]) + f"dimension not consistent: {self_dimens[d]}:{other_dimens[d]}" ) return # should be good to go @@ -543,9 +530,9 @@ def difference( vname not in self.var_attr_dict or new_net.nc.variables.get(vname) is not None ): - self.logger.warn("skipping variable: {0}".format(vname)) + self.logger.warn(f"skipping variable: {vname}") continue - self.log("processing variable {0}".format(vname)) + 
self.log(f"processing variable {vname}") s_var = self.nc.variables[vname] o_var = other.variables[vname] s_data = s_var[:] @@ -554,7 +541,7 @@ def difference( # keep the masks to apply later if isinstance(s_data, np.ma.MaskedArray): - self.logger.warn("masked array for {0}".format(vname)) + self.logger.warn(f"masked array for {vname}") s_mask = s_data.mask s_data = np.array(s_data) s_data[s_mask] = 0.0 @@ -574,21 +561,19 @@ def difference( elif minuend.lower() == "other": d_data = o_data - s_data else: - mess = "unrecognized minuend {0}".format(minuend) + mess = f"unrecognized minuend {minuend}" self.logger.warn(mess) raise Exception(mess) # check for non-zero diffs if onlydiff and d_data.sum() == 0.0: self.logger.warn( - "var {0} has zero differences, skipping...".format(vname) + f"var {vname} has zero differences, skipping..." ) continue self.logger.warn( - "resetting diff attrs max,min:{0},{1}".format( - d_data.min(), d_data.max() - ) + f"resetting diff attrs max,min:{d_data.min()},{d_data.max()}" ) attrs = self.var_attr_dict[vname].copy() attrs["max"] = np.nanmax(d_data) @@ -614,12 +599,13 @@ def difference( ) var[:] = d_data - self.log("processing variable {0}".format(vname)) + self.log(f"processing variable {vname}") def _dt_str(self, dt): """for datetime to string for year < 1900""" - dt_str = "{0:04d}-{1:02d}-{2:02d}T{3:02d}:{4:02d}:{5:02}Z".format( - dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second + dt_str = ( + f"{dt.year:04d}-{dt.month:02d}-{dt.day:02d}T" + f"{dt.hour:02d}:{dt.minute:02d}:{dt.second:02}Z" ) return dt_str @@ -637,9 +623,7 @@ def write(self): if self.nc.attributes.get(k) is not None: self.nc.setncattr(k, v) except Exception: - self.logger.warn( - "error setting global attribute {0}".format(k) - ) + self.logger.warn(f"error setting global attribute {k}") self.nc.sync() self.nc.close() @@ -672,9 +656,7 @@ def _initialize_attributes(self): try: htol, rtol = self.model.solver_tols() except Exception as e: - self.logger.warn( - "unable to get solver tolerances:{0}".format(str(e)) - ) + self.logger.warn(f"unable to get solver tolerances:{e!s}") self.global_attributes["solver_head_tolerance"] = htol self.global_attributes["solver_flux_tolerance"] = rtol spatial_attribs = { @@ -714,9 +696,9 @@ def initialize_geometry(self): pyproj220 = LooseVersion(pyproj.__version__) >= LooseVersion("2.2.0") proj4_str = self.proj4_str - print("initialize_geometry::proj4_str = {}".format(proj4_str)) + print(f"initialize_geometry::proj4_str = {proj4_str}") - self.log("building grid crs using proj4 string: {}".format(proj4_str)) + self.log(f"building grid crs using proj4 string: {proj4_str}") if pyproj220: self.grid_crs = pyproj.CRS(proj4_str) else: @@ -724,7 +706,7 @@ def initialize_geometry(self): proj4_str = "+init=" + proj4_str self.grid_crs = pyproj.Proj(proj4_str, preserve_units=True) - print("initialize_geometry::self.grid_crs = {}".format(self.grid_crs)) + print(f"initialize_geometry::self.grid_crs = {self.grid_crs}") vmin, vmax = self.model_grid.botm.min(), self.model_grid.top.max() if self.z_positive == "down": @@ -748,12 +730,10 @@ def initialize_geometry(self): nc_crs = pyproj.Proj(nc_epsg_str) self.transformer = None - print("initialize_geometry::nc_crs = {}".format(nc_crs)) + print(f"initialize_geometry::nc_crs = {nc_crs}") if pyproj220: - print( - "transforming coordinates using = {}".format(self.transformer) - ) + print(f"transforming coordinates using = {self.transformer}") self.log("projecting grid cell center arrays") if pyproj220: @@ -804,7 +784,7 @@ def 
initialize_file(self, time_values=None): try: self.nc = netCDF4.Dataset(self.output_filename, "w") except Exception as e: - msg = "error creating netcdf dataset:\n{}".format(str(e)) + msg = f"error creating netcdf dataset:\n{e!s}" raise Exception(msg) # write some attributes @@ -812,7 +792,7 @@ def initialize_file(self, time_values=None): self.nc.setncattr( "Conventions", - "CF-1.6, ACDD-1.3, flopy {}".format(flopy.__version__), + f"CF-1.6, ACDD-1.3, flopy {flopy.__version__}", ) self.nc.setncattr( "date_created", datetime.utcnow().strftime("%Y-%m-%dT%H:%M:00Z") @@ -828,9 +808,7 @@ def initialize_file(self, time_values=None): try: self.nc.setncattr(k, v) except: - self.logger.warn( - "error setting global attribute {0}".format(k) - ) + self.logger.warn(f"error setting global attribute {k}") self.global_attributes = {} self.log("setting standard attributes") @@ -854,9 +832,7 @@ def initialize_file(self, time_values=None): self.log("setting CRS info") attribs = { - "units": "{} since {}".format( - self.time_units, self.start_datetime - ), + "units": f"{self.time_units} since {self.start_datetime}", "standard_name": "time", "long_name": NC_LONG_NAMES.get("time", "time"), "calendar": "gregorian", @@ -865,7 +841,7 @@ def initialize_file(self, time_values=None): time = self.create_variable( "time", attribs, precision_str="f8", dimensions=("time",) ) - self.logger.warn("time_values:{0}".format(str(time_values))) + self.logger.warn(f"time_values:{time_values!s}") time[:] = np.asarray(time_values) # Elevation @@ -1064,13 +1040,13 @@ def initialize_group( self.initialize_file() if group in self.nc.groups: - raise AttributeError("{} group already initialized".format(group)) + raise AttributeError(f"{group} group already initialized") - self.log("creating netcdf group {}".format(group)) + self.log(f"creating netcdf group {group}") self.nc.createGroup(group) - self.log("{} group created".format(group)) + self.log(f"{group} group created") - self.log("creating {} group dimensions".format(group)) + self.log(f"creating {group} group dimensions") for dim in dimensions: if dim == "time": if "time" not in dimension_data: @@ -1083,8 +1059,7 @@ def initialize_group( else: if dim not in dimension_data: raise AssertionError( - "{} information must be supplied " - "to dimension data".format(dim) + f"{dim} information must be supplied to dimension data" ) else: @@ -1092,14 +1067,14 @@ def initialize_group( dim, len(dimension_data[dim]) ) - self.log("created {} group dimensions".format(group)) + self.log(f"created {group} group dimensions") dim_names = tuple([i for i in dimensions if i != "time"]) for dim in dimensions: if dim.lower() == "time": if "time" not in attributes: - unit_value = "{} since {}".format( - self.time_units, self.start_datetime + unit_value = ( + f"{self.time_units} since {self.start_datetime}" ) attribs = { "units": unit_value, @@ -1202,31 +1177,29 @@ def create_group_variable( if name in self.nc.groups[group].variables.keys(): if self.forgive: self.logger.warn( - "skipping duplicate {} group variable: {}".format( - group, name - ) + f"skipping duplicate {group} group variable: {name}" ) return else: raise Exception( - "duplicate {} group variable name: {}".format(group, name) + f"duplicate {group} group variable name: {name}" ) - self.log("creating group {} variable: {}".format(group, name)) + self.log(f"creating group {group} variable: {name}") if precision_str not in PRECISION_STRS: raise AssertionError( "netcdf.create_variable() error: precision " - "string {} not in 
{}".format(precision_str, PRECISION_STRS) + f"string {precision_str} not in {PRECISION_STRS}" ) if group not in self.nc.groups: raise AssertionError( - "netcdf group `{}` must be created before " - "variables can be added to it".format(group) + f"netcdf group `{group}` must be created before " + "variables can be added to it" ) - self.var_attr_dict["{}/{}".format(group, name)] = attributes + self.var_attr_dict[f"{group}/{name}"] = attributes var = self.nc.groups[group].createVariable( name, @@ -1241,10 +1214,9 @@ def create_group_variable( var.setncattr(k, v) except: self.logger.warn( - "error setting attribute" - "{} for group {} variable {}".format(k, group, name) + f"error setting attribute{k} for group {group} variable {name}" ) - self.log("creating group {} variable: {}".format(group, name)) + self.log(f"creating group {group} variable: {name}") return var @@ -1296,14 +1268,12 @@ def create_variable( and name in self.nc.variables.keys() ): if self.forgive: - self.logger.warn( - "skipping duplicate variable: {0}".format(name) - ) + self.logger.warn(f"skipping duplicate variable: {name}") return else: - raise Exception("duplicate variable name: {0}".format(name)) + raise Exception(f"duplicate variable name: {name}") if name in self.nc.variables.keys(): - raise Exception("duplicate variable name: {0}".format(name)) + raise Exception(f"duplicate variable name: {name}") self.log("creating variable: " + str(name)) assert ( @@ -1342,8 +1312,7 @@ def create_variable( var.setncattr(k, v) except: self.logger.warn( - "error setting attribute" - "{0} for variable {1}".format(k, name) + f"error setting attribute{k} for variable {name}" ) self.log("creating variable: " + str(name)) return var diff --git a/flopy/export/shapefile_utils.py b/flopy/export/shapefile_utils.py index 4c503361c..c3b30e69b 100755 --- a/flopy/export/shapefile_utils.py +++ b/flopy/export/shapefile_utils.py @@ -143,7 +143,7 @@ def write_grid_shapefile( elif mg.grid_type == "unstructured": verts = [mg.get_cell_vertices(cellid) for cellid in range(mg.nnodes)] else: - raise Exception("Grid type {} not supported.".format(mg.grid_type)) + raise Exception(f"Grid type {mg.grid_type} not supported.") # set up the attribute fields and arrays of attributes if mg.grid_type == "structured": @@ -212,7 +212,7 @@ def write_grid_shapefile( # close w.close() - print("wrote {}".format(filename)) + print(f"wrote {filename}") # write the projection file write_prj(filename, mg, epsg, prj) return @@ -309,9 +309,8 @@ def model_attributes_to_shapefile( assert a.array is not None except: print( - "Failed to get data for {} array, {} package".format( - a.name, pak.name[0] - ) + "Failed to get data for " + f"{a.name} array, {pak.name[0]} package" ) continue if isinstance(a.name, list) and a.name[0] == "thickness": @@ -336,7 +335,7 @@ def model_attributes_to_shapefile( # fix for mf6 case arr = arr[0] assert arr.shape == horz_shape - name = "{}_{}".format(aname, ilay + 1) + name = f"{aname}_{ilay + 1}" array_dict[name] = arr elif ( a.data_type == DataType.transient2d @@ -346,13 +345,12 @@ def model_attributes_to_shapefile( assert a.array is not None except: print( - "Failed to get data for {} array, {} package".format( - a.name, pak.name[0] - ) + "Failed to get data for " + f"{a.name} array, {pak.name[0]} package" ) continue for kper in range(a.array.shape[0]): - name = "{}{}".format(shape_attr_name(a.name), kper + 1) + name = f"{shape_attr_name(a.name)}{kper + 1}" arr = a.array[kper][0] assert arr.shape == horz_shape array_dict[name] = arr @@ -367,7 
+365,7 @@ def model_attributes_to_shapefile( for kper in range(array.shape[0]): for k in range(array.shape[1]): n = shape_attr_name(name, length=4) - aname = "{}{}{}".format(n, k + 1, kper + 1) + aname = f"{n}{k + 1}{kper + 1}" arr = array[kper][k] assert arr.shape == horz_shape if np.all(np.isnan(arr)): @@ -381,9 +379,8 @@ def model_attributes_to_shapefile( ): for ilay in range(a.model.modelgrid.nlay): u2d = a[ilay] - name = "{}_{}".format( - shape_attr_name(u2d.name), - ilay + 1, + name = ( + f"{shape_attr_name(u2d.name)}_{ilay + 1}" ) arr = u2d.array assert arr.shape == horz_shape @@ -537,7 +534,7 @@ def recarray2shp( mg=None, epsg=None, prj=None, - **kwargs + **kwargs, ): """ Write a numpy record array to a shapefile, using a corresponding @@ -624,7 +621,7 @@ def recarray2shp( w.close() write_prj(shpname, mg, epsg, prj) - print("wrote {}".format(shpname)) + print(f"wrote {shpname}") return @@ -849,7 +846,7 @@ def _getvalue(self, k): def _getgcsparam(self, txt): nvalues = 3 if txt.lower() == "spheroid" else 2 - tmp = self._gettxt('{}["'.format(txt), "]") + tmp = self._gettxt(f'{txt}["', "]") if tmp is not None: tmp = tmp.replace('"', "").split(",") name = tmp[0:1] @@ -921,7 +918,7 @@ def get_spatialreference(epsg, text="esriwkt"): ) urls = [] for cat in epsg_categories: - url = "{}/ref/{}/{}/{}/".format(srefhttp, cat, epsg, text) + url = f"{srefhttp}/ref/{cat}/{epsg}/{text}/" urls.append(url) result = get_url_text(url) if result is not None: @@ -930,16 +927,15 @@ def get_spatialreference(epsg, text="esriwkt"): return result.replace("\n", "") elif result is None and text != "epsg": error_msg = ( - "No internet connection or " - "epsg code {} not found at:\n".format(epsg) + f"No internet connection or epsg code {epsg} not found at:\n" ) for idx, url in enumerate(urls): - error_msg += " {:>2d}: {}\n".format(idx + 1, url) + error_msg += f" {idx + 1:>2d}: {url}\n" print(error_msg) # epsg code not listed on spatialreference.org # may still work with pyproj elif text == "epsg": - return "epsg:{}".format(epsg) + return f"epsg:{epsg}" @staticmethod def getproj4(epsg): @@ -1008,10 +1004,10 @@ def _write(self, data): def reset(self, verbose=True): if os.path.exists(self.location): if verbose: - print("Resetting {}".format(self.location)) + print(f"Resetting {self.location}") os.remove(self.location) elif verbose: - print("{} does not exist, no reset required".format(self.location)) + print(f"{self.location} does not exist, no reset required") def add(self, epsg, prj): """ @@ -1042,4 +1038,4 @@ def show(): ep = EpsgReference() prj = ep.to_dict() for k, v in prj.items(): - print("{}:\n{}\n".format(k, v)) + print(f"{k}:\n{v}\n") diff --git a/flopy/export/utils.py b/flopy/export/utils.py index bd4895cff..0ded8e005 100644 --- a/flopy/export/utils.py +++ b/flopy/export/utils.py @@ -25,9 +25,9 @@ } path = os.path.split(netcdf.__file__)[0] -with open(path + "/longnames.json") as f: +with open(f"{path}/longnames.json") as f: NC_LONG_NAMES = json.load(f) -with open(path + "/unitsformat.json") as f: +with open(f"{path}/unitsformat.json") as f: NC_UNITS_FORMAT = json.load(f) @@ -87,7 +87,7 @@ def ensemble_helper( outputs_filename, models[0], models[0].load_results(as_dict=True), - **kwargs + **kwargs, ) vdict = {} vdicts = [ @@ -95,7 +95,7 @@ def ensemble_helper( vdict, models[0], models[0].load_results(as_dict=True), - **kwargs + **kwargs, ) ] i = 1 @@ -149,7 +149,7 @@ def _add_output_nc_variable( mask_array3d=None, ): if logger: - logger.log("creating array for {0}".format(var_name)) + logger.log(f"creating 
array for {var_name}") array = np.zeros( (len(times), shape3d[0], shape3d[1], shape3d[2]), dtype=np.float32 @@ -203,7 +203,7 @@ def _add_output_nc_variable( continue if logger: - logger.log("creating array for {0}".format(var_name)) + logger.log(f"creating array for {var_name}") for mask_val in mask_vals: array[np.where(array == mask_val)] = np.NaN @@ -238,7 +238,7 @@ def _add_output_nc_variable( dimensions=dim_tuple, ) except Exception as e: - estr = "error creating variable {0}:\n{1}".format(var_name, str(e)) + estr = f"error creating variable {var_name}:\n{e!s}" if logger: logger.lraise(estr) else: @@ -247,9 +247,7 @@ def _add_output_nc_variable( try: var[:] = array except Exception as e: - estr = "error setting array to variable {0}:\n{1}".format( - var_name, str(e) - ) + estr = f"error setting array to variable {var_name}:\n{e!s}" if logger: logger.lraise(estr) else: @@ -274,16 +272,16 @@ def _add_output_nc_zonebudget_variable(f, array, var_name, flux, logger=None): """ if logger: - logger.log("creating array for {}".format(var_name)) + logger.log(f"creating array for {var_name}") mn = np.min(array) mx = np.max(array) precision_str = "f4" if flux: - units = "{}^3/{}".format(f.grid_units, f.time_units) + units = f"{f.grid_units}^3/{f.time_units}" else: - units = "{}^3".format(f.grid_units) + units = f"{f.grid_units}^3" attribs = {"long_name": var_name} attribs["coordinates"] = "time zone" attribs["min"] = mn @@ -339,7 +337,7 @@ def output_helper(f, ml, oudic, **kwargs): mask_vals = kwargs.pop("masked_vals") if len(kwargs) > 0 and logger is not None: str_args = ",".join(kwargs) - logger.warn("unused kwargs: " + str_args) + logger.warn(f"unused kwargs: {str_args}") zonebud = None zbkey = None @@ -355,9 +353,7 @@ def output_helper(f, ml, oudic, **kwargs): # that they will line up for key in oudic.keys(): out = oudic[key] - times = [ - float("{0:15.6f}".format(t)) for t in out.recordarray["totim"] - ] + times = [float(f"{t:15.6f}") for t in out.recordarray["totim"]] out.recordarray["totim"] = times times = [] @@ -395,7 +391,7 @@ def output_helper(f, ml, oudic, **kwargs): if len(skipped_times) > 0: msg = ( "the following output times are not common to all " - "output files and are being skipped:\n{0}".format(skipped_times) + f"output files and are being skipped:\n{skipped_times}" ) if logger: logger.warn(msg) @@ -475,7 +471,7 @@ def output_helper(f, ml, oudic, **kwargs): ) else: - estr = "unrecognized file extension:{0}".format(filename) + estr = f"unrecognized file extension:{filename}" if logger: logger.lraise(estr) else: @@ -534,7 +530,7 @@ def output_helper(f, ml, oudic, **kwargs): continue if mflay is not None and k != mflay: continue - name = attrib_name + "{}_{}".format(per, k) + name = f"{attrib_name}{per}_{k}" attrib_dict[name] = plotarray[per][k] elif isinstance(out_obj, CellBudgetFile): @@ -560,14 +556,14 @@ def output_helper(f, ml, oudic, **kwargs): continue if mflay is not None and k != mflay: continue - name = attrib_name + "{}_{}".format(per, k) + name = f"{attrib_name}{per}_{k}" attrib_dict[name] = plotarray[per][k] if attrib_dict: shapefile_utils.write_grid_shapefile(f, ml.modelgrid, attrib_dict) else: - msg = "unrecognized export argument:{0}".format(f) + msg = f"unrecognized export argument:{f}" if logger: logger.lraise(msg) else: @@ -647,7 +643,7 @@ def model_export(f, ml, fmt=None, **kwargs): ) else: - raise NotImplementedError("unrecognized export argument:{0}".format(f)) + raise NotImplementedError(f"unrecognized export argument:{f}") return f @@ -700,9 +696,7 @@ def 
package_export(f, pak, fmt=None, **kwargs): try: f = array2d_export(f, a, **kwargs) except: - f.logger.warn( - "error adding {0} as variable".format(a.name) - ) + f.logger.warn(f"error adding {a.name} as variable") elif a.data_type == DataType.array3d: f = array3d_export(f, a, **kwargs) elif a.data_type == DataType.transient2d: @@ -741,7 +735,7 @@ def package_export(f, pak, fmt=None, **kwargs): ) else: - raise NotImplementedError("unrecognized export argument:{0}".format(f)) + raise NotImplementedError(f"unrecognized export argument:{f}") def generic_array_export( @@ -751,7 +745,7 @@ def generic_array_export( dimensions=("time", "layer", "y", "x"), precision_str="f4", units="unitless", - **kwargs + **kwargs, ): """ Method to export a generic array to NetCdf @@ -806,7 +800,7 @@ def generic_array_export( attribs["min"] = mn attribs["max"] = mx if np.isnan(attribs["min"]) or np.isnan(attribs["max"]): - raise Exception("error processing {0}: all NaNs".format(var_name)) + raise Exception(f"error processing {var_name}: all NaNs") try: var = f.create_variable( var_name, @@ -815,15 +809,13 @@ def generic_array_export( dimensions=dimensions, ) except Exception as e: - estr = "error creating variable {0}:\n{1}".format(var_name, str(e)) + estr = f"error creating variable {var_name}:\n{e!s}" f.logger.warn(estr) raise Exception(estr) try: var[:] = array except Exception as e: - estr = "error setting array to variable {0}:\n{1}".format( - var_name, str(e) - ) + estr = f"error setting array to variable {var_name}:\n{e!s}" f.logger.warn(estr) raise Exception(estr) return f @@ -883,7 +875,7 @@ def mflist_export(f, mfl, **kwargs): for k in range(array.shape[0]): # aname = name+"{0:03d}_{1:02d}".format(kk, k) n = shapefile_utils.shape_attr_name(name, length=4) - aname = "{}{}{}".format(n, k + 1, int(kk) + 1) + aname = f"{n}{k + 1}{int(kk) + 1}" array_dict[aname] = array[k] shapefile_utils.write_grid_shapefile(f, modelgrid, array_dict) else: @@ -917,11 +909,11 @@ def mflist_export(f, mfl, **kwargs): # for name, array in m4d.items(): for name, array in mfl.masked_4D_arrays_itr(): - var_name = base_name + "_" + name + var_name = f"{base_name}_{name}" if isinstance(f, dict): f[var_name] = array continue - f.log("processing {0} attribute".format(name)) + f.log(f"processing {name} attribute") units = None if var_name in NC_UNITS_FORMAT: @@ -937,9 +929,7 @@ def mflist_export(f, mfl, **kwargs): attribs["min"] = np.nanmin(array) attribs["max"] = np.nanmax(array) if np.isnan(attribs["min"]) or np.isnan(attribs["max"]): - raise Exception( - "error processing {0}: all NaNs".format(var_name) - ) + raise Exception(f"error processing {var_name}: all NaNs") if units is not None: attribs["units"] = units @@ -952,9 +942,7 @@ def mflist_export(f, mfl, **kwargs): dimensions=dim_tuple, ) except Exception as e: - estr = "error creating variable {0}:\n{1}".format( - var_name, str(e) - ) + estr = f"error creating variable {var_name}:\n{e!s}" f.logger.warn(estr) raise Exception(estr) @@ -962,16 +950,14 @@ def mflist_export(f, mfl, **kwargs): try: var[:] = array except Exception as e: - estr = "error setting array to variable {0}:\n{1}".format( - var_name, str(e) - ) + estr = f"error setting array to variable {var_name}:\n{e!s}" f.logger.warn(estr) raise Exception(estr) - f.log("processing {0} attribute".format(name)) + f.log(f"processing {name} attribute") return f else: - raise NotImplementedError("unrecognized export argument:{0}".format(f)) + raise NotImplementedError(f"unrecognized export argument:{f}") def transient2d_export(f, 
t2d, fmt=None, **kwargs): @@ -1015,9 +1001,7 @@ def transient2d_export(f, t2d, fmt=None, **kwargs): array_dict = {} for kper in range(t2d.model.modeltime.nper): u2d = t2d[kper] - name = "{}_{}".format( - shapefile_utils.shape_attr_name(u2d.name), kper + 1 - ) + name = f"{shapefile_utils.shape_attr_name(u2d.name)}_{kper + 1}" array_dict[name] = u2d.array shapefile_utils.write_grid_shapefile(f, modelgrid, array_dict) @@ -1076,7 +1060,7 @@ def transient2d_export(f, t2d, fmt=None, **kwargs): attribs["min"] = mn attribs["max"] = mx if np.isnan(attribs["min"]) or np.isnan(attribs["max"]): - raise Exception("error processing {0}: all NaNs".format(var_name)) + raise Exception(f"error processing {var_name}: all NaNs") try: dim_tuple = ("time",) + f.dimension_names var = f.create_variable( @@ -1086,15 +1070,13 @@ def transient2d_export(f, t2d, fmt=None, **kwargs): dimensions=dim_tuple, ) except Exception as e: - estr = "error creating variable {0}:\n{1}".format(var_name, str(e)) + estr = f"error creating variable {var_name}:\n{e!s}" f.logger.warn(estr) raise Exception(estr) try: var[:, 0] = array except Exception as e: - estr = "error setting array to variable {0}:\n{1}".format( - var_name, str(e) - ) + estr = f"error setting array to variable {var_name}:\n{e!s}" f.logger.warn(estr) raise Exception(estr) return f @@ -1123,7 +1105,7 @@ def transient2d_export(f, t2d, fmt=None, **kwargs): kpers=kpers, ) else: - raise NotImplementedError("unrecognized export argument:{0}".format(f)) + raise NotImplementedError(f"unrecognized export argument:{f}") def array3d_export(f, u3d, fmt=None, **kwargs): @@ -1170,9 +1152,7 @@ def array3d_export(f, u3d, fmt=None, **kwargs): else: dname = u2d.name array = u2d.array - name = "{}_{}".format( - shapefile_utils.shape_attr_name(dname), ilay + 1 - ) + name = f"{shapefile_utils.shape_attr_name(dname)}_{ilay + 1}" array_dict[name] = array shapefile_utils.write_grid_shapefile(f, modelgrid, array_dict) @@ -1250,7 +1230,7 @@ def array3d_export(f, u3d, fmt=None, **kwargs): attribs["min"] = mn attribs["max"] = mx if np.isnan(attribs["min"]) or np.isnan(attribs["max"]): - raise Exception("error processing {0}: all NaNs".format(var_name)) + raise Exception(f"error processing {var_name}: all NaNs") try: var = f.create_variable( var_name, @@ -1259,15 +1239,13 @@ def array3d_export(f, u3d, fmt=None, **kwargs): dimensions=f.dimension_names, ) except Exception as e: - estr = "error creating variable {0}:\n{1}".format(var_name, str(e)) + estr = f"error creating variable {var_name}:\n{e!s}" f.logger.warn(estr) raise Exception(estr) try: var[:] = array except Exception as e: - estr = "error setting array to variable {0}:\n{1}".format( - var_name, str(e) - ) + estr = f"error setting array to variable {var_name}:\n{e!s}" f.logger.warn(estr) raise Exception(estr) return f @@ -1298,7 +1276,7 @@ def array3d_export(f, u3d, fmt=None, **kwargs): ) else: - raise NotImplementedError("unrecognized export argument:{0}".format(f)) + raise NotImplementedError(f"unrecognized export argument:{f}") def array2d_export(f, u2d, fmt=None, **kwargs): @@ -1395,7 +1373,7 @@ def array2d_export(f, u2d, fmt=None, **kwargs): attribs["min"] = mn attribs["max"] = mx if np.isnan(attribs["min"]) or np.isnan(attribs["max"]): - raise Exception("error processing {0}: all NaNs".format(var_name)) + raise Exception(f"error processing {var_name}: all NaNs") try: var = f.create_variable( var_name, @@ -1404,15 +1382,13 @@ def array2d_export(f, u2d, fmt=None, **kwargs): dimensions=f.dimension_names[1:], ) except Exception as e: - 
estr = "error creating variable {0}:\n{1}".format(var_name, str(e)) + estr = f"error creating variable {var_name}:\n{e!s}" f.logger.warn(estr) raise Exception(estr) try: var[:] = array except Exception as e: - estr = "error setting array to variable {0}:\n{1}".format( - var_name, str(e) - ) + estr = f"error setting array to variable {var_name}:\n{e!s}" f.logger.warn(estr) raise Exception(estr) return f @@ -1442,7 +1418,7 @@ def array2d_export(f, u2d, fmt=None, **kwargs): ) else: - raise NotImplementedError("unrecognized export argument:{0}".format(f)) + raise NotImplementedError(f"unrecognized export argument:{f}") def export_array( @@ -1523,18 +1499,18 @@ def export_array( ) # enforce .asc ending nrow, ncol = a.shape a[np.isnan(a)] = nodata - txt = "ncols {:d}\n".format(ncol) - txt += "nrows {:d}\n".format(nrow) - txt += "xllcorner {:f}\n".format(xoffset) - txt += "yllcorner {:f}\n".format(yoffset) - txt += "cellsize {}\n".format(cellsize) + txt = f"ncols {ncol}\n" + txt += f"nrows {nrow}\n" + txt += f"xllcorner {xoffset:f}\n" + txt += f"yllcorner {yoffset:f}\n" + txt += f"cellsize {cellsize}\n" # ensure that nodata fmt consistent w values - txt += "NODATA_value {}\n".format(fmt) % (nodata) + txt += f"NODATA_value {fmt}\n" % (nodata) with open(filename, "w") as output: output.write(txt) with open(filename, "ab") as output: np.savetxt(output, a, **kwargs) - print("wrote {}".format(filename)) + print(f"wrote {filename}") elif filename.lower().endswith(".tif"): if ( @@ -1575,7 +1551,7 @@ def export_array( elif a.dtype.name == "float32": dtype = rasterio.float32 else: - msg = 'ERROR: invalid dtype "{}"'.format(a.dtype.name) + msg = f'ERROR: invalid dtype "{a.dtype.name}"' raise TypeError(msg) meta = { @@ -1591,7 +1567,7 @@ def export_array( meta.update(kwargs) with rasterio.open(filename, "w", **meta) as dst: dst.write(a) - print("wrote {}".format(filename)) + print(f"wrote {filename}") elif filename.lower().endswith(".shp"): from ..export.shapefile_utils import write_grid_shapefile @@ -1617,7 +1593,7 @@ def export_contours( fieldname="level", epsg=None, prj=None, - **kwargs + **kwargs, ): """ Convert matplotlib contour plot object to shapefile. @@ -1754,7 +1730,7 @@ def export_contourf( pg = Polygon([(x, y) for x, y in zip(xa, ya)], interiors=interiors) geoms += [pg] - print("Writing {} polygons".format(len(level))) + print(f"Writing {len(level)} polygons") # Create recarray ra = np.array(level, dtype=[(fieldname, float)]).view(np.recarray) @@ -1773,7 +1749,7 @@ def export_array_contours( maxlevels=1000, epsg=None, prj=None, - **kwargs + **kwargs, ): """ Contour an array using matplotlib; write shapefile of contours. 
@@ -1816,9 +1792,7 @@ def export_array_contours( imin = np.nanmin(a) imax = np.nanmax(a) nlevels = np.round(np.abs(imax - imin) / interval, 2) - msg = "{:.0f} levels at interval of {} > maxlevels={}".format( - nlevels, interval, maxlevels - ) + msg = f"{nlevels:.0f} levels at interval of {interval} > maxlevels={maxlevels}" assert nlevels < maxlevels, msg levels = np.arange(imin, imax, interval) ax = plt.subplots()[-1] diff --git a/flopy/export/vtk.py b/flopy/export/vtk.py index 37862cf3e..898fcd572 100644 --- a/flopy/export/vtk.py +++ b/flopy/export/vtk.py @@ -83,7 +83,7 @@ def open_element(self, tag): self.write_string(">") indent = self.indent_level * self.indent_char self.indent_level += 1 - tag_string = "\n" + indent + "<%s" % tag + tag_string = f"\n{indent}<{tag}" self.write_string(tag_string) self.open_tag = True self.current.append(tag) @@ -97,7 +97,7 @@ def close_element(self, tag=None): self.write_string(">") self.open_tag = False indent = self.indent_level * self.indent_char - tag_string = "\n" + indent + "" % tag + tag_string = f"\n{indent}" self.write_string(tag_string) else: self.write_string("/>") @@ -108,7 +108,7 @@ def close_element(self, tag=None): def add_attributes(self, **kwargs): assert self.open_tag for key in kwargs: - st = ' %s="%s"' % (key, kwargs[key]) + st = f' {key}="{kwargs[key]}"' self.write_string(st) return self @@ -297,7 +297,7 @@ def write_array(self, array, actwcells=None, **kwargs): def _write_size(self, block_size): # size is a 64 bit unsigned integer - byte_order = self.byte_order + "Q" + byte_order = f"{self.byte_order}Q" block_size = struct.pack(byte_order, block_size) self.f.write(block_size) @@ -529,8 +529,7 @@ def _vtk_grid_type(self, vtk_grid_type="auto"): ] if not any(vtk_grid_type in s for s in allowable_types): raise ValueError( - '"' + vtk_grid_type + '" is not a correct ' - "vtk_grid_type." + f'"{vtk_grid_type}" is not a correct vtk_grid_type.' 
) if ( vtk_grid_type == "ImageData" @@ -731,7 +730,7 @@ def write(self, output_file, timeval=None): # output file output_file = output_file + self.file_extension if self.verbose: - print("Writing vtk file: " + output_file) + print(f"Writing vtk file: {output_file}") # initialize xml file if self.binary: @@ -777,9 +776,7 @@ def write(self, output_file, timeval=None): npoints = ncells * 8 if self.verbose: print( - "Number of point is {}, Number of cells is {}\n".format( - npoints, ncells - ) + f"Number of point is {npoints}, Number of cells is {ncells}\n" ) # piece @@ -819,11 +816,11 @@ def write(self, output_file, timeval=None): elif self.vtk_grid_type == "ImageData": # note: in vtk, "extent" actually means indices of grid lines - vtk_extent_str = "0 {} 0 {} 0 {}".format(self.nx, self.ny, self.nz) + vtk_extent_str = f"0 {self.nx} 0 {self.ny} 0 {self.nz}" xml.add_attributes(WholeExtent=vtk_extent_str) grid_extent = self.modelgrid.xyzextent - vtk_origin_str = "{} {} {}".format( - grid_extent[0], grid_extent[2], grid_extent[4] + vtk_origin_str = ( + f"{grid_extent[0]} {grid_extent[2]} {grid_extent[4]}" ) xml.add_attributes(Origin=vtk_origin_str) vtk_spacing_str = "{} {} {}".format( @@ -838,7 +835,7 @@ def write(self, output_file, timeval=None): elif self.vtk_grid_type == "RectilinearGrid": # note: in vtk, "extent" actually means indices of grid lines - vtk_extent_str = "0 {} 0 {} 0 {}".format(self.nx, self.ny, self.nz) + vtk_extent_str = f"0 {self.nx} 0 {self.ny} 0 {self.nz}" xml.add_attributes(WholeExtent=vtk_extent_str) # piece @@ -1328,7 +1325,7 @@ def export_cbc( os.mkdir(otfolder) # set up the pvd file to make the output files time enabled - pvdfilename = model.name + "_CBC.pvd" + pvdfilename = f"{model.name}_CBC.pvd" pvdfile = open(os.path.join(otfolder, pvdfilename), "w") pvdfile.write( @@ -1393,15 +1390,14 @@ def export_cbc( addarray = False count = 1 for kstpkper_i in kstpkper: - ot_base = "{}_CBC_KPER{}_KSTP{}".format( - model_name, kstpkper_i[1] + 1, kstpkper_i[0] + 1 + ot_base = ( + f"{model_name}_CBC_KPER{kstpkper_i[1] + 1}_KSTP{kstpkper_i[0] + 1}" ) otfile = os.path.join(otfolder, ot_base) pvdfile.write( - """\n""".format( - count, ot_base - ) + f""" +""" ) for name in keylist: @@ -1519,7 +1515,7 @@ def export_heads( os.mkdir(otfolder) # start writing the pvd file to make the data time aware - pvdfilename = model.name + "_" + text + ".pvd" + pvdfilename = f"{model.name}_{text}.pvd" pvdfile = open(os.path.join(otfolder, pvdfilename), "w") pvdfile.write( @@ -1562,17 +1558,17 @@ def export_heads( for kstpkper_i in kstpkper: hdarr = hds.get_data(kstpkper_i) vtk.add_array(text, hdarr) - ot_base = ("{}_" + text + "_KPER{}_KSTP{}").format( - model.name, kstpkper_i[1] + 1, kstpkper_i[0] + 1 + ot_base = ( + f"{model.name}_{text}_" + f"KPER{kstpkper_i[1] + 1}_KSTP{kstpkper_i[0] + 1}" ) otfile = os.path.join(otfolder, ot_base) # vtk.write(otfile, timeval=totim_dict[(kstp, kper)]) vtk.write(otfile) pvdfile.write( - """\n""".format( - count, ot_base - ) + f""" +""" ) count += 1 @@ -1822,7 +1818,7 @@ def export_transient( vtk.add_array(name, t2d_array_input, array2d=True) - otname = "{}{}0{}".format(name, separator, kper + 1) + otname = f"{name}{separator}0{kper + 1}" otfile = os.path.join(output_folder, otname) vtk.write(otfile, timeval=to_tim[kper]) @@ -1830,7 +1826,7 @@ def export_transient( for kper in kpers: vtk.add_array(name, array[kper]) - otname = "{}{}0{}".format(name, separator, kper + 1) + otname = f"{name}{separator}0{kper + 1}" otfile = os.path.join(output_folder, otname) 
vtk.write(otfile, timeval=to_tim[kper]) return @@ -2047,9 +2043,7 @@ def export_package( # else: # time = None # set up output file - otfile = os.path.join( - otfolder, "{}_0{}".format(pak_name, kper + 1) - ) + otfile = os.path.join(otfolder, f"{pak_name}_0{kper + 1}") for name, array in sorted(array_dict.items()): if array.array2d: array_shape = array.array.shape diff --git a/flopy/mbase.py b/flopy/mbase.py index c4f7a166e..415097fda 100644 --- a/flopy/mbase.py +++ b/flopy/mbase.py @@ -266,8 +266,7 @@ def _check(self, chk, level=1): chk.summary_array, r.summary_array ).view(np.recarray) chk.passed += [ - "{} package: {}".format(r.package.name[0], psd) - for psd in r.passed + f"{r.package.name[0]} package: {psd}" for psd in r.passed ] chk.summarize() return chk @@ -311,7 +310,7 @@ def __init__( model_ws=None, structured=True, verbose=False, - **kwargs + **kwargs, ): """Initialize BaseModel.""" super().__init__() @@ -331,9 +330,8 @@ def __init__( os.makedirs(model_ws) except: print( - "\n{0:s} not valid, workspace-folder was changed to {1:s}\n".format( - model_ws, os.getcwd() - ) + f"\n{model_ws} not valid, " + f"workspace-folder was changed to {os.getcwd()}\n" ) model_ws = os.getcwd() self._model_ws = model_ws @@ -568,8 +566,7 @@ def add_package(self, p): pn = p.name if self.verbose: print( - "\nWARNING:\n unit {} of package {} " - "already in use.".format(u, pn) + f"\nWARNING:\n unit {u} of package {pn} already in use." ) self.package_units.append(u) for i, pp in enumerate(self.packagelist): @@ -579,7 +576,7 @@ def add_package(self, p): if self.verbose: print( "\nWARNING:\n Two packages of the same type, " - "Replacing existing '{}' package.".format(p.name[0]) + f"Replacing existing '{p.name[0]}' package." ) self.packagelist[i] = p return @@ -690,10 +687,10 @@ def _output_msg(self, i, add=True): else: txt1 = "Removing" txt2 = "from" - msg = "{} {} (unit={}) {} the output list.".format( - txt1, self.output_fnames[i], self.output_units[i], txt2 + print( + f"{txt1} {self.output_fnames[i]} (unit={self.output_units[i]}) " + f"{txt2} the output list." ) - print(msg) def add_output_file( self, unit, fname=None, extension="cbc", binflag=True, package=None @@ -743,7 +740,7 @@ def add_output_file( if add_cbc: if fname is None: - fname = self.name + "." 
+ extension + fname = f"{self.name}.{extension}" # check if this file name exists for a different unit number if fname in self.output_fnames: idx = self.output_fnames.index(fname) @@ -752,12 +749,10 @@ def add_output_file( # include unit number in fname if package has # not been passed if package is None: - fname = self.name + ".{}.".format(unit) + extension + fname = f"{self.name}.{unit}.{extension}" # include package name in fname else: - fname = ( - self.name + ".{}.".format(package) + extension - ) + fname = f"{self.name}.{package}.{extension}" else: fname = os.path.basename(fname) self.add_output(fname, unit, binflag=binflag, package=package) @@ -781,11 +776,10 @@ def add_output(self, fname, unit, binflag=False, package=None): """ if fname in self.output_fnames: if self.verbose: - msg = ( + print( "BaseModel.add_output() warning: " - "replacing existing filename {}".format(fname) + f"replacing existing filename {fname}" ) - print(msg) idx = self.output_fnames.index(fname) if self.verbose: self._output_msg(idx, add=False) @@ -973,11 +967,10 @@ def add_external(self, fname, unit, binflag=False, output=False): """ if fname in self.external_fnames: if self.verbose: - msg = ( + print( "BaseModel.add_external() warning: " - "replacing existing filename {}".format(fname) + f"replacing existing filename {fname}" ) - print(msg) idx = self.external_fnames.index(fname) self.external_fnames.pop(idx) self.external_units.pop(idx) @@ -985,10 +978,7 @@ def add_external(self, fname, unit, binflag=False, output=False): self.external_output.pop(idx) if unit in self.external_units: if self.verbose: - msg = ( - "BaseModel.add_external() warning: " - "replacing existing unit {}".format(unit) - ) + msg = f"BaseModel.add_external() warning: replacing existing unit {unit}" print(msg) idx = self.external_units.index(unit) self.external_fnames.pop(idx) @@ -1099,11 +1089,7 @@ def get_name_file_entries(self): for i in range(len(p.name)): if p.unit_number[i] == 0: continue - s = "{:14s} {:5d} {}".format( - p.name[i], - p.unit_number[i], - p.file_name[i], - ) + s = f"{p.name[i]:14s} {p.unit_number[i]:5d} {p.file_name[i]}" if p.extra[i]: s += " " + p.extra[i] lines.append(s) @@ -1162,18 +1148,18 @@ def set_version(self, version): # check that this is a valid model version if self.version not in list(self.version_types.keys()): err = ( - "Error: Unsupported model version ({}).".format(self.version) - + " Valid model versions are:" + f"Error: Unsupported model version ({self.version}). " + "Valid model versions are:" ) for v in list(self.version_types.keys()): - err += " {}".format(v) + err += f" {v}" raise Exception(err) # set namefile heading - heading = "# Name file for {}, generated by Flopy version {}.".format( - self.version_types[self.version], __version__ + self.heading = ( + f"# Name file for {self.version_types[self.version]}, " + f"generated by Flopy version {__version__}." 
) - self.heading = heading # set heading for each package for p in self.get_package_list(): @@ -1205,10 +1191,10 @@ def change_model_ws(self, new_pth=None, reset_external=False): new_pth = os.getcwd() if not os.path.exists(new_pth): try: - print("\ncreating model workspace...\n {}".format(new_pth)) + print(f"\ncreating model workspace...\n {new_pth}") os.makedirs(new_pth) except: - raise OSError("{} not valid, workspace-folder".format(new_pth)) + raise OSError(f"{new_pth} not valid, workspace-folder") # line = '\n{} not valid, workspace-folder '.format(new_pth) + \ # 'was changed to {}\n'.format(os.getcwd()) # print(line) @@ -1217,7 +1203,7 @@ def change_model_ws(self, new_pth=None, reset_external=False): # --reset the model workspace old_pth = self._model_ws self._model_ws = new_pth - line = "\nchanging model workspace...\n {}\n".format(new_pth) + line = f"\nchanging model workspace...\n {new_pth}\n" sys.stdout.write(line) # reset the paths for each package for pp in self.packagelist: @@ -1384,9 +1370,7 @@ def write_input(self, SelPackList=False, check=False): """ if check: # run check prior to writing input - self.check( - f="{}.chk".format(self.name), verbose=self.verbose, level=1 - ) + self.check(f=f"{self.name}.chk", verbose=self.verbose, level=1) # reset the model to free_format if parameter substitution was # performed on a model load @@ -1694,22 +1678,18 @@ def run_model( exe = which(exe_name + ".exe") if exe is None: raise Exception( - "The program {} does not exist or is not executable.".format( - exe_name - ) + f"The program {exe_name} does not exist or is not executable." ) else: if not silent: print( - "FloPy is using the following " - "executable to run the model: {}".format(exe) + f"FloPy is using the following executable to run the model: {exe}" ) if namefile is not None: if not os.path.isfile(os.path.join(model_ws, namefile)): raise Exception( - "The namefile for this model does not exists: " - "{}".format(namefile) + f"The namefile for this model does not exists: {namefile}" ) # simple little function for the thread to target @@ -1775,7 +1755,7 @@ def q_output(output, q): now = datetime.now() dt = now - last tsecs = dt.total_seconds() - lastsec - line = "(elapsed:{0})-->{1}".format(tsecs, line) + line = f"(elapsed:{tsecs})-->{line}" lastsec = tsecs + lastsec buff.append(line) if not silent: diff --git a/flopy/mf6/coordinates/modelgrid.py b/flopy/mf6/coordinates/modelgrid.py index f3d2e1c52..da9867dda 100644 --- a/flopy/mf6/coordinates/modelgrid.py +++ b/flopy/mf6/coordinates/modelgrid.py @@ -9,7 +9,7 @@ class MFGridException(Exception): """ def __init__(self, error): - Exception.__init__(self, "MFGridException: {}".format(error)) + Exception.__init__(self, f"MFGridException: {error}") class ModelCell: @@ -433,28 +433,28 @@ def get_grid_type(simulation_data, model_name): structure = MFStructure() if ( package_recarray.search_data( - "dis{}".format(structure.get_version_string()), 0 + f"dis{structure.get_version_string()}", 0 ) is not None ): return DiscretizationType.DIS elif ( package_recarray.search_data( - "disv{}".format(structure.get_version_string()), 0 + f"disv{structure.get_version_string()}", 0 ) is not None ): return DiscretizationType.DISV elif ( package_recarray.search_data( - "disu{}".format(structure.get_version_string()), 0 + f"disu{structure.get_version_string()}", 0 ) is not None ): return DiscretizationType.DISU elif ( package_recarray.search_data( - "disl{}".format(structure.get_version_string()), 0 + f"disl{structure.get_version_string()}", 0 ) is not 
None ): diff --git a/flopy/mf6/coordinates/simulationtime.py b/flopy/mf6/coordinates/simulationtime.py index e54f7a03d..df58080b0 100644 --- a/flopy/mf6/coordinates/simulationtime.py +++ b/flopy/mf6/coordinates/simulationtime.py @@ -129,8 +129,7 @@ def get_sp_time_steps(self, sp_num): ].get_data() if len(period_data) <= sp_num: raise FlopyException( - "Stress period {} was requested but does not " - "exist.".format(sp_num) + f"Stress period {sp_num} was requested but does not exist." ) return period_data[sp_num][1] diff --git a/flopy/mf6/data/mfdata.py b/flopy/mf6/data/mfdata.py index 424e35de9..4a49eafa3 100644 --- a/flopy/mf6/data/mfdata.py +++ b/flopy/mf6/data/mfdata.py @@ -122,10 +122,10 @@ def _load_prep(self, block_header): transient_key = block_header.get_transient_key() if isinstance(transient_key, int): if not self._verify_sp(transient_key): - message = 'Invalid transient key "{}" in block' ' "{}"'.format( - transient_key, block_header.name + raise MFInvalidTransientBlockHeaderException( + f'Invalid transient key "{transient_key}" ' + f'in block "{block_header.name}"' ) - raise MFInvalidTransientBlockHeaderException(message) if transient_key not in self._data_storage: self.add_transient_key(transient_key) self._current_key = transient_key @@ -226,7 +226,7 @@ def __init__( path=None, dimensions=None, *args, - **kwargs + **kwargs, ): # initialize self._current_key = None @@ -251,7 +251,7 @@ def __init__( index = 0 while self._path in self._simulation_data.mfdata: self._path = self._org_path[:-1] + ( - "{}_{}".format(self._org_path[-1], index), + f"{self._org_path[-1]}_{index}", ) index += 1 self._structure_init() @@ -438,9 +438,7 @@ def get_description(self, description=None, data_set=None): else: if data_item.description: if description: - description = "{}\n{}".format( - description, data_item.description - ) + description = f"{description}\n{data_item.description}" else: description = data_item.description return description @@ -516,7 +514,7 @@ def _get_constant_formatting_string( self._data_dimensions, verify_data=self._simulation_data.verify_data, ) - return "{}{}".format(sim_data.indent_string.join(const_format), suffix) + return f"{sim_data.indent_string.join(const_format)}{suffix}" def _get_aux_var_name(self, aux_var_index): aux_var_names = self._data_dimensions.package_dim.get_aux_variables() @@ -591,7 +589,7 @@ def _get_external_formatting_string(self, layer, ext_file_action): layer_storage.fname, model_name, ext_file_action ) layer_storage.fname = ext_file_path - ext_format = ["OPEN/CLOSE", "'{}'".format(ext_file_path)] + ext_format = ["OPEN/CLOSE", f"'{ext_file_path}'"] if storage.data_structure_type != DataStructureType.recarray: if layer_storage.factor is not None: data_type = self.structure.get_datum_type( @@ -607,6 +605,4 @@ def _get_external_formatting_string(self, layer, ext_file_action): if layer_storage.iprn is not None: ext_format.append("IPRN") ext_format.append(str(layer_storage.iprn)) - return "{}\n".format( - self._simulation_data.indent_string.join(ext_format) - ) + return f"{self._simulation_data.indent_string.join(ext_format)}\n" diff --git a/flopy/mf6/data/mfdataarray.py b/flopy/mf6/data/mfdataarray.py index aba227c9a..1c7e58a9c 100644 --- a/flopy/mf6/data/mfdataarray.py +++ b/flopy/mf6/data/mfdataarray.py @@ -191,8 +191,7 @@ def __getitem__(self, k): ) comment = ( - 'Unable to resolve index "{}" for ' - "multidimensional data.".format(k) + f'Unable to resolve index "{k}" for multidimensional data.' 
) type_, value_, traceback_ = sys.exc_info() raise MFDataException( @@ -378,10 +377,7 @@ def set_layered_data(self, layered_data): self._data_dimensions.get_model_grid().grid_type() == DiscretizationType.DISU ): - comment = ( - "Layered option not available for unstructured " - "grid. {}".format(self._path) - ) + comment = f"Layered option not available for unstructured grid. {self._path}" else: comment = ( 'Data "{}" does not support layered option. ' @@ -430,10 +426,7 @@ def make_layered(self): self._data_dimensions.get_model_grid().grid_type() == DiscretizationType.DISU ): - comment = ( - "Layered option not available for unstructured " - "grid. {}".format(self._path) - ) + comment = f"Layered option not available for unstructured grid. {self._path}" else: comment = ( 'Data "{}" does not support layered option. ' @@ -517,11 +510,9 @@ def store_as_external_file( if len(layer_list) > 0: fname, ext = os.path.splitext(external_file_path) if len(layer_list) == 1: - file_path = "{}{}".format(fname, ext) + file_path = f"{fname}{ext}" else: - file_path = "{}_layer{}{}".format( - fname, current_layer + 1, ext - ) + file_path = f"{fname}_layer{current_layer + 1}{ext}" else: file_path = external_file_path if isinstance(current_layer, int): @@ -568,8 +559,7 @@ def store_as_external_file( self.structure.get_model(), self.structure.get_package(), self._path, - "storing data in external file " - "{}".format(external_file_path), + f"storing data in external file {external_file_path}", self.structure.name, inspect.stack()[0][3], type_, @@ -656,7 +646,7 @@ def store_internal( self.structure.get_model(), self.structure.get_package(), self._path, - "storing data {} internally".format(self.structure.name), + f"storing data {self.structure.name} internally", self.structure.name, inspect.stack()[0][3], type_, @@ -1044,9 +1034,7 @@ def _get_file_entry( if shape_ml.get_total_size() == 1: data_indent = indent else: - data_indent = "{}{}".format( - indent, self._simulation_data.indent_string - ) + data_indent = f"{indent}{self._simulation_data.indent_string}" file_entry_array = [] if data_storage.data_structure_type == DataStructureType.scalar: @@ -1077,17 +1065,15 @@ def _get_file_entry( # for cellid and numeric indices convert from 0 base to 1 based data = abs(data) + 1 file_entry_array.append( - "{}{}{}{}\n".format(indent, self.structure.name, indent, data) + f"{indent}{self.structure.name}{indent}{data}\n" ) elif data_storage.layered: if not layered_aux: if not self.structure.data_item_structures[0].just_data: name = self.structure.name - file_entry_array.append( - "{}{}{}{}\n".format(indent, name, indent, "LAYERED") - ) + file_entry_array.append(f"{indent}{name}{indent}LAYERED\n") else: - file_entry_array.append("{}{}\n".format(indent, "LAYERED")) + file_entry_array.append(f"{indent}LAYERED\n") if layer is None: layer_min = shape_ml.first_index() @@ -1131,12 +1117,10 @@ def _get_file_entry( if not self.structure.data_item_structures[0].just_data: if self._data_name == "aux": file_entry_array.append( - "{}{}\n".format(indent, self._get_aux_var_name([0])) + f"{indent}{self._get_aux_var_name([0])}\n" ) else: - file_entry_array.append( - "{}{}\n".format(indent, self.structure.name) - ) + file_entry_array.append(f"{indent}{self.structure.name}\n") data_storage_type = data_storage.layer_storage[0].data_storage_type file_entry_array.append( @@ -1203,8 +1187,8 @@ def _get_file_entry_layer( if layered_aux: try: # display aux name - file_entry = "{}{}\n".format( - indent_string, self._get_aux_var_name(layer) + 
file_entry = ( + f"{indent_string}{self._get_aux_var_name(layer)}\n" ) except Exception as ex: type_, value_, traceback_ = sys.exc_info() @@ -1222,8 +1206,8 @@ def _get_file_entry_layer( self._simulation_data.debug, ex, ) - indent_string = "{}{}".format( - indent_string, self._simulation_data.indent_string + indent_string = ( + f"{indent_string}{self._simulation_data.indent_string}" ) data_storage = self._get_storage_obj() @@ -1231,9 +1215,7 @@ def _get_file_entry_layer( # internal data header + data format_str = self._get_internal_formatting_string(layer).upper() lay_str = self._get_data_layer_string(layer, data_indent).upper() - file_entry = "{}{}{}\n{}".format( - file_entry, indent_string, format_str, lay_str - ) + file_entry = f"{file_entry}{indent_string}{format_str}\n{lay_str}" elif storage_type == DataStorageType.internal_constant: # constant data try: @@ -1257,21 +1239,20 @@ def _get_file_entry_layer( const_str = self._get_constant_formatting_string( const_val, layer, self._data_type ).upper() - file_entry = "{}{}{}".format(file_entry, indent_string, const_str) + file_entry = f"{file_entry}{indent_string}{const_str}" else: # external data ext_str = self._get_external_formatting_string( layer, ext_file_action ) - file_entry = "{}{}{}".format(file_entry, indent_string, ext_str) + file_entry = f"{file_entry}{indent_string}{ext_str}" # add to active list of external files try: file_path = data_storage.get_external_file_path(layer) except Exception as ex: type_, value_, traceback_ = sys.exc_info() comment = ( - "Could not get external file path for layer " - '"{}"'.format(layer), + f'Could not get external file path for layer "{layer}"', ) raise MFDataException( self.structure.get_model(), @@ -1298,7 +1279,7 @@ def _get_data_layer_string(self, layer, data_indent): data = self._get_storage_obj().get_data(layer, False) except Exception as ex: type_, value_, traceback_ = sys.exc_info() - comment = 'Could not get data for layer "{}"'.format(layer) + comment = f'Could not get data for layer "{layer}"' raise MFDataException( self.structure.get_model(), self.structure.get_package(), @@ -1369,7 +1350,7 @@ def plot( mflay=None, fignum=None, title=None, - **kwargs + **kwargs, ): """ Plot 3-D model input data @@ -1438,7 +1419,7 @@ def plot( filename_base=filename_base, file_extension=file_extension, fignum=fignum, - **kwargs + **kwargs, ) elif num_plottable_layers > 1: axes = PlotUtilities._plot_util3d_helper( @@ -1447,7 +1428,7 @@ def plot( file_extension=file_extension, mflay=mflay, fignum=fignum, - **kwargs + **kwargs, ) else: axes = None @@ -1577,9 +1558,9 @@ def store_as_external_file( ): fname, ext = os.path.splitext(external_file_path) if DatumUtil.is_int(sp): - full_name = "{}_{}{}".format(fname, sp + 1, ext) + full_name = f"{fname}_{sp + 1}{ext}" else: - full_name = "{}_{}{}".format(fname, sp, ext) + full_name = f"{fname}_{sp}{ext}" super().store_as_external_file( full_name, layer, @@ -1834,7 +1815,7 @@ def plot( file_extension=None, mflay=None, fignum=None, - **kwargs + **kwargs, ): """ Plot transient array model input data @@ -1897,6 +1878,6 @@ def plot( file_extension=file_extension, kper=kper, fignum=fignum, - **kwargs + **kwargs, ) return axes diff --git a/flopy/mf6/data/mfdatalist.py b/flopy/mf6/data/mfdatalist.py index 8bd5376ce..a77cb3cf1 100644 --- a/flopy/mf6/data/mfdatalist.py +++ b/flopy/mf6/data/mfdatalist.py @@ -310,10 +310,7 @@ def store_internal( self._simulation_data.verbosity_level.value >= VerbosityLevel.verbose.value ): - print( - "Storing {} internally.." 
- ".".format(self.structure.name) - ) + print(f"Storing {self.structure.name} internally...") internal_data = { "data": data, } @@ -683,7 +680,7 @@ def search_data(self, search_term, col=None): type_, value_, traceback_, - "search_term={}\ncol={}".format(search_term, col), + f"search_term={search_term}\ncol={col}", self._simulation_data.debug, ex, ) @@ -749,7 +746,7 @@ def _get_file_entry( ext_string = self._get_external_formatting_string( 0, ext_file_action ) - file_entry.append("{}{}{}".format(indent, indent, ext_string)) + file_entry.append(f"{indent}{indent}{ext_string}") # write file except Exception as ex: @@ -818,9 +815,7 @@ def _get_file_entry( ): text_line.append(storage.comments[mflist_line].text) - file_entry.append( - "{}{}\n".format(indent, indent.join(text_line)) - ) + file_entry.append(f"{indent}{indent.join(text_line)}\n") self._crnt_line_num += 1 # unfreeze model grid @@ -847,9 +842,7 @@ def _get_file_entry_record( const_str = self._get_constant_formatting_string( storage.get_const_val(0), 0, data_type, "" ) - text_line.append( - "{}{}{}".format(indent, indent, const_str.upper()) - ) + text_line.append(f"{indent}{indent}{const_str.upper()}") except Exception as ex: type_, value_, traceback_ = sys.exc_info() raise MFDataException( @@ -1032,7 +1025,7 @@ def _get_file_entry_record( data_key = data_val.lower() if data_key not in data_item.keystring_dict: keystr_struct = data_item.keystring_dict[ - "{}record".format(data_key) + f"{data_key}record" ] else: keystr_struct = data_item.keystring_dict[ @@ -1279,7 +1272,7 @@ def plot( filename_base=None, file_extension=None, mflay=None, - **kwargs + **kwargs, ): """ Plot boundary condition (MfList) data @@ -1349,7 +1342,7 @@ def plot( filename_base=None, file_extension=None, mflay=None, - **kwargs + **kwargs, ) @@ -1573,9 +1566,9 @@ def store_as_external_file( ): fname, ext = os.path.splitext(external_file_path) if datautil.DatumUtil.is_int(sp): - full_name = "{}_{}{}".format(fname, sp + 1, ext) + full_name = f"{fname}_{sp + 1}{ext}" else: - full_name = "{}_{}{}".format(fname, sp, ext) + full_name = f"{fname}_{sp}{ext}" super().store_as_external_file( full_name, @@ -1832,7 +1825,7 @@ def plot( filename_base=None, file_extension=None, mflay=None, - **kwargs + **kwargs, ): """ Plot stress period boundary condition (MfList) data for a specified @@ -1910,7 +1903,7 @@ def plot( filename_base=filename_base, file_extension=file_extension, mflay=mflay, - **kwargs + **kwargs, ) return axes diff --git a/flopy/mf6/data/mfdatascalar.py b/flopy/mf6/data/mfdatascalar.py index cbb31e193..6bab86163 100644 --- a/flopy/mf6/data/mfdatascalar.py +++ b/flopy/mf6/data/mfdatascalar.py @@ -184,8 +184,8 @@ def set_data(self, data): ) except Exception as ex: type_, value_, traceback_ = sys.exc_info() - comment = 'Could not convert data "{}" to type "{}".'.format( - data, self._data_type + comment = ( + f'Could not convert data "{data}" to type "{self._data_type}".' ) raise MFDataException( self.structure.get_model(), @@ -205,8 +205,8 @@ def set_data(self, data): storage.set_data(converted_data, key=self._current_key) except Exception as ex: type_, value_, traceback_ = sys.exc_info() - comment = 'Could not set data "{}" to type "{}".'.format( - data, self._data_type + comment = ( + f'Could not set data "{data}" to type "{self._data_type}".' 
) raise MFDataException( self.structure.get_model(), @@ -270,9 +270,7 @@ def add_one(self): self._get_storage_obj().set_data(current_val + 1) except Exception as ex: type_, value_, traceback_ = sys.exc_info() - comment = 'Could increment data "{}" by one' ".".format( - current_val - ) + comment = f'Could increment data "{current_val}" by one.' raise MFDataException( self.structure.get_model(), self.structure.get_package(), @@ -496,7 +494,7 @@ def get_file_entry( index += 1 text = self._simulation_data.indent_string.join(text_line) - return "{}{}\n".format(self._simulation_data.indent_string, text) + return f"{self._simulation_data.indent_string}{text}\n" else: data_item = self.structure.data_item_structures[0] try: @@ -569,9 +567,7 @@ def get_file_entry( self._simulation_data.debug, ) if values_only: - return "{}{}".format( - self._simulation_data.indent_string, values - ) + return f"{self._simulation_data.indent_string}{values}" else: # keyword + data return "{}{}{}{}\n".format( @@ -682,7 +678,7 @@ def plot(self, filename_base=None, file_extension=None, **kwargs): self, filename_base=filename_base, file_extension=file_extension, - **kwargs + **kwargs, ) return axes @@ -917,7 +913,7 @@ def plot( file_extension=None, kper=0, fignum=None, - **kwargs + **kwargs, ): """ Plot transient scalar model data @@ -980,6 +976,6 @@ def plot( file_extension=file_extension, kper=kper, fignum=fignum, - **kwargs + **kwargs, ) return axes diff --git a/flopy/mf6/data/mfdatastorage.py b/flopy/mf6/data/mfdatastorage.py index 34a88797e..233ee92bf 100644 --- a/flopy/mf6/data/mfdatastorage.py +++ b/flopy/mf6/data/mfdatastorage.py @@ -127,7 +127,7 @@ def name(self): def __repr__(self): if self.data_storage_type == DataStorageType.internal_constant: - return "constant {}".format(self.get_data_const_val()) + return f"constant {self.get_data_const_val()}" else: return repr(self.get_data()) @@ -452,7 +452,7 @@ def get_data_str(self, formal): # Assemble strings for internal array data for index, storage in enumerate(self.layer_storage.elements()): if self.layered: - layer_str = "Layer_{}".format(str(index + 1)) + layer_str = f"Layer_{index + 1}" else: layer_str = "" if storage.data_storage_type == DataStorageType.internal_array: @@ -505,15 +505,13 @@ def _get_layer_header_str(self, layer): self.layer_storage[layer].data_storage_type == DataStorageType.external_file ): - header_list.append( - "open/close {}".format(self.layer_storage[layer].fname) - ) + header_list.append(f"open/close {self.layer_storage[layer].fname}") elif ( self.layer_storage[layer].data_storage_type == DataStorageType.internal_constant ): lr = self.layer_storage[layer] - header_list.append("constant {}".format(lr)) + header_list.append(f"constant {lr}") else: header_list.append("internal") if ( @@ -521,13 +519,9 @@ def _get_layer_header_str(self, layer): and self.layer_storage[layer].factor != 1 and self.data_structure_type != DataStructureType.recarray ): - header_list.append( - "factor {}".format(self.layer_storage[layer].factor) - ) + header_list.append(f"factor {self.layer_storage[layer].factor}") if self.layer_storage[layer].iprn is not None: - header_list.append( - "iprn {}".format(self.layer_storage[layer].iprn) - ) + header_list.append(f"iprn {self.layer_storage[layer].iprn}") if len(header_list) > 0: return ", ".join(header_list) else: @@ -691,7 +685,7 @@ def _access_data(self, layer, return_data=False, apply_mult=True): ): return None if not (layer is None or self.layer_storage.in_shape(layer)): - message = 'Layer "{}" is an invalid 
layer.'.format(layer) + message = f'Layer "{layer}" is an invalid layer.' type_, value_, traceback_ = sys.exc_info() raise MFDataException( self.data_dimensions.structure.get_model(), @@ -1100,9 +1094,7 @@ def get_active_layer_indices(self): def get_external(self, layer=None): if not (layer is None or self.layer_storage.in_shape(layer)): - message = 'Can not get external data for layer "{}"' ".".format( - layer - ) + message = f'Can not get external data for layer "{layer}".' type_, value_, traceback_ = sys.exc_info() raise MFDataException( self.data_dimensions.structure.get_model(), @@ -1394,7 +1386,7 @@ def _build_recarray(self, data, key, autofill): except: data_expected = [] for data_type in self._recarray_type_list: - data_expected.append("<{}>".format(data_type[0])) + data_expected.append(f"<{data_type[0]}>") message = ( "An error occurred when storing data " '"{}" in a recarray. {} data is a one ' @@ -1696,7 +1688,7 @@ def external_to_external( ".".format(self.layer_storage.get_total_size()) ) else: - message = 'layer "{}" is not a valid layer'.format(layer) + message = f'layer "{layer}" is not a valid layer' type_, value_, traceback_ = sys.exc_info() raise MFDataException( self.data_dimensions.structure.get_model(), @@ -1715,7 +1707,7 @@ def external_to_external( if layer is None: layer = 1 if self.layer_storage[layer].fname is None: - message = "No file name exists for layer {}.".format(layer) + message = f"No file name exists for layer {layer}." type_, value_, traceback_ = sys.exc_info() raise MFDataException( self.data_dimensions.structure.get_model(), @@ -2230,9 +2222,9 @@ def _duplicate_last_item(self): if DatumUtil.is_int(arr_item_name[-1]): new_item_num = int(arr_item_name[-1]) + 1 new_item_name = "_".join(arr_item_name[0:-1]) - new_item_name = "{}_{}".format(new_item_name, new_item_num) + new_item_name = f"{new_item_name}_{new_item_num}" else: - new_item_name = "{}_1".format(last_item[0]) + new_item_name = f"{last_item[0]}_1" self._recarray_type_list.append((new_item_name, last_item[1])) def _build_full_data(self, apply_multiplier=False): @@ -2408,11 +2400,7 @@ def _is_type(self, data_item, data_type): self._simulation_data.verbosity_level.value >= VerbosityLevel.normal.value ): - print( - "{} type checking currently not supported".format( - data_type - ) - ) + print(f"{data_type} type checking currently not supported") return True def _fill_dimensions(self, data_iter, dimensions): @@ -2489,9 +2477,7 @@ def set_tas(self, tas_name, tas_label, current_key, check_name=True): # this is a time series array with a valid tas variable self.data_structure_type = DataStructureType.scalar try: - self.set_data( - "{} {}".format(tas_label, tas_name), 0, key=current_key - ) + self.set_data(f"{tas_label} {tas_name}", 0, key=current_key) except Exception as ex: type_, value_, traceback_ = sys.exc_info() structure = self.data_dimensions.structure @@ -2611,7 +2597,7 @@ def build_type_list( # add potential data after keystring to type list ks_data_item = deepcopy(data_item) ks_data_item.type = DatumType.string - ks_data_item.name = "{}_data".format(ks_data_item.name) + ks_data_item.name = f"{ks_data_item.name}_data" ks_rec_type = ks_data_item.get_rec_type() if not min_size: self._append_type_lists( @@ -2638,7 +2624,7 @@ def build_type_list( # items of variable length. 
assume everything at # the end of the data line is related to the last # keystring - name = "{}_{}".format(ks_data_item.name, idx) + name = f"{ks_data_item.name}_{idx}" self._append_type_lists( name, ks_rec_type, ks_data_item.is_cellid ) @@ -2660,7 +2646,7 @@ def build_type_list( data_item.type != DatumType.string and data_item.type != DatumType.keyword ): - name = "{}_label".format(data_item.name) + name = f"{data_item.name}_label" self._append_type_lists( name, object, data_item.is_cellid ) @@ -2745,9 +2731,7 @@ def build_type_list( if not data_item.optional or not min_size: for index in range(0, resolved_shape[0]): if resolved_shape[0] > 1: - name = "{}_{}".format( - data_item.name, index - ) + name = f"{data_item.name}_{index}" else: name = data_item.name self._append_type_lists( @@ -2858,7 +2842,7 @@ def _has_layer_dim(self): def _store_prep(self, layer, multiplier): if not (layer is None or self.layer_storage.in_shape(layer)): - message = "Layer {} is not a valid layer.".format(layer) + message = f"Layer {layer} is not a valid layer." type_, value_, traceback_ = sys.exc_info() raise MFDataException( self.data_dimensions.structure.get_model(), diff --git a/flopy/mf6/data/mfdatautil.py b/flopy/mf6/data/mfdatautil.py index 21a608d6c..c041ca75e 100644 --- a/flopy/mf6/data/mfdatautil.py +++ b/flopy/mf6/data/mfdatautil.py @@ -213,7 +213,7 @@ def to_string( return str(val) if len(arr_val) > 1: # quote any string with spaces - string_val = "'{}'".format(val) + string_val = f"'{val}'" if data_item is not None and data_item.ucase: return string_val.upper() else: @@ -304,9 +304,9 @@ def add_text(self, additional_text, new_line=False): if isinstance(self.text, list): self.text.append(additional_text) elif new_line: - self.text = "{}{}".format(self.text, additional_text) + self.text = f"{self.text}{additional_text}" else: - self.text = "{} {}".format(self.text, additional_text) + self.text = f"{self.text} {additional_text}" """ Get the comment text in the format to write to package files. 
@@ -329,7 +329,7 @@ def get_file_entry(self, eoln_suffix=True): if self.text.strip(): file_entry = self.text if eoln_suffix: - file_entry = "{}\n".format(file_entry) + file_entry = f"{file_entry}\n" return file_entry def _recursive_get(self, base_list): @@ -337,11 +337,9 @@ def _recursive_get(self, base_list): if base_list and self.sim_data.comments_on: for item in base_list: if not isinstance(item, str) and isinstance(item, list): - file_entry = "{}{}".format( - file_entry, self._recursive_get(item) - ) + file_entry = f"{file_entry}{self._recursive_get(item)}" else: - file_entry = "{} {}".format(file_entry, item) + file_entry = f"{file_entry} {item}" return file_entry """ @@ -427,7 +425,7 @@ def _recursive_write(self, fd, base_list): if not isinstance(item, str) and isinstance(item, list): self._recursive_write(fd, item) else: - fd.write(" {}".format(item)) + fd.write(f" {item}") class TemplateGenerator: @@ -839,8 +837,8 @@ class MFDocString: def __init__(self, description): self.indent = "    " self.description = description - self.parameter_header = "{}Parameters\n{}----------".format( - self.indent, self.indent + self.parameter_header = ( + f"{self.indent}Parameters\n{self.indent}----------" ) self.parameters = [] self.model_parameters = [] @@ -877,7 +875,7 @@ def get_doc_string(self, model_doc_string=False): else: param_list = self.parameters for parameter in param_list: - doc_string += "{}\n".format(parameter) + doc_string += f"{parameter}\n" if not model_doc_string: - doc_string += '\n{}"""'.format(self.indent) + doc_string += f'\n{self.indent}"""' return doc_string diff --git a/flopy/mf6/data/mffileaccess.py b/flopy/mf6/data/mffileaccess.py index 53c185c9c..e7c1cb762 100644 --- a/flopy/mf6/data/mffileaccess.py +++ b/flopy/mf6/data/mffileaccess.py @@ -117,9 +117,8 @@ def _load_keyword(self, arr_line, index_num, keyword): if not keyword_match and aux_var_index is None: aux_text = "" if aux_var_names is not None: - aux_text = " or auxiliary variables {}".format( - aux_var_names[0] - ) + # include the auxiliary variable names in the error message when defined + aux_text = f" or auxiliary variables {aux_var_names[0]}" message = ( 'Error reading variable "{}". Expected ' 'variable keyword "{}"{} not found ' @@ -158,7 +157,7 @@ def _open_ext_file(self, fname, binary=False, write=False): else: options = "r" if binary: - options = "{}b".format(options) + options = f"{options}b" try: fd = open(read_file, options) return fd @@ -482,9 +481,7 @@ def get_data_string(self, data, data_type, data_indent=""): self._simulation_data.debug, ex, ) - layer_data_string[-1] = "{}{}{}".format( - layer_data_string[-1], indent_str, data_lyr - ) + layer_data_string[-1] += f"{indent_str}{data_lyr}" if jagged_def is not None: if line_data_count == jagged_def[jagged_def_index]: @@ -503,7 +500,7 @@ def get_data_string(self, data, data_type, data_indent=""): # clean up the text at the end of the array layer_data_string[-1] = layer_data_string[-1].strip() if len(layer_data_string) == 1: - return "{}{}\n".format(data_indent, layer_data_string[0].rstrip()) + return f"{data_indent}{layer_data_string[0].rstrip()}\n" else: return "\n".join(layer_data_string) @@ -674,8 +671,8 @@ def load_from_package( dimensions = storage.get_data_dimensions(layer_shape) except Exception as ex: type_, value_, traceback_ = sys.exc_info() - comment = 'Could not get data shape for key "{}".'.format( - self._current_key + comment = ( + f'Could not get data shape for key "{self._current_key}".'
) raise MFDataException( self.structure.get_model(), @@ -841,7 +838,7 @@ def _load_layer( self.structure.get_model(), self.structure.get_package(), self._path, - "reading data from file {}".format(file_handle.name), + f"reading data from file {file_handle.name}", self.structure.name, inspect.stack()[0][3], type_, @@ -863,7 +860,7 @@ def _load_layer( print_format=print_format, ) except Exception as ex: - comment = 'Could not store data: "{}"'.format(data_shaped) + comment = f'Could not store data: "{data_shaped}"' type_, value_, traceback_ = sys.exc_info() raise MFDataException( self.structure.get_model(), @@ -928,8 +925,8 @@ def _resolve_data_shape(self, data, layer_shape, storage): dimensions = storage.get_data_dimensions(layer_shape) except Exception as ex: type_, value_, traceback_ = sys.exc_info() - comment = 'Could not get data shape for key "{}".'.format( - self._current_key + comment = ( + f'Could not get data shape for key "{self._current_key}".' ) raise MFDataException( self.structure.get_model(), @@ -951,8 +948,7 @@ def _resolve_data_shape(self, data, layer_shape, storage): except Exception as ex: type_, value_, traceback_ = sys.exc_info() comment = ( - "Could not reshape data to dimensions " - '"{}".'.format(dimensions) + f'Could not reshape data to dimensions "{dimensions}".' ) raise MFDataException( self.structure.get_model(), @@ -1242,8 +1238,7 @@ def read_list_data_from_file( else: # not a constant or open/close line, exception is valid comment = ( - "Unable to process line 1 of data list: " - '"{}"'.format(current_line) + f'Unable to process line 1 of data list: "{current_line}"' ) type_, value_, traceback_ = sys.exc_info() raise MFDataException( @@ -1645,9 +1640,7 @@ def load_list_line( name_data not in data_item.keystring_dict ): - name_data = "{}record".format( - name_data - ) + name_data = f"{name_data}record" if ( name_data not in data_item.keystring_dict @@ -1673,11 +1666,7 @@ def load_list_line( data_item.keystring_dict[name_data] ) if data_item_ks == 0: - comment = ( - "Could not find " - "keystring " - "{}.".format(name_data) - ) + comment = f"Could not find keystring {name_data}." ( type_, value_, @@ -2295,9 +2284,7 @@ def load_from_package( try: storage.set_data(True, key=self._current_key) except Exception as ex: - message = 'Could not set data "True" with key "{}".'.format( - self._current_key - ) + message = f'Could not set data "True" with key "{self._current_key}".' 
type_, value_, traceback_ = sys.exc_info() raise MFDataException( self.structure.get_model(), diff --git a/flopy/mf6/data/mfstructure.py b/flopy/mf6/data/mfstructure.py index a827036c2..444621030 100644 --- a/flopy/mf6/data/mfstructure.py +++ b/flopy/mf6/data/mfstructure.py @@ -142,7 +142,7 @@ def get_file_list(self): if package_abbr not in file_order: file_order.append(package_abbr) return [ - fname + ".dfn" for fname in file_order if fname + ".dfn" in files + f"{fname}.dfn" for fname in file_order if f"{fname}.dfn" in files ] def _file_type(self, file_name): @@ -951,7 +951,7 @@ def set_value(self, line, common): self.python_name = self.name.replace("-", "_").lower() # don't allow name to be a python keyword if keyword.iskeyword(self.name): - self.python_name = "{}_".format(self.python_name) + self.python_name = f"{self.python_name}_" # performance optimizations if self.name == "aux": self.is_aux = True @@ -1099,20 +1099,18 @@ def set_value(self, line, common): self.jagged_array = arr_line[1] def get_type_string(self): - return "[{}]".format(self.type_string) + return f"[{self.type_string}]" def get_description(self, line_size, initial_indent, level_indent): - item_desc = "* {} ({}) {}".format( - self.name, self.type_string, self.description - ) + item_desc = f"* {self.name} ({self.type_string}) {self.description}" if self.numeric_index or self.is_cellid: # append zero-based index text - item_desc = "{} {}".format(item_desc, numeric_index_text) + item_desc = f"{item_desc} {numeric_index_text}" twr = TextWrapper( width=line_size, initial_indent=initial_indent, drop_whitespace=True, - subsequent_indent=" {}".format(initial_indent), + subsequent_indent=f" {initial_indent}", ) item_desc = "\n".join(twr.wrap(item_desc)) return item_desc @@ -1121,25 +1119,22 @@ def get_doc_string(self, line_size, initial_indent, level_indent): description = self.get_description( line_size, initial_indent + level_indent, level_indent ) - param_doc_string = "{} : {}".format( - self.python_name, self.get_type_string() - ) + param_doc_string = f"{self.python_name} : {self.get_type_string()}" twr = TextWrapper( width=line_size, initial_indent=initial_indent, - subsequent_indent=" {}".format(initial_indent), + subsequent_indent=f" {initial_indent}", drop_whitespace=True, ) param_doc_string = "\n".join(twr.wrap(param_doc_string)) - param_doc_string = "{}\n{}".format(param_doc_string, description) + param_doc_string = f"{param_doc_string}\n{description}" return param_doc_string def get_keystring_desc(self, line_size, initial_indent, level_indent): if self.type != DatumType.keystring: raise StructException( - 'Can not get keystring description for "{}" ' - "because it is not a keystring" - ".".format(self.name), + f'Can not get keystring description for "{self.name}" ' + "because it is not a keystring", self.path, ) @@ -1147,7 +1142,7 @@ def get_keystring_desc(self, line_size, initial_indent, level_indent): description = "" for key, item in self.keystring_dict.items(): if description: - description = "{}\n".format(description) + description = f"{description}\n" description = "{}{}".format( description, item.get_doc_string(line_size, initial_indent, level_indent), @@ -1205,7 +1200,7 @@ def _resolve_common(arr_line, common): return arr_line if not (arr_line[2] in common and len(arr_line) >= 4): raise StructException( - 'Could not find line "{}" in common dfn' ".".format(arr_line) + f'Could not find line "{arr_line}" in common dfn.' 
) close_bracket_loc = MFDataItemStructure._find_close_bracket( arr_line[2:] @@ -1274,7 +1269,7 @@ def _str_to_enum_type(self, type_string): elif type_string.lower() == "repeating_record": return DatumType.repeating_record else: - exc_text = 'Data item type "{}" not supported.'.format(type_string) + exc_text = f'Data item type "{type_string}" not supported.' raise StructException(exc_text, self.path) def get_rec_type(self): @@ -1579,7 +1574,7 @@ def _fpmerge_data_item(self, item, dfn_list): if item.name.lower() in mfstruct.flopy_dict: # read flopy-specific dfn data for name, value in mfstruct.flopy_dict[item.name.lower()].items(): - line = "{} {}".format(name, value) + line = f"{name} {value}" item.set_value(line, None) if dfn_list is not None: dfn_list[-1].append(line) @@ -1706,9 +1701,9 @@ def get_type_string(self): type_header = "[" type_footer = "]" if self.repeating: - type_footer = "] ... [{}]".format(type_string) + type_footer = f"] ... [{type_string}]" - return "{}{}{}".format(type_header, type_string, type_footer) + return f"{type_header}{type_string}{type_footer}" def get_docstring_type_array(self, type_array): for index, item in enumerate(self.data_item_structures): @@ -1739,28 +1734,28 @@ def get_description( item_desc = item.get_description( line_size, initial_indent + level_indent, level_indent ) - description = "{}\n{}".format(description, item_desc) + description = f"{description}\n{item_desc}" elif datastr.display_item(index): if len(description.strip()) > 0: - description = "{}\n".format(description) + description = f"{description}\n" item_desc = item.description if item.numeric_index or item.is_cellid: # append zero-based index text - item_desc = "{} {}".format(item_desc, numeric_index_text) + item_desc = f"{item_desc} {numeric_index_text}" - item_desc = "* {} ({}) {}".format(item.name, itype, item_desc) + item_desc = f"* {item.name} ({itype}) {item_desc}" twr = TextWrapper( width=line_size, initial_indent=initial_indent, - subsequent_indent=" {}".format(initial_indent), + subsequent_indent=f" {initial_indent}", ) item_desc = "\n".join(twr.wrap(item_desc)) - description = "{}{}".format(description, item_desc) + description = f"{description}{item_desc}" if item.type == DatumType.keystring: keystr_desc = item.get_keystring_desc( line_size, initial_indent + level_indent, level_indent ) - description = "{}\n{}".format(description, keystr_desc) + description = f"{description}\n{keystr_desc}" return description def get_subpackage_description( @@ -1783,7 +1778,7 @@ def get_subpackage_description( twr = TextWrapper( width=line_size, initial_indent=initial_indent, - subsequent_indent=" {}".format(initial_indent), + subsequent_indent=f" {initial_indent}", ) return "\n".join(twr.wrap(item_desc)) @@ -1795,9 +1790,7 @@ def get_doc_string( line_size, initial_indent + level_indent, level_indent ) var_name = self.parameter_name - type_name = "{}varname:data{} or {} data".format( - "{", "}", self.construct_data - ) + type_name = f"{{varname:data}} or {self.construct_data} data" else: description = self.get_description( line_size, initial_indent + level_indent, level_indent @@ -1805,14 +1798,14 @@ def get_doc_string( var_name = self.python_name type_name = self.get_type_string() - param_doc_string = "{} : {}".format(var_name, type_name) + param_doc_string = f"{var_name} : {type_name}" twr = TextWrapper( width=line_size, initial_indent=initial_indent, - subsequent_indent=" {}".format(initial_indent), + subsequent_indent=f" {initial_indent}", ) param_doc_string = 
"\n".join(twr.wrap(param_doc_string)) - param_doc_string = "{}\n{}".format(param_doc_string, description) + param_doc_string = f"{param_doc_string}\n{description}" return param_doc_string def get_type_array(self, type_array): @@ -2296,9 +2289,7 @@ def process_dfn(self, dfn_file): or dfn_file.dfn_type == DfnType.gnc_file or dfn_file.dfn_type == DfnType.mvr_file ): - model_ver = "{}{}".format( - dfn_file.model_type, MFStructure(True).get_version_string() - ) + model_ver = f"{dfn_file.model_type}{MFStructure(True).get_version_string()}" if model_ver not in self.model_struct_objs: self.add_model(model_ver) if dfn_file.dfn_type == DfnType.model_file: diff --git a/flopy/mf6/mfbase.py b/flopy/mf6/mfbase.py index 0d1f8b0f5..9f87820f0 100644 --- a/flopy/mf6/mfbase.py +++ b/flopy/mf6/mfbase.py @@ -15,7 +15,7 @@ class MFInvalidTransientBlockHeaderException(Exception): def __init__(self, error): Exception.__init__( - self, "MFInvalidTransientBlockHeaderException: {}".format(error) + self, f"MFInvalidTransientBlockHeaderException: {error}" ) @@ -26,7 +26,7 @@ class ReadAsArraysException(Exception): """ def __init__(self, error): - Exception.__init__(self, "ReadAsArraysException: {}".format(error)) + Exception.__init__(self, f"ReadAsArraysException: {error}") # external exceptions for users @@ -37,9 +37,7 @@ class FlopyException(Exception): def __init__(self, error, location=""): self.message = error - Exception.__init__( - self, "FlopyException: {} ({})".format(error, location) - ) + Exception.__init__(self, f"FlopyException: {error} ({location})") class StructException(Exception): @@ -49,9 +47,7 @@ class StructException(Exception): def __init__(self, error, location): self.message = error - Exception.__init__( - self, "StructException: {} ({})".format(error, location) - ) + Exception.__init__(self, f"StructException: {error} ({location})") class MFDataException(Exception): @@ -138,14 +134,14 @@ def __init__( # build error string error_message_0 = "An error occurred in " if self.data_element is not None and self.data_element != "": - error_message_1 = 'data element "{}" '.format(self.data_element) + error_message_1 = f'data element "{self.data_element}" ' else: error_message_1 = "" if self.model is not None and self.model != "": - error_message_2 = 'model "{}" '.format(self.model) + error_message_2 = f'model "{self.model}" ' else: error_message_2 = "" - error_message_3 = 'package "{}".'.format(self.package) + error_message_3 = f'package "{self.package}".' 
error_message_4 = ( ' The error occurred while {} in the "{}" method' ".".format(self.current_process, self.method_caught_in) @@ -153,9 +149,7 @@ def __init__( if len(self.messages) > 0: error_message_5 = "\nAdditional Information:\n" for index, message in enumerate(self.messages): - error_message_5 = "{}({}) {}\n".format( - error_message_5, index + 1, message - ) + error_message_5 = f"{error_message_5}({index + 1}) {message}\n" else: error_message_5 = "" error_message = "{}{}{}{}{}{}".format( @@ -324,9 +318,9 @@ def unique_file_name(file_name, lookup): def _build_file(file_name, num): file, ext = os.path.splitext(file_name) if ext: - return "{}_{}{}".format(file, num, ext) + return f"{file}_{num}{ext}" else: - return "{}_{}".format(file, num) + return f"{file}_{num}" @staticmethod def string_to_file_path(fp_string): @@ -338,9 +332,7 @@ def string_to_file_path(fp_string): arr_string = new_string.split(delimiter) if len(arr_string) > 1: if os.path.isabs(fp_string): - new_string = "{}{}{}".format( - arr_string[0], delimiter, arr_string[1] - ) + new_string = f"{arr_string[0]}{delimiter}{arr_string[1]}" else: new_string = os.path.join(arr_string[0], arr_string[1]) if len(arr_string) > 2: @@ -511,8 +503,8 @@ def package_factory(package_type, model_type): package : MFPackage subclass """ - package_abbr = "{}{}".format(model_type, package_type) - package_utl_abbr = "utl{}".format(package_type) + package_abbr = f"{model_type}{package_type}" + package_utl_abbr = f"utl{package_type}" package_list = [] # iterate through python files package_file_paths = PackageContainer.get_package_file_paths() @@ -591,16 +583,12 @@ def get_module(package_file_path): internal FloPy use only, not intended for end users.""" package_file_name = os.path.basename(package_file_path) module_path = os.path.splitext(package_file_name)[0] - module_name = "{}{}{}".format( - "Modflow", module_path[2].upper(), module_path[3:] - ) + module_name = f"Modflow{module_path[2].upper()}{module_path[3:]}" if module_name.startswith("__"): return None # import - return importlib.import_module( - "flopy.mf6.modflow.{}".format(module_path) - ) + return importlib.import_module(f"flopy.mf6.modflow.{module_path}") @staticmethod def get_package_file_paths(): diff --git a/flopy/mf6/mfmodel.py b/flopy/mf6/mfmodel.py index 60857f31c..f0006fe56 100644 --- a/flopy/mf6/mfmodel.py +++ b/flopy/mf6/mfmodel.py @@ -74,7 +74,7 @@ def __init__( structure=None, model_rel_path=".", verbose=False, - **kwargs + **kwargs, ): super().__init__(simulation.simulation_data, modelname) self.simulation = simulation @@ -86,7 +86,7 @@ def __init__( self.type = "Model" if model_nam_file is None: - model_nam_file = "{}.nam".format(modelname) + model_nam_file = f"{modelname}.nam" if add_to_simulation: self.structure = simulation.register_model( @@ -105,7 +105,7 @@ def __init__( self._verbose = verbose if model_nam_file is None: - self.model_nam_file = "{}.nam".format(modelname) + self.model_nam_file = f"{modelname}.nam" else: self.model_nam_file = model_nam_file @@ -126,8 +126,7 @@ def __init__( if len(kwargs) > 0: kwargs_str = ", ".join(kwargs.keys()) excpt_str = ( - 'Extraneous kwargs "{}" provided to ' - "MFModel.".format(kwargs_str) + f'Extraneous kwargs "{kwargs_str}" provided to MFModel.' 
) raise FlopyException(excpt_str) @@ -135,8 +134,8 @@ def __init__( # create name file based on model type - support different model types package_obj = self.package_factory("nam", model_type[0:3]) if not package_obj: - excpt_str = "Name file could not be found for model" "{}.".format( - model_type[0:3] + excpt_str = ( + f"Name file could not be found for model{model_type[0:3]}." ) raise FlopyException(excpt_str) @@ -742,9 +741,9 @@ def load_base( vnum = mfstructure.MFStructure().get_version_string() # FIX: Transport - Priority packages maybe should not be hard coded priority_packages = { - "dis{}".format(vnum): 1, - "disv{}".format(vnum): 1, - "disu{}".format(vnum): 1, + f"dis{vnum}": 1, + f"disv{vnum}": 1, + f"disu{vnum}": 1, } packages_ordered = [] package_recarray = instance.simulation_data.mfdata[ @@ -777,7 +776,7 @@ def load_base( simulation.simulation_data.verbosity_level.value >= VerbosityLevel.normal.value ): - print(" skipping package {}...".format(ftype)) + print(f" skipping package {ftype}...") continue if model_rel_path and model_rel_path != ".": # strip off model relative path from the file path @@ -787,7 +786,7 @@ def load_base( simulation.simulation_data.verbosity_level.value >= VerbosityLevel.normal.value ): - print(" loading package {}...".format(ftype)) + print(f" loading package {ftype}...") # load package instance.load_package(ftype, fname, pname, strict, None) sim_data = simulation.simulation_data @@ -848,7 +847,7 @@ def write(self, ext_file_action=ExtFileAction.copy_relative_paths): self.simulation_data.verbosity_level.value >= VerbosityLevel.normal.value ): - print(" writing package {}...".format(pp._get_pname())) + print(f" writing package {pp._get_pname()}...") pp.write(ext_file_action=ext_file_action) def get_grid_type(self): @@ -864,28 +863,28 @@ def get_grid_type(self): structure = mfstructure.MFStructure() if ( package_recarray.search_data( - "dis{}".format(structure.get_version_string()), 0 + f"dis{structure.get_version_string()}", 0 ) is not None ): return DiscretizationType.DIS elif ( package_recarray.search_data( - "disv{}".format(structure.get_version_string()), 0 + f"disv{structure.get_version_string()}", 0 ) is not None ): return DiscretizationType.DISV elif ( package_recarray.search_data( - "disu{}".format(structure.get_version_string()), 0 + f"disu{structure.get_version_string()}", 0 ) is not None ): return DiscretizationType.DISU elif ( package_recarray.search_data( - "disl{}".format(structure.get_version_string()), 0 + f"disl{structure.get_version_string()}", 0 ) is not None ): @@ -1163,10 +1162,10 @@ def rename_all_packages(self, name): """ package_type_count = {} - self.name_file.filename = "{}.nam".format(name) + self.name_file.filename = f"{name}.nam" for package in self.packagelist: if package.package_type not in package_type_count: - package.filename = "{}.{}".format(name, package.package_type) + package.filename = f"{name}.{package.package_type}" package_type_count[package.package_type] = 1 else: package_type_count[package.package_type] += 1 @@ -1303,7 +1302,7 @@ def register_package( package.package_name = package.package_type if set_package_filename: - package._filename = "{}.{}".format(self.name, package.package_type) + package._filename = f"{self.name}.{package.package_type}" if add_to_package_list: self._add_package(package, path) @@ -1320,7 +1319,7 @@ def register_package( # recarray self.name_file.packages.update_record( [ - "{}6".format(pkg_type), + f"{pkg_type}6", package._filename, package.package_name, ], @@ -1386,9 +1385,7 @@ def 
load_package( # resolve dictionary name for package if dict_package_name is not None: if parent_package is not None: - dict_package_name = "{}_{}".format( - parent_package.path[-1], ftype - ) + dict_package_name = f"{parent_package.path[-1]}_{ftype}" else: # use dict_package_name as the base name if ftype in self._ftype_num_dict: @@ -1408,8 +1405,8 @@ def load_package( if pname is not None: dict_package_name = pname else: - dict_package_name = "{}_{}".format( - ftype, self._ftype_num_dict[ftype] + dict_package_name = ( + f"{ftype}_{self._ftype_num_dict[ftype]}" ) else: dict_package_name = ftype @@ -1432,7 +1429,7 @@ def load_package( package.load(strict) except ReadAsArraysException: # create ReadAsArrays package and load it instead - package_obj = self.package_factory("{}a".format(ftype), model_type) + package_obj = self.package_factory(f"{ftype}a", model_type) package = package_obj( self, filename=fname, diff --git a/flopy/mf6/mfpackage.py b/flopy/mf6/mfpackage.py index ed81d544a..6db2443e9 100644 --- a/flopy/mf6/mfpackage.py +++ b/flopy/mf6/mfpackage.py @@ -215,7 +215,7 @@ def write_header(self, fd): File object to write block header to. """ - fd.write("BEGIN {}".format(self.name)) + fd.write(f"BEGIN {self.name}") if len(self.data_items) > 0: if isinstance(self.data_items[0], mfdatascalar.MFScalar): one_based = ( @@ -245,7 +245,7 @@ def write_footer(self, fd): File object to write block footer to. """ - fd.write("END {}".format(self.name)) + fd.write(f"END {self.name}") if len(self.data_items) > 0: one_based = self.data_items[0].structure.type == DatumType.integer if isinstance(self.data_items[0], mfdatascalar.MFScalar): @@ -366,14 +366,14 @@ def _get_data_str(self, formal): if formal: ds_repr = repr(dataset) if len(ds_repr.strip()) > 0: - data_str = "{}{}\n{}\n".format( - data_str, dataset.structure.name, repr(dataset) + data_str = ( + f"{data_str}{dataset.structure.name}\n{dataset!r}\n" ) else: ds_str = str(dataset) if len(ds_str.strip()) > 0: - data_str = "{}{}\n{}\n".format( - data_str, dataset.structure.name, str(dataset) + data_str = ( + f"{data_str}{dataset.structure.name}\n{dataset!s}\n" ) return data_str @@ -815,8 +815,7 @@ def load(self, block_header, fd, strict=True): >= VerbosityLevel.verbose.value ): print( - ' opening external file "{}"..' - ".".format(file_name) + f' opening external file "{file_name}"...' ) external_file_info = arr_line fd_block = open(os.path.join(root_path, arr_line[1]), "r") @@ -825,10 +824,7 @@ def load(self, block_header, fd, strict=True): arr_line = datautil.PyListUtil.split_data_line(line) except: type_, value_, traceback_ = sys.exc_info() - message = ( - "Error reading external file specified in " - 'line "{}"'.format(line) - ) + message = f'Error reading external file specified in line "{line}"' raise MFDataException( self._container_package.model_name, self._container_package._get_pname(), @@ -851,8 +847,7 @@ def load(self, block_header, fd, strict=True): >= VerbosityLevel.verbose.value ): print( - " loading data {}.." - ".".format(dataset.structure.name) + f" loading data {dataset.structure.name}..." ) next_line = dataset.load( line, @@ -882,8 +877,7 @@ def load(self, block_header, fd, strict=True): >= VerbosityLevel.verbose.value ): print( - " loading child package {}.." - ".".format(package_info[0]) + f" loading child package {package_info[0]}..." 
) pkg = self._model_or_sim.load_package( package_info[0], @@ -929,7 +923,7 @@ def load(self, block_header, fd, strict=True): line, fd_block, initial_comment ) except MFInvalidTransientBlockHeaderException as e: - warning_str = "WARNING: {}".format(e) + warning_str = f"WARNING: {e}" print(warning_str) self.block_headers.pop() return @@ -978,7 +972,7 @@ def _find_data_by_keyword(self, line, fd, initial_comment): self._simulation_data.verbosity_level.value >= VerbosityLevel.verbose.value ): - print(" loading data {}...".format(ds_name)) + print(f" loading data {ds_name}...") next_line = self.datasets[ds_name].load( next_line[1], fd, @@ -1008,8 +1002,7 @@ def _find_data_by_keyword(self, line, fd, initial_comment): >= VerbosityLevel.verbose.value ): print( - " loading child package {}.." - ".".format(package_info[1]) + f" loading child package {package_info[1]}..." ) pkg = self._model_or_sim.load_package( package_info[0], @@ -1065,8 +1058,7 @@ def _find_data_by_keyword(self, line, fd, initial_comment): >= VerbosityLevel.verbose.value ): print( - " loading child package {}.." - ".".format(package_info[0]) + f" loading child package {package_info[0]}..." ) pkg = self._model_or_sim.load_package( package_info[0], @@ -1122,9 +1114,7 @@ def _get_package_info(self, dataset): file_location = data package_info_list = [] file_path, file_name = os.path.split(file_location) - dict_package_name = "{}_{}".format( - package_type, self.path[-2] - ) + dict_package_name = f"{package_type}_{self.path[-2]}" package_info_list.append( (package_type, file_name, file_path, dict_package_name) ) @@ -1214,9 +1204,7 @@ def set_all_data_external( ) and dataset.enabled ): - file_path = "{}_{}.txt".format( - base_name, dataset.structure.name - ) + file_path = f"{base_name}_{dataset.structure.name}.txt" if external_data_folder is not None: # get simulation root path root_path = self._simulation_data.mfpath.get_sim_path() @@ -1280,19 +1268,15 @@ def _write_block(self, fd, block_header, ext_file_action): if self.external_file_name is not None: # write block contents to external file indent_string = self._simulation_data.indent_string - fd.write( - "{}open/close {}\n".format( - indent_string, self.external_file_name - ) - ) + fd.write(f"{indent_string}open/close {self.external_file_name}\n") fd_main = fd fd_path = os.path.split(os.path.realpath(fd.name))[0] try: fd = open(os.path.join(fd_path, self.external_file_name), "w") except: type_, value_, traceback_ = sys.exc_info() - message = "Error reading external file " '"{}"'.format( - self.external_file_name + message = ( + f'Error reading external file "{self.external_file_name}"' ) raise MFDataException( self._container_package.model_name, @@ -1317,8 +1301,7 @@ def _write_block(self, fd, block_header, ext_file_action): >= VerbosityLevel.verbose.value ): print( - " writing data {}.." - ".".format(dataset.structure.name) + f" writing data {dataset.structure.name}..." 
) fd.write( dataset.get_file_entry(ext_file_action=ext_file_action) @@ -1349,10 +1332,10 @@ def _write_block(self, fd, block_header, ext_file_action): mfdata_except=mfde, model=self._container_package.model_name, package=self._container_package._get_pname(), - message="Error occurred while writing " - 'data "{}" in block "{}" to file' - ' "{}".'.format( - dataset.structure.name, self.structure.name, fd.name + message=( + "Error occurred while writing data " + f'"{dataset.structure.name}" in block ' + f'"{self.structure.name}" to file "{fd.name}"' ), ) # write trailing comments @@ -1560,7 +1543,7 @@ def __init__( if filename is None: self._filename = MFFileMgmt.string_to_file_path( - "{}.{}".format(self.model_or_sim.name, package_type) + f"{self.model_or_sim.name}.{package_type}" ) else: if not isinstance(filename, str): @@ -1739,11 +1722,11 @@ def _get_data_str(self, formal, show_data=True): ) ) if self.parent_file is not None and formal: - data_str = "{}parent_file = {}\n\n".format( - data_str, self.parent_file._get_pname() + data_str = ( + f"{data_str}parent_file = {self.parent_file._get_pname()}\n\n" ) else: - data_str = "{}\n".format(data_str) + data_str = f"{data_str}\n" if show_data: for block in self.blocks.values(): if formal: @@ -1906,7 +1889,7 @@ def build_child_packages_container(self, pkg_type, filerecord): pkg_type, self.model_or_sim.model_type ) # create child package object - child_pkgs_name = "utl{}packages".format(pkg_type) + child_pkgs_name = f"utl{pkg_type}packages" child_pkgs_obj = self.package_factory(child_pkgs_name, "") child_pkgs = child_pkgs_obj( self.model_or_sim, self, pkg_type, filerecord, None, package_obj @@ -2137,15 +2120,13 @@ def is_valid(self): and not block.enabled and block.is_allowed() ): - self.last_error = 'Required block "{}" not ' "enabled".format( - block.block_header.name + self.last_error = ( + f'Required block "{block.block_header.name}" not enabled' ) return False # Enabled blocks must be valid if block.enabled and not block.is_valid: - self.last_error = "Invalid block " '"{}"'.format( - block.block_header.name - ) + self.last_error = f'Invalid block "{block.block_header.name}"' return False return True @@ -2199,9 +2180,7 @@ def _load_blocks(self, fd_input_file, strict=True, max_blocks=sys.maxsize): # resolve the correct block to use block_key = block_header_info.name.lower() block_num = 1 - possible_key = "{}-{}".format( - block_header_info.name.lower(), block_num - ) + possible_key = f"{block_header_info.name.lower()}-{block_num}" if possible_key in self.blocks: block_key = possible_key block_header_name = block_header_info.name.lower() @@ -2209,9 +2188,7 @@ def _load_blocks(self, fd_input_file, strict=True, max_blocks=sys.maxsize): block_key in self.blocks and not self.blocks[block_key].is_allowed() ): - block_key = "{}-{}".format( - block_header_name, block_num - ) + block_key = f"{block_header_name}-{block_num}" block_num += 1 if block_key not in self.blocks: @@ -2284,9 +2261,7 @@ def _load_blocks(self, fd_input_file, strict=True, max_blocks=sys.maxsize): >= VerbosityLevel.verbose.value ): print( - " loading block {}...".format( - cur_block.structure.name - ) + f" loading block {cur_block.structure.name}..." 
) # reset comments self.post_block_comments = MFComment( @@ -2475,7 +2450,7 @@ def _write_blocks(self, fd, ext_file_action): self.simulation_data.verbosity_level.value >= VerbosityLevel.verbose.value ): - print(" writing block {}...".format(block.structure.name)) + print(f" writing block {block.structure.name}...") # write block block.write(fd, ext_file_action=ext_file_action) block_num += 1 @@ -2602,7 +2577,7 @@ def __getitem__(self, k): if isinstance(k, int): if k < len(self._packages): return self._packages[k] - raise ValueError("Package index {} does not exist.".format(k)) + raise ValueError(f"Package index {k} does not exist.") def __setattr__(self, key, value): if ( @@ -2634,13 +2609,11 @@ def __default_file_path_base(self, file_path, suffix=""): file_name = ".".join(stem_lst[:-1]) if len(stem_lst) > 1: file_ext = stem_lst[-1] - return "{}.{}{}.{}".format( - file_name, file_ext, suffix, self._pkg_type - ) + return f"{file_name}.{file_ext}{suffix}.{self._pkg_type}" elif suffix != "": - return "{}.{}".format(stem, self._pkg_type) + return f"{stem}.{self._pkg_type}" else: - return "{}.{}.{}".format(stem, suffix, self._pkg_type) + return f"{stem}.{suffix}.{self._pkg_type}" def __file_path_taken(self, possible_path): for package in self._packages: diff --git a/flopy/mf6/modflow/mfsimulation.py b/flopy/mf6/modflow/mfsimulation.py index f230d38d8..c747e72ba 100644 --- a/flopy/mf6/modflow/mfsimulation.py +++ b/flopy/mf6/modflow/mfsimulation.py @@ -303,10 +303,9 @@ def set_sci_note_lower_thres(self, value): def _update_str_format(self): """Update floating point formatting strings.""" - self.reg_format_str = "{:.%dE}" % self.float_precision - self.sci_format_str = "{:%d.%df" "}" % ( - self.float_characters, - self.float_precision, + self.reg_format_str = f"{{:.{self.float_precision}E}}" + self.sci_format_str = ( + f"{{:{self.float_characters}.{self.float_precision}f}}" ) @@ -646,9 +645,7 @@ def load( instance.name_file.load(strict) # load TDIS file - tdis_pkg = "tdis{}".format( - mfstructure.MFStructure().get_version_string() - ) + tdis_pkg = f"tdis{mfstructure.MFStructure().get_version_string()}" tdis_attr = getattr(instance.name_file, tdis_pkg) instance._tdis_file = mftdis.ModflowTdis( instance, filename=tdis_attr.get_data() @@ -684,7 +681,7 @@ def load( model_obj = PackageContainer.model_factory(item[0][:-1].lower()) # load model if verbosity_level.value >= VerbosityLevel.normal.value: - print(" loading model {}...".format(item[0].lower())) + print(f" loading model {item[0].lower()}...") instance._models[item[2]] = model_obj.load( instance, instance.structure.model_struct_objs[item[0].lower()], @@ -734,10 +731,7 @@ def load( instance.simulation_data.verbosity_level.value >= VerbosityLevel.normal.value ): - print( - " skipping package {}.." - ".".format(exgfile[0].lower()) - ) + print(f" skipping package {exgfile[0].lower()}...") continue # get exchange type by removing numbers from exgtype exchange_type = "".join( @@ -751,9 +745,7 @@ def load( exchange_file_num = instance._exg_file_num[exchange_type] instance._exg_file_num[exchange_type] += 1 - exchange_name = "{}_EXG_{}".format( - exchange_type, exchange_file_num - ) + exchange_name = f"{exchange_type}_EXG_{exchange_file_num}" # find package class the corresponds to this exchange type package_obj = instance.package_factory( exchange_type.replace("-", "").lower(), "" @@ -791,8 +783,7 @@ def load( ) if verbosity_level.value >= VerbosityLevel.normal.value: print( - " loading exchange package {}.." 
- ".".format(exchange_file._get_pname()) + f" loading exchange package {exchange_file._get_pname()}..." ) exchange_file.load(strict) instance._exchange_files[exgfile[1]] = exchange_file @@ -825,18 +816,14 @@ def load( >= VerbosityLevel.normal.value ): print( - " skipping package {}.." - ".".format(solution_info[0].lower()) + f" skipping package {solution_info[0].lower()}..." ) continue ims_file = mfims.ModflowIms( instance, filename=solution_info[1], pname=solution_info[2] ) if verbosity_level.value >= VerbosityLevel.normal.value: - print( - " loading ims package {}.." - ".".format(ims_file._get_pname()) - ) + print(f" loading ims package {ims_file._get_pname()}...") ims_file.load(strict) instance.simulation_data.mfpath.set_last_accessed_path() @@ -879,7 +866,7 @@ def check(self, f=None, verbose=True, level=1): # check models for model in self._models.values(): - print('Checking model "{}"...'.format(model.name)) + print(f'Checking model "{model.name}"...') chk_list.append(model.check(f, verbose, level)) print("Checking for missing simulation packages...") @@ -951,7 +938,7 @@ def load_package( else: package_abbr = "GWF" # build package name and package - gnc_name = "{}-GNC_{}".format(package_abbr, self._gnc_file_num) + gnc_name = f"{package_abbr}-GNC_{self._gnc_file_num}" ghost_node_file = mfgwfgnc.ModflowGwfgnc( self, filename=fname, @@ -971,7 +958,7 @@ def load_package( else: package_abbr = "GWF" # build package name and package - mvr_name = "{}-MVR_{}".format(package_abbr, self._mvr_file_num) + mvr_name = f"{package_abbr}-MVR_{self._mvr_file_num}" mover_file = mfgwfmvr.ModflowGwfmvr( self, filename=fname, @@ -1076,7 +1063,7 @@ def register_ims_package(self, ims_file, model_list): # create unique file/package name if ims_file.package_name is None: file_num = len(self._ims_files) - 1 - ims_file.package_name = "ims_{}".format(file_num) + ims_file.package_name = f"ims_{file_num}" if ims_file.filename in self._ims_files: ims_file.filename = MFFileMgmt.unique_file_name( ims_file.filename, self._ims_files @@ -1115,7 +1102,7 @@ def register_ims_package(self, ims_file, model_list): # associate any models in the model list to this # simulation file version_string = mfstructure.MFStructure().get_version_string() - ims_pkg = "ims{}".format(version_string) + ims_pkg = f"ims{version_string}" new_record = [ims_pkg, ims_file.filename] for model in model_list: new_record.append(model) @@ -1138,7 +1125,7 @@ def _rename_package_group(group_dict, name): package_type_count = {} for package in group_dict.values(): if package.package_type not in package_type_count: - package.filename = "{}.{}".format(name, package.package_type) + package.filename = f"{name}.{package.package_type}" package_type_count[package.package_type] = 1 else: package_type_count[package.package_type] += 1 @@ -1158,9 +1145,7 @@ def rename_all_packages(self, name): """ if self._tdis_file is not None: - self._tdis_file.filename = "{}.{}".format( - name, self._tdis_file.package_type - ) + self._tdis_file.filename = f"{name}.{self._tdis_file.package_type}" self._rename_package_group(self._exchange_files, name) self._rename_package_group(self._ims_files, name) @@ -1270,10 +1255,7 @@ def write_simulation( self.simulation_data.verbosity_level.value >= VerbosityLevel.normal.value ): - print( - " writing ims package {}.." 
- ".".format(ims_file._get_pname()) - ) + print(f" writing ims package {ims_file._get_pname()}...") ims_file.write(ext_file_action=ext_file_action) # write exchange files @@ -1367,7 +1349,7 @@ def write_simulation( self.simulation_data.verbosity_level.value >= VerbosityLevel.normal.value ): - print(" writing package {}...".format(pp._get_pname())) + print(f" writing package {pp._get_pname()}...") pp.write(ext_file_action=ext_file_action) # FIX: model working folder should be model name file folder @@ -1378,7 +1360,7 @@ def write_simulation( self.simulation_data.verbosity_level.value >= VerbosityLevel.normal.value ): - print(" writing model {}...".format(model.name)) + print(f" writing model {model.name}...") model.write(ext_file_action=ext_file_action) self.simulation_data.mfpath.set_last_accessed_path() @@ -1565,9 +1547,7 @@ def get_exchange_file(self, filename): if filename in self._exchange_files: return self._exchange_files[filename] else: - excpt_str = 'Exchange file "{}" can not be found' ".".format( - filename - ) + excpt_str = f'Exchange file "{filename}" can not be found.' raise FlopyException(excpt_str) def get_mvr_file(self, filename): @@ -1586,7 +1566,7 @@ def get_mvr_file(self, filename): if filename in self._mover_files: return self._mover_files[filename] else: - excpt_str = 'MVR file "{}" can not be ' "found.".format(filename) + excpt_str = f'MVR file "{filename}" can not be found.' raise FlopyException(excpt_str) def get_gnc_file(self, filename): @@ -1605,7 +1585,7 @@ def get_gnc_file(self, filename): if filename in self._ghost_node_files: return self._ghost_node_files[filename] else: - excpt_str = 'GNC file "{}" can not be ' "found.".format(filename) + excpt_str = f'GNC file "{filename}" can not be found.' raise FlopyException(excpt_str) def register_exchange_file(self, package): @@ -1816,7 +1796,7 @@ def register_package( elif package.package_type.lower() == "tdis": self._tdis_file = package struct_root = mfstructure.MFStructure() - tdis_pkg = "tdis{}".format(struct_root.get_version_string()) + tdis_pkg = f"tdis{struct_root.get_version_string()}" tdis_attr = getattr(self.name_file, tdis_pkg) try: tdis_attr.set_data(package.filename) @@ -1918,7 +1898,7 @@ def register_model(self, model, model_type, model_name, model_namefile): # get model structure from model type if model_type not in self.structure.model_struct_objs: - message = 'Invalid model type: "{}".'.format(model_type) + message = f'Invalid model type: "{model_type}".' 
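
A note on the _update_str_format hunk above: doubled braces in an f-string render as literal braces, so the rewritten lines build the same format template that the old %-interpolation produced, to be applied to numbers later. A minimal standalone sketch of that idiom (the two names mirror the attributes in the hunk, but nothing below imports flopy):

float_precision = 3
float_characters = 10

# "{{" and "}}" emit literal braces; only the width/precision are
# substituted now, so the result is itself a format template.
reg_format_str = f"{{:.{float_precision}E}}"
sci_format_str = f"{{:{float_characters}.{float_precision}f}}"

assert reg_format_str == "{:.3E}"
assert sci_format_str == "{:10.3f}"
assert reg_format_str.format(12345.678) == "1.235E+04"
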
type_, value_, traceback_ = sys.exc_info() raise MFDataException( model.name, diff --git a/flopy/mf6/utils/binaryfile_utils.py b/flopy/mf6/utils/binaryfile_utils.py index cb119ada3..596ff3ffe 100644 --- a/flopy/mf6/utils/binaryfile_utils.py +++ b/flopy/mf6/utils/binaryfile_utils.py @@ -97,7 +97,7 @@ def __init__(self, mfdict, path, key): print("\nValid Keys Are:\n") for valid_key in self.dataDict: print(valid_key) - raise KeyError("Invalid key {}".format(key)) + raise KeyError(f"Invalid key {key}") def _querybinarydata(self, key): # Basic definition to get output from modflow binary files for @@ -178,33 +178,25 @@ def _get_binary_file_object(self, path, bintype, key): try: return bf.CellBudgetFile(path, precision="double") except AssertionError: - raise AssertionError( - "{} does not exist".format(self.dataDict[key]) - ) + raise AssertionError(f"{self.dataDict[key]} does not exist") elif bintype == "HDS": try: return bf.HeadFile(path, precision="double") except AssertionError: - raise AssertionError( - "{} does not exist".format(self.dataDict[key]) - ) + raise AssertionError(f"{self.dataDict[key]} does not exist") elif bintype == "DDN": try: return bf.HeadFile(path, text="drawdown", precision="double") except AssertionError: - raise AssertionError( - "{} does not exist".format(self.dataDict[key]) - ) + raise AssertionError(f"{self.dataDict[key]} does not exist") elif bintype == "UCN": try: return bf.UcnFile(path, precision="single") except AssertionError: - raise AssertionError( - "{} does not exist".format(self.dataDict[key]) - ) + raise AssertionError(f"{self.dataDict[key]} does not exist") else: raise AssertionError() diff --git a/flopy/mf6/utils/binarygrid_util.py b/flopy/mf6/utils/binarygrid_util.py index 93ba50c97..2b498ab22 100644 --- a/flopy/mf6/utils/binarygrid_util.py +++ b/flopy/mf6/utils/binarygrid_util.py @@ -67,7 +67,7 @@ def __init__(self, filename, precision="double", verbose=False): self.filename = filename if self.verbose: - print("\nProcessing binary grid file: {}".format(filename)) + print(f"\nProcessing binary grid file: {filename}") # open the grb file self.file = open(filename, "rb") @@ -118,21 +118,14 @@ def __init__(self, filename, precision="double", verbose=False): s = "" if nd > 0: s = shp - msg = " File contains data for {} ".format( - key - ) + "with shape {}".format(s) - print(msg) + print(f" File contains data for {key} with shape {s}") if self.verbose: - msg = "Attempting to read {} ".format( - self._ntxt - ) + "records from {}".format(filename) - print(msg) + print(f"Attempting to read {self._ntxt} records from {filename}") for key in self._recordkeys: if self.verbose: - msg = " Reading {}".format(key) - print(msg) + print(f" Reading {key}") dt, nd, shp = self._recorddict[key] # read array data if nd > 0: @@ -152,13 +145,9 @@ def __init__(self, filename, precision="double", verbose=False): if self.verbose: if nd == 0: - msg = " {} = {}".format(key, v) - print(msg) + print(f" {key} = {v}") else: - msg = " {}: ".format(key) + "min = {} max = {}".format( - v.min(), v.max() - ) - print(msg) + print(f" {key}: min = {v.min()} max = {v.max()}") # close the file self.file.close() @@ -254,7 +243,7 @@ def __set_modelgrid(self): ) except: - print("could not set model grid for {}".format(self.file.name)) + print(f"could not set model grid for {self.file.name}") self.__modelgrid = modelgrid @@ -302,8 +291,7 @@ def __get_iverts(self): i1 = iavert[ivert + 1] iverts.append((javert[i0:i1]).tolist()) if self.verbose: - msg = "returning iverts from {}".format(self.file.name) 
- print(msg) + print(f"returning iverts from {self.file.name}") return iverts def __get_verts(self): @@ -328,8 +316,7 @@ def __get_verts(self): for idx in range(shpvert[0]) ] if self.verbose: - msg = "returning verts from {}".format(self.file.name) - print(msg) + print(f"returning verts from {self.file.name}") return verts def __get_cellcenters(self): @@ -349,8 +336,7 @@ def __get_cellcenters(self): y = self._datadict["CELLY"] xycellcenters = np.column_stack((x, y)) if self.verbose: - msg = "returning cell centers from {}".format(self.file.name) - print(msg) + print(f"returning cell centers from {self.file.name}") return xycellcenters # properties diff --git a/flopy/mf6/utils/createpackages.py b/flopy/mf6/utils/createpackages.py index d2e83491d..45c4e02fd 100644 --- a/flopy/mf6/utils/createpackages.py +++ b/flopy/mf6/utils/createpackages.py @@ -19,9 +19,7 @@ class PackageLevel(Enum): def build_doc_string(param_name, param_type, param_desc, indent): - return "{}{} : {}\n{}* {}".format( - indent, param_name, param_type, indent * 2, param_desc - ) + return f"{indent}{param_name} : {param_type}\n{indent * 2}* {param_desc}" def generator_type(data_type): @@ -75,11 +73,11 @@ def build_dfn_string(dfn_list): for data_item in dfn_list: line_length += 1 if not first_di: - dfn_string = "{},\n{}".format(dfn_string, leading_spaces) + dfn_string = f"{dfn_string},\n{leading_spaces}" line_length = len(leading_spaces) else: first_di = False - dfn_string = "{}{}".format(dfn_string, "[") + dfn_string = f"{dfn_string}[" first_line = True # process each line in a data item for line in data_item: @@ -91,38 +89,34 @@ def build_dfn_string(dfn_list): line = line.replace('"', "'") line_length += len(line) + 4 if not first_line: - dfn_string = "{},".format(dfn_string) + dfn_string = f"{dfn_string}," if line_length < 77: # added text fits on the current line if first_line: - dfn_string = '{}"{}"'.format(dfn_string, line) + dfn_string = f'{dfn_string}"{line}"' else: - dfn_string = '{} "{}"'.format(dfn_string, line) + dfn_string = f'{dfn_string} "{line}"' else: # added text does not fit on the current line line_length = len(line) + len(leading_spaces) + 2 if line_length > 79: # added text too long to fit on a single line, wrap # text as needed - line = '"{}"'.format(line) + line = f'"{line}"' lines = textwrap.wrap( line, 75 - len(leading_spaces), drop_whitespace=True, ) - lines[0] = "{} {}".format(leading_spaces, lines[0]) - line_join = ' "\n{} "'.format(leading_spaces) - dfn_string = "{}\n{}".format( - dfn_string, line_join.join(lines) - ) + lines[0] = f"{leading_spaces} {lines[0]}" + line_join = f' "\n{leading_spaces} "' + dfn_string = f"{dfn_string}\n{line_join.join(lines)}" else: - dfn_string = '{}\n{} "{}"'.format( - dfn_string, leading_spaces, line - ) + dfn_string = f'{dfn_string}\n{leading_spaces} "{line}"' first_line = False - dfn_string = "{}{}".format(dfn_string, "]") - dfn_string = "{}{}".format(dfn_string, "]") + dfn_string = f"{dfn_string}]" + dfn_string = f"{dfn_string}]" return dfn_string @@ -130,38 +124,34 @@ def create_init_var(clean_ds_name, data_structure_name, init_val=None): if init_val is None: init_val = clean_ds_name - init_var = " self.{} = self.build_mfdata(".format(clean_ds_name) + init_var = f" self.{clean_ds_name} = self.build_mfdata(" leading_spaces = " " * len(init_var) if len(init_var) + len(data_structure_name) + 2 > 79: - second_line = '\n "{}",'.format(data_structure_name) + second_line = f'\n "{data_structure_name}",' if len(second_line) + len(clean_ds_name) + 2 > 79: - init_var = 
"{}{}\n {})".format( - init_var, second_line, init_val - ) + init_var = f"{init_var}{second_line}\n {init_val})" else: - init_var = "{}{} {})".format(init_var, second_line, init_val) + init_var = f"{init_var}{second_line} {init_val})" else: - init_var = '{}"{}",'.format(init_var, data_structure_name) + init_var = f'{init_var}"{data_structure_name}",' if len(init_var) + len(clean_ds_name) + 2 > 79: - init_var = "{}\n{}{})".format(init_var, leading_spaces, init_val) + init_var = f"{init_var}\n{leading_spaces}{init_val})" else: - init_var = "{} {})".format(init_var, init_val) + init_var = f"{init_var} {init_val})" return init_var def create_basic_init(clean_ds_name): - return " self.{} = {}\n".format(clean_ds_name, clean_ds_name) + return f" self.{clean_ds_name} = {clean_ds_name}\n" def create_property(clean_ds_name): - return " {} = property(get_{}, set_{})".format( - clean_ds_name, clean_ds_name, clean_ds_name - ) + return f" {clean_ds_name} = property(get_{clean_ds_name}, set_{clean_ds_name})" def format_var_list(base_string, var_list, is_tuple=False): if is_tuple: - base_string = "{}(".format(base_string) + base_string = f"{base_string}(" extra_chars = 4 else: extra_chars = 2 @@ -171,43 +161,39 @@ def format_var_list(base_string, var_list, is_tuple=False): for item in var_list: if line_length + len(item) + extra_chars > 80: leading_spaces = " " - base_string = "{}\n{}".format(base_string, leading_spaces) + base_string = f"{base_string}\n{leading_spaces}" line_length = len(leading_spaces) break for index, item in enumerate(var_list): if is_tuple: - item = "'{}'".format(item) + item = f"'{item}'" if index == len(var_list) - 1: next_var_str = item else: - next_var_str = "{},".format(item) + next_var_str = f"{item}," line_length += len(item) + extra_chars if line_length > 80: - base_string = "{}\n{}{}".format( - base_string, leading_spaces, next_var_str - ) + base_string = f"{base_string}\n{leading_spaces}{next_var_str}" else: if base_string[-1] == ",": - base_string = "{} ".format(base_string) - base_string = "{}{}".format(base_string, next_var_str) + base_string = f"{base_string} " + base_string = f"{base_string}{next_var_str}" if is_tuple: - return "{}))".format(base_string) + return f"{base_string}))" else: - return "{})".format(base_string) + return f"{base_string})" def create_package_init_var(parameter_name, package_abbr, data_name): - one_line = " self._{}_package = self.build_child_package(".format( - package_abbr + one_line = ( + f" self._{package_abbr}_package = self.build_child_package(" ) - one_line_b = '"{}", {},'.format(package_abbr, parameter_name) + one_line_b = f'"{package_abbr}", {parameter_name},' leading_spaces = " " * len(one_line) - two_line = '\n{}"{}",'.format(leading_spaces, data_name) - three_line = "\n{}self._{}_filerecord)".format( - leading_spaces, package_abbr - ) - return "{}{}{}{}".format(one_line, one_line_b, two_line, three_line) + two_line = f'\n{leading_spaces}"{data_name}",' + three_line = f"\n{leading_spaces}self._{package_abbr}_filerecord)" + return f"{one_line}{one_line_b}{two_line}{three_line}" def add_var( @@ -241,15 +227,13 @@ def add_var( # add to parameter list if default_value is None: default_value = "None" - init_param_list.append("{}={}".format(clean_ds_name, default_value)) + init_param_list.append(f"{clean_ds_name}={default_value}") # add to set parameter list - set_param_list.append("{}={}".format(clean_ds_name, clean_ds_name)) + set_param_list.append(f"{clean_ds_name}={clean_ds_name}") else: clean_parameter_name = 
datautil.clean_name(parameter_name) # init hidden variable - init_vars.append( - create_init_var("_{}".format(clean_ds_name), name, "None") - ) + init_vars.append(create_init_var(f"_{clean_ds_name}", name, "None")) # init child package init_vars.append( create_package_init_var( @@ -257,11 +241,9 @@ def add_var( ) ) # add to parameter list - init_param_list.append("{}=None".format(clean_parameter_name)) + init_param_list.append(f"{clean_parameter_name}=None") # add to set parameter list - set_param_list.append( - "{}={}".format(clean_parameter_name, clean_parameter_name) - ) + set_param_list.append(f"{clean_parameter_name}={clean_parameter_name}") package_properties.append(create_property(clean_ds_name)) doc_string.add_parameter(description, model_parameter=True) @@ -269,7 +251,7 @@ def add_var( if class_vars is not None: gen_type = generator_type(data_type) if gen_type != "ScalarTemplateGenerator": - new_class_var = " {} = {}(".format(clean_ds_name, gen_type) + new_class_var = f" {clean_ds_name} = {gen_type}(" class_vars.append(format_var_list(new_class_var, path, True)) return gen_type return None @@ -298,11 +280,11 @@ def build_init_string( ) line_chars = len(param_list[1]) + len(whitespace) + 1 continue - init_string = "{},\n{}{}".format(init_string, whitespace, param) + init_string = f"{init_string},\n{whitespace}{param}" line_chars = len(param) + len(whitespace) + 1 else: - init_string = "{}, {}".format(init_string, param) - return "{}):\n".format(init_string) + init_string = f"{init_string}, {param}" + return f"{init_string}):\n" def build_model_load(model_type): @@ -339,8 +321,7 @@ def build_model_init_vars(param_list): for param in param_list: param_parts = param.split("=") init_var_list.append( - " self.name_file.{}.set_data({}" - ")".format(param_parts[0], param_parts[0]) + f" self.name_file.{param_parts[0]}.set_data({param_parts[0]})" ) return "\n".join(init_var_list) @@ -432,7 +413,7 @@ def create_packages(): template_gens = [] dfn_string = build_dfn_string(package[3]) package_abbr = clean_class_string( - "{}{}".format(clean_class_string(package[2]), package[0].file_type) + f"{clean_class_string(package[2])}{package[0].file_type}" ).lower() package_name = clean_class_string( "{}{}{}".format( @@ -445,9 +426,7 @@ def create_packages(): doc_string = mfdatautil.MFDocString(package[0].description) else: if package[2]: - package_container_text = " within a {} model".format( - package[2] - ) + package_container_text = f" within a {package[2]} model" else: package_container_text = "" ds = "Modflow{} defines a {} package{}.".format( @@ -571,16 +550,14 @@ def create_packages(): import_string = "from .. import mfpackage" if template_gens: - import_string = "{}\nfrom ..data.mfdatautil import ".format( - import_string - ) + import_string = f"{import_string}\nfrom ..data.mfdatautil import " first_string = True for template in template_gens: if first_string: - import_string = "{}{}".format(import_string, template) + import_string = f"{import_string}{template}" first_string = False else: - import_string = "{}, {}".format(import_string, template) + import_string = f"{import_string}, {template}" # add extra docstrings for additional variables doc_string.add_parameter( " filename : String\n File name for this package." 
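
Most of the createpackages.py hunks in this stretch are the same mechanical translation: base = "{}{}".format(base, x) becomes base = f"{base}{x}". A sketch showing the two spellings build identical strings (the list contents are illustrative, not flopy data); "".join would avoid the repeated re-copying, but keeping the append-at-a-time shape keeps this a pure style change:

items = ["kh", "kv", "ss"]

old_style = ""
for item in items:
    old_style = "{}{}, ".format(old_style, item)

new_style = ""
for item in items:
    new_style = f"{new_style}{item}, "

assert old_style == new_style == "kh, kv, ss, "
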
@@ -616,7 +593,7 @@ def create_packages(): ) ) init_string_full = init_string_def - init_string_model = "{}, simulation".format(init_string_def) + init_string_model = f"{init_string_def}, simulation" # add variables to init string doc_string.add_parameter( " loading_package : bool\n " @@ -635,8 +612,7 @@ def create_packages(): beginning_of_list=True, ) init_string_full = ( - "{}, simulation, loading_package=" - "False".format(init_string_full) + f"{init_string_full}, simulation, loading_package=False" ) else: doc_string.add_parameter( @@ -646,8 +622,8 @@ def create_packages(): "to model when it is initialized.", beginning_of_list=True, ) - init_string_full = "{}, model, loading_package=False".format( - init_string_full + init_string_full = ( + f"{init_string_full}, model, loading_package=False" ) init_param_list.append("filename=None") init_param_list.append("pname=None") @@ -691,9 +667,7 @@ def create_packages(): # open new Packages file pb_file = io.open( - os.path.join( - util_path, "..", "modflow", "mf{}.py".format(package_name) - ), + os.path.join(util_path, "..", "modflow", f"mf{package_name}.py"), "w", newline="\n", ) @@ -718,18 +692,15 @@ def create_packages(): "\n\nclass Utl{}Packages(mfpackage.MFChildPackage" "s):\n".format(package_short_name) ) - chld_var = ' package_abbr = "utl{}packages"\n\n'.format( - package_short_name + chld_var = ( + f' package_abbr = "utl{package_short_name}packages"\n\n' ) chld_init = " def initialize(self" chld_init = build_init_string( chld_init, init_param_list[:-1], whsp_1 ) init_pkg = "\n self._init_package(new_package, filename)" - params_init = ( - " new_package = ModflowUtl{}(" - "self._model".format(package_short_name) - ) + params_init = f" new_package = ModflowUtl{package_short_name}(self._model" params_init = build_init_string( params_init, set_param_list, whsp_2 ) @@ -755,10 +726,7 @@ def create_packages(): append_pkg = ( "\n self._append_package(new_package, filename)" ) - params_appn = ( - " new_package = ModflowUtl{}(" - "self._model".format(package_short_name) - ) + params_appn = f" new_package = ModflowUtl{package_short_name}(self._model" params_appn = build_init_string( params_appn, set_param_list, whsp_2 ) @@ -771,7 +739,7 @@ def create_packages(): chld_doc_string, package_short_name, package_short_name ) ) - chld_doc_string = '{} """\n'.format(chld_doc_string) + chld_doc_string = f'{chld_doc_string} """\n' packages_str = "{}{}{}{}{}{}{}{}{}\n".format( chld_cls, chld_doc_string, @@ -787,8 +755,7 @@ def create_packages(): pb_file.close() init_file.write( - "from .mf{} import " - "Modflow{}\n".format(package_name, package_name.title()) + f"from .mf{package_name} import Modflow{package_name.title()}\n" ) if package[0].dfn_type == mfstructure.DfnType.model_name_file: @@ -819,10 +786,10 @@ def create_packages(): beginning_of_list=True, model_parameter=True, ) - doc_string.description = "Modflow{} defines a {} model".format( - model_name, model_name + doc_string.description = ( + f"Modflow{model_name} defines a {model_name} model" ) - class_var_string = " model_type = '{}'\n".format(model_name) + class_var_string = f" model_type = '{model_name}'\n" mparent_init_string = " super().__init__(" spaces = " " * len(mparent_init_string) mparent_init_string = ( @@ -856,17 +823,14 @@ def create_packages(): load_txt, ) md_file = io.open( - os.path.join( - util_path, "..", "modflow", "mf{}.py".format(model_name) - ), + os.path.join(util_path, "..", "modflow", f"mf{model_name}.py"), "w", newline="\n", ) md_file.write(package_string) md_file.close() 
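
The rewritten init_file.write(...) calls on either side of this point lean on replacement fields accepting arbitrary expressions, including method calls such as .title() and .capitalize(). A sketch under made-up names (these are not real flopy packages):

package_name = "gwfdis"
model_name = "gwf"

# the method call is evaluated inside the replacement field
pkg_import = f"from .mf{package_name} import Modflow{package_name.title()}\n"
mdl_import = f"from .mf{model_name} import Modflow{model_name.capitalize()}\n"

assert pkg_import == "from .mfgwfdis import ModflowGwfdis\n"
assert mdl_import == "from .mfgwf import ModflowGwf\n"
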
init_file.write( - "from .mf{} import " - "Modflow{}\n".format(model_name, model_name.capitalize()) + f"from .mf{model_name} import Modflow{model_name.capitalize()}\n" ) init_file.close() diff --git a/flopy/mf6/utils/generate_classes.py b/flopy/mf6/utils/generate_classes.py index d9ae60a3c..e21780a44 100644 --- a/flopy/mf6/utils/generate_classes.py +++ b/flopy/mf6/utils/generate_classes.py @@ -24,17 +24,17 @@ def delete_files(files, pth, allow_failure=False, exclude=None): continue fpth = os.path.join(pth, fn) try: - print(" removing...{}".format(fn)) + print(f" removing...{fn}") os.remove(fpth) except: - print("could not remove...{}".format(fn)) + print(f"could not remove...{fn}") if not allow_failure: return False return True def list_files(pth, exts=["py"]): - print("\nLIST OF FILES IN {}".format(pth)) + print(f"\nLIST OF FILES IN {pth}") files = [ entry for entry in os.listdir(pth) @@ -45,7 +45,7 @@ def list_files(pth, exts=["py"]): ext = os.path.splitext(fn)[1][1:].lower() if ext in exts: idx += 1 - print(" {:5d} - {}".format(idx, fn)) + print(f" {idx:5d} - {fn}") return @@ -66,12 +66,10 @@ def download_dfn(branch, new_dfn_pth): mf6url = "https://github.com/MODFLOW-USGS/modflow6/archive/{}.zip" mf6url = mf6url.format(branch) - print(" Downloading MODFLOW 6 repository from {}".format(mf6url)) + print(f" Downloading MODFLOW 6 repository from {mf6url}") with tempfile.TemporaryDirectory() as tmpdirname: pymake.download_and_unzip(mf6url, tmpdirname) - downloaded_dfn_pth = os.path.join( - tmpdirname, "modflow6-{}".format(branch) - ) + downloaded_dfn_pth = os.path.join(tmpdirname, f"modflow6-{branch}") downloaded_dfn_pth = os.path.join( downloaded_dfn_pth, "doc", "mf6io", "mf6ivar", "dfn" ) @@ -86,7 +84,7 @@ def backup_existing_dfns(flopy_dfn_path): shutil.copytree(flopy_dfn_path, backup_folder) assert os.path.isdir( backup_folder - ), "dfn backup files not found: {}".format(backup_folder) + ), f"dfn backup files not found: {backup_folder}" return @@ -100,7 +98,7 @@ def replace_dfn_files(new_dfn_pth, flopy_dfn_path): filenames = os.listdir(new_dfn_pth) for filename in filenames: filename_w_path = os.path.join(new_dfn_pth, filename) - print(" copying..{}".format(filename)) + print(f" copying..{filename}") shutil.copy(filename_w_path, flopy_dfn_path) @@ -146,23 +144,17 @@ def generate_classes(branch="master", dfnpath=None, backup=True): # download the dfn files and put them in flopy.mf6.data or update using # user provided dfnpath if dfnpath is None: - print( - " Updating the MODFLOW 6 classes using the branch: {}".format( - branch - ) - ) + print(f" Updating the MODFLOW 6 classes using the branch: {branch}") timestr = time.strftime("%Y%m%d-%H%M%S") - new_dfn_pth = os.path.join(flopypth, "mf6", "data", "dfn_" + timestr) + new_dfn_pth = os.path.join(flopypth, "mf6", "data", f"dfn_{timestr}") download_dfn(branch, new_dfn_pth) else: - print(" Updating the MODFLOW 6 classes using {}".format(dfnpath)) + print(f" Updating the MODFLOW 6 classes using {dfnpath}") assert os.path.isdir(dfnpath) new_dfn_pth = dfnpath if backup: - print( - " Backup existing definition files in: {}".format(flopy_dfn_path) - ) + print(f" Backup existing definition files in: {flopy_dfn_path}") backup_existing_dfns(flopy_dfn_path) print(" Replacing existing definition files with new ones.") diff --git a/flopy/mf6/utils/lakpak_utils.py b/flopy/mf6/utils/lakpak_utils.py index 332da3c4c..1b0bd51ba 100644 --- a/flopy/mf6/utils/lakpak_utils.py +++ b/flopy/mf6/utils/lakpak_utils.py @@ -88,8 +88,7 @@ def 
get_lak_connections(modelgrid, lake_map, idomain=None, bedleak=None): # check dimensions of idomain if idomain.shape != shape3d: raise ValueError( - "shape of idomain " - "({}) not equal to {}".format(idomain.shape, shape3d) + f"shape of idomain ({idomain.shape}) not equal to {shape3d}" ) # convert bedleak to numpy array if necessary @@ -104,8 +103,7 @@ def get_lak_connections(modelgrid, lake_map, idomain=None, bedleak=None): # check the dimensions of the bedleak array if bedleak.shape != shape2d: raise ValueError( - "shape of bedleak " - "({}) not equal to {}".format(bedleak.shape, shape2d) + f"shape of bedleak ({bedleak.shape}) not equal to {shape2d}" ) # get the model grid elevations and reset lake_map using idomain diff --git a/flopy/mf6/utils/mfobservation.py b/flopy/mf6/utils/mfobservation.py index 582b570ba..dc483441b 100644 --- a/flopy/mf6/utils/mfobservation.py +++ b/flopy/mf6/utils/mfobservation.py @@ -255,9 +255,7 @@ def get_dataframe( keys = self._key_list(keys) for key in keys: if key not in data: - raise KeyError( - "Supplied data key: {} is not valid".format(key) - ) + raise KeyError(f"Supplied data key: {key} is not valid") else: pass @@ -410,8 +408,7 @@ def __init__(self, mfdict, path, key, **kwargs): pass else: - err = "{} is not a valid dictionary key\n".format(str(key)) - raise KeyError(err) + raise KeyError(f"{key} is not a valid dictionary key") def _query_observation_data(self, modelpath, key): # get absolute path for observation data files @@ -448,7 +445,7 @@ def _check_for_observations(self): if check > 1: multi_observations = [i for i in partial_key if i == line] for i in range(len(multi_observations)): - obs8_file = "OBS8_{}".format(i + 1) + obs8_file = f"OBS8_{i + 1}" # check for single observations, continuous observations self._get_obsfile_names( multi_observations[i], obs8_file, "SINGLE" @@ -532,9 +529,7 @@ def _get_package_type(self, obstypes): return "GWF" else: - raise KeyError( - "{} is not a valid observation type".format(package) - ) + raise KeyError(f"{package} is not a valid observation type") @staticmethod def getkeys(mfdict, path): diff --git a/flopy/mf6/utils/output_util.py b/flopy/mf6/utils/output_util.py index f0e3ce2d4..0a1638be5 100644 --- a/flopy/mf6/utils/output_util.py +++ b/flopy/mf6/utils/output_util.py @@ -52,7 +52,7 @@ def __init__(self, obj): nam_file = ml.model_nam_file[:-4] self._lst = ml.name_file.blocks["options"].datasets["list"].array if self._lst is None: - self._lst = "{}.lst".format(nam_file) + self._lst = f"{nam_file}.lst" setattr(self, "list", self.__list) self._methods.append("list()") if isinstance(obj, ModelInterface): @@ -92,7 +92,7 @@ def __init__(self, obj): self, "zonebudget", methods["zonebudget"] ) self._methods.append("zonebudget()") - self._methods.append("{}()".format(rectype)) + self._methods.append(f"{rectype}()") if rectype == "obs": data = None for ky in obj._simulation_data.mfdata: @@ -125,7 +125,7 @@ def __init__(self, obj): if rectype == "package_convergence": rectype = "csv" - attr_name = "_{}".format(rectype) + attr_name = f"_{rectype}" # need a check for obs.... 
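
The output_util.py hunks around this point use f-strings to build attribute names (attr_name = f"_{rectype}") that hasattr/setattr/getattr then manage. A hypothetical sketch of that pattern, with an invented class and record types rather than the flopy API:

class OutputRegistry:
    def register(self, rectype, filename):
        attr_name = f"_{rectype}"  # e.g. "_hds", "_cbc", "_csv"
        if not hasattr(self, attr_name):
            setattr(self, attr_name, [filename])
        else:
            getattr(self, attr_name).append(filename)

reg = OutputRegistry()
reg.register("hds", "model.hds")
reg.register("hds", "model2.hds")
assert reg._hds == ["model.hds", "model2.hds"]
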
if data is not None: if not hasattr(self, attr_name): @@ -142,10 +142,10 @@ def __init__(self, obj): else: setattr(self, rectype, methods[rectype]) - self._methods.append("{}()".format(rectype)) + self._methods.append(f"{rectype}()") data = obj.data_list[2].data for f in data.keys(): - attr_name = "_{}".format(rectype) + attr_name = f"_{rectype}" if not hasattr(self, attr_name): setattr(self, attr_name, [f]) else: @@ -187,7 +187,7 @@ def get_layerfile_data(self, f=data, text=rectype): return setattr(self.__class__, rectype, get_layerfile_data) - self._methods.append("{}()".format(rectype)) + self._methods.append(f"{rectype}()") def methods(self): """ @@ -250,7 +250,7 @@ def __zonebudget(self, izone): is None ): grb = os.path.join( - self._sim_ws, dis.filename + ".grb" + self._sim_ws, f"{dis.filename}.grb" ) except AttributeError: pass @@ -349,10 +349,7 @@ def __mulitfile_handler(self, f, flist): else: idx = flist.index(f) if idx is None: - err = ( - "File name not found, " - "available files are {}".format(", ".join(flist)) - ) + err = f"File name not found, available files are {', '.join(flist)}" raise FileNotFoundError(err) else: filename = flist[idx] diff --git a/flopy/mf6/utils/postprocessing.py b/flopy/mf6/utils/postprocessing.py index d7212525a..bf4bee94b 100644 --- a/flopy/mf6/utils/postprocessing.py +++ b/flopy/mf6/utils/postprocessing.py @@ -152,6 +152,5 @@ def __check_flowja_size(flowja, ja): """ if flowja.shape != ja.shape: raise ValueError( - "size of flowja ({}) not equal to " - "{}".format(flowja.shape, ja.shape) + f"size of flowja ({flowja.shape}) not equal to {ja.shape}" ) diff --git a/flopy/mf6/utils/reference.py b/flopy/mf6/utils/reference.py index 39c4f8185..7c6044751 100644 --- a/flopy/mf6/utils/reference.py +++ b/flopy/mf6/utils/reference.py @@ -79,7 +79,7 @@ def __init__( yul=None, rotation=0.0, proj4_str=None, - **kwargs + **kwargs, ): self.delc = np.atleast_1d(np.array(delc)) self.delr = np.atleast_1d(np.array(delr)) @@ -260,10 +260,8 @@ def set_spatialreference(self, xul=None, yul=None, rotation=0.0): self._reset() def __repr__(self): - s = "xul:{0: 0: f_nam.write( @@ -501,13 +497,11 @@ def write_name_file(self): if o: replace_text = " REPLACE" if b: - line = "DATA(BINARY) {:5d} {}{}\n".format( - u, f, replace_text - ) + line = f"DATA(BINARY) {u:5d} {f}{replace_text}\n" f_nam.write(line) else: - f_nam.write("DATA {:5d} {}\n".format(u, f)) + f_nam.write(f"DATA {u:5d} {f}\n") # write the output files for u, f, b in zip( @@ -516,9 +510,9 @@ def write_name_file(self): if u == 0: continue if b: - f_nam.write("DATA(BINARY) {:5d} {} REPLACE\n".format(u, f)) + f_nam.write(f"DATA(BINARY) {u:5d} {f} REPLACE\n") else: - f_nam.write("DATA {:5d} {}\n".format(u, f)) + f_nam.write(f"DATA {u:5d} {f}\n") # close the name file f_nam.close() @@ -619,20 +613,11 @@ def load_results(self, **kwargs): if v.lower() == "save budget": savebud = True except Exception as e: - print( - "error reading output filenames " - + "from OC package: {}".format(str(e)) - ) + print(f"error reading output filenames from OC package: {e!s}") - self.hpth = os.path.join( - self.model_ws, "{}.{}".format(self.name, self.hext) - ) - self.dpth = os.path.join( - self.model_ws, "{}.{}".format(self.name, self.dext) - ) - self.cpth = os.path.join( - self.model_ws, "{}.{}".format(self.name, self.cext) - ) + self.hpth = os.path.join(self.model_ws, f"{self.name}.{self.hext}") + self.dpth = os.path.join(self.model_ws, f"{self.name}.{self.dext}") + self.cpth = os.path.join(self.model_ws, f"{self.name}.{self.cext}") hdObj = 
None ddObj = None @@ -657,7 +642,7 @@ def load_results(self, **kwargs): text="subsidence", ) except Exception as e: - print("error loading subsidence.hds:{0}".format(str(e))) + print(f"error loading subsidence.hds:{e!s}") if as_dict: oudic = {} @@ -727,11 +712,11 @@ def load( # similar to modflow command: if file does not exist , try file.nam namefile_path = os.path.join(model_ws, f) if not os.path.isfile(namefile_path) and os.path.isfile( - namefile_path + ".nam" + f"{namefile_path}.nam" ): namefile_path += ".nam" if not os.path.isfile(namefile_path): - raise IOError("cannot find name file: " + str(namefile_path)) + raise IOError(f"cannot find name file: {namefile_path}") # Determine model name from 'f', without any extension or path modelname = os.path.splitext(os.path.basename(f))[0] @@ -739,11 +724,7 @@ def load( # if model_ws is None: # model_ws = os.path.dirname(f) if verbose: - print( - "\nCreating new model with name: {}\n{}\n".format( - modelname, 50 * "-" - ) - ) + print(f"\nCreating new model with name: {modelname}\n{50 * '-'}\n") attribs = mfreadnam.attribs_from_namfile_header( os.path.join(model_ws, f) @@ -755,7 +736,7 @@ def load( exe_name=exe_name, verbose=verbose, model_ws=model_ws, - **attribs + **attribs, ) files_successfully_loaded = [] @@ -821,7 +802,7 @@ def load( ml.free_format_input = True bas.filehandle.seek(start) if verbose: - print("ModflowBas6 free format:{0}\n".format(ml.free_format_input)) + print(f"ModflowBas6 free format:{ml.free_format_input}\n") # load dis dis_key = ext_pkg_d.get("DIS") or ext_pkg_d.get("DISU") @@ -833,7 +814,7 @@ def load( ) files_successfully_loaded.append(disnamdata.filename) if ml.verbose: - print(" {:4s} package load...success".format(dis.name[0])) + print(f" {dis.name[0]:4s} package load...success") assert ml.pop_key_list.pop() == dis_key ext_unit_dict.pop(dis_key).filehandle.close() @@ -890,19 +871,15 @@ def load( files_successfully_loaded.append(item.filename) if ml.verbose: print( - " {:4s} package load...success".format( - item.filetype - ) + f" {item.filetype:4s} package load...success" ) except Exception as e: ml.load_fail = True if ml.verbose: print( - " {:4s} package load...failed".format( - item.filetype - ) + f" {item.filetype:4s} package load...failed" ) - print(" {!s}".format(e)) + print(f" {e!s}") files_not_loaded.append(item.filename) else: if "check" in package_load_args: @@ -921,30 +898,20 @@ def load( files_successfully_loaded.append(item.filename) if ml.verbose: print( - " {:4s} package load...success".format( - item.filetype - ) + f" {item.filetype:4s} package load...success" ) else: if ml.verbose: - print( - " {:4s} package load...skipped".format( - item.filetype - ) - ) + print(f" {item.filetype:4s} package load...skipped") files_not_loaded.append(item.filename) elif "data" not in item.filetype.lower(): files_not_loaded.append(item.filename) if ml.verbose: - print( - " {:4s} package load...skipped".format(item.filetype) - ) + print(f" {item.filetype:4s} package load...skipped") elif "data" in item.filetype.lower(): if ml.verbose: - print( - " {:s} package load...skipped".format(item.filetype) - ) - print(" {}".format(os.path.basename(item.filename))) + print(f" {item.filetype} package load...skipped") + print(f" {os.path.basename(item.filename)}") if key not in ml.pop_key_list: # do not add unit number (key) if it already exists if key not in ml.external_units: @@ -955,7 +922,7 @@ def load( ) ml.external_output.append(False) else: - raise KeyError("unhandled case: {}, {}".format(key, item)) + raise 
KeyError(f"unhandled case: {key}, {item}") # pop binary output keys and any external file units that are now # internal @@ -968,30 +935,28 @@ def load( except KeyError: if ml.verbose: print( - "\nWARNING:\n External file unit {} does not " - "exist in ext_unit_dict.".format(key) + f"\nWARNING:\n External file unit {key} does not " + "exist in ext_unit_dict." ) # write message indicating packages that were successfully loaded if ml.verbose: print("") print( - " The following {} packages were successfully loaded.".format( - len(files_successfully_loaded) - ) + f" The following {len(files_successfully_loaded)} packages " + "were successfully loaded." ) for fname in files_successfully_loaded: - print(" " + os.path.basename(fname)) + print(f" {os.path.basename(fname)}") if len(files_not_loaded) > 0: print( - " The following {} packages were not loaded.".format( - len(files_not_loaded) - ) + " The following {len(files_not_loaded)} packages " + "were not loaded." ) for fname in files_not_loaded: - print(" " + os.path.basename(fname)) + print(f" {os.path.basename(fname)}") if check: - ml.check(f="{}.chk".format(ml.name), verbose=ml.verbose, level=0) + ml.check(f=f"{ml.name}.chk", verbose=ml.verbose, level=0) # return model object return ml diff --git a/flopy/modflow/mfag.py b/flopy/modflow/mfag.py index 9fa000b62..6499af5f6 100644 --- a/flopy/modflow/mfag.py +++ b/flopy/modflow/mfag.py @@ -347,9 +347,7 @@ def write_file(self, check=False): for record in self.time_series: if record["keyword"] in ("welletall", "wellall"): foo.write( - "{} {:d}\n".format( - record["keyword"], record["unit"] - ).upper() + f"{record['keyword']} {record['unit']}\n".upper() ) else: foo.write(fmt.format(*record).upper()) @@ -361,7 +359,7 @@ def write_file(self, check=False): foo.write("# segment list for irriagation diversions\n") foo.write("SEGMENT LIST\n") for iseg in self.segment_list: - foo.write("{:d}\n".format(iseg)) + foo.write(f"{iseg}\n") foo.write("END \n") @@ -403,7 +401,7 @@ def write_file(self, check=False): foo.write("# ag stress period data\n") for per in range(self._nper): - foo.write("STRESS PERIOD {}\n".format(per + 1)) + foo.write(f"STRESS PERIOD {per + 1}\n") # check for item 18 and write items 18 - 21 if self.irrdiversion is not None: @@ -425,7 +423,7 @@ def write_file(self, check=False): recarray = self.irrdiversion[per] # write item 19 - foo.write("{:d} \n".format(len(recarray))) + foo.write(f"{len(recarray)} \n") fmt21 = "{:d} {:d} {:f} {:f}\n" for rec in recarray: @@ -449,10 +447,10 @@ def write_file(self, check=False): for i in range(num): foo.write( fmt21.format( - rec["i{}".format(i)] + 1, - rec["j{}".format(i)] + 1, - rec["eff_fact{}".format(i)], - rec["field_fact{}".format(i)], + rec[f"i{i}"] + 1, + rec[f"j{i}"] + 1, + rec[f"eff_fact{i}"], + rec[f"field_fact{i}"], ) ) @@ -478,7 +476,7 @@ def write_file(self, check=False): recarray = self.irrwell[per] # write item 23 - foo.write("{:d} \n".format(len(recarray))) + foo.write(f"{len(recarray)} \n") fmt25 = "{:d} {:d} {:f} {:f}\n" for rec in recarray: @@ -502,10 +500,10 @@ def write_file(self, check=False): for i in range(num): foo.write( fmt25.format( - rec["i{}".format(i)] + 1, - rec["j{}".format(i)] + 1, - rec["eff_fact{}".format(i)], - rec["field_fact{}".format(i)], + rec[f"i{i}"] + 1, + rec[f"j{i}"] + 1, + rec[f"eff_fact{i}"], + rec[f"field_fact{i}"], ) ) else: @@ -525,7 +523,7 @@ def write_file(self, check=False): recarray = self.supwell[per] # write item 27 - foo.write("{:d} \n".format(len(recarray))) + foo.write(f"{len(recarray)} \n") for 
rec in recarray: num = rec["numcell"] @@ -537,7 +535,7 @@ def write_file(self, check=False): ) for i in range(num): - if rec["fracsupmax{}".format(i)] != -1e10: + if rec[f"fracsupmax{i}"] != -1e10: foo.write( "{:d} {:f} {:f}\n".format( rec["segid{}".format(i)], @@ -643,10 +641,10 @@ def get_default_dtype(maxells=0, block="well"): for i in range(maxells): dtype += [ - ("i{}".format(i), int), - ("j{}".format(i), int), - ("eff_fact{}".format(i), float), - ("field_fact{}".format(i), float), + (f"i{i}", int), + (f"j{i}", int), + (f"eff_fact{i}", float), + (f"field_fact{i}", float), ] elif block == "irrwell": @@ -659,10 +657,10 @@ def get_default_dtype(maxells=0, block="well"): for i in range(maxells): dtype += [ - ("i{}".format(i), int), - ("j{}".format(i), int), - ("eff_fact{}".format(i), float), - ("field_fact{}".format(i), float), + (f"i{i}", int), + (f"j{i}", int), + (f"eff_fact{i}", float), + (f"field_fact{i}", float), ] elif block == "supwell": @@ -670,15 +668,13 @@ def get_default_dtype(maxells=0, block="well"): for i in range(maxells): dtype += [ - ("segid{}".format(i), int), - ("fracsup{}".format(i), float), - ("fracsupmax{}".format(i), float), + (f"segid{i}", int), + (f"fracsup{i}", float), + (f"fracsupmax{i}", float), ] else: - raise NotImplementedError( - "block type {}, not supported".format(block) - ) + raise NotImplementedError(f"block type {block}, not supported") return np.dtype(dtype) @@ -884,9 +880,7 @@ def load(cls, f, model, nper=0, ext_unit_dict=None): break else: - raise ValueError( - "Something went wrong at: {}".format(line) - ) + raise ValueError(f"Something went wrong at: {line}") return cls( model, diff --git a/flopy/modflow/mfbas.py b/flopy/modflow/mfbas.py index c28a3b225..dd903047f 100644 --- a/flopy/modflow/mfbas.py +++ b/flopy/modflow/mfbas.py @@ -248,15 +248,14 @@ def write_file(self, check=True): # allows turning off package checks when writing files at model level if check: self.check( - f="{}.chk".format(self.name[0]), + f=f"{self.name[0]}.chk", verbose=self.parent.verbose, level=1, ) # Open file for writing f_bas = open(self.fn_path, "w") # First line: heading - # f_bas.write('%s\n' % self.heading) - f_bas.write("{0:s}\n".format(self.heading)) + f_bas.write(f"{self.heading}\n") # Second line: format specifier opts = [] if self.ixsec: @@ -266,7 +265,7 @@ def write_file(self, check=True): if self.ifrefm: opts.append("FREE") if self.stoper is not None: - opts.append("STOPERROR {0}".format(self.stoper)) + opts.append(f"STOPERROR {self.stoper}") self.options = " ".join(opts) f_bas.write(self.options + "\n") # IBOUND array @@ -275,7 +274,7 @@ def write_file(self, check=True): str_hnoflo = str(self.hnoflo).rjust(10) if not self.ifrefm and len(str_hnoflo) > 10: # write fixed-width no more than 10 characters - str_hnoflo = "{0:10.4G}".format(self.hnoflo) + str_hnoflo = f"{self.hnoflo:10.4G}" assert len(str_hnoflo) <= 10, str_hnoflo f_bas.write(str_hnoflo + "\n") # Starting heads array @@ -409,7 +408,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True, **kwargs): ) if check: bas.check( - f="{}.chk".format(bas.name[0]), + f=f"{bas.name[0]}.chk", verbose=bas.parent.verbose, level=0, ) diff --git a/flopy/modflow/mfbcf.py b/flopy/modflow/mfbcf.py index 5dbf48baf..941ce4851 100644 --- a/flopy/modflow/mfbcf.py +++ b/flopy/modflow/mfbcf.py @@ -282,22 +282,14 @@ def write_file(self, f=None): for k in range(nlay): if ifrefm: if self.intercellt[k] > 0: - f_bcf.write( - "{0:1d}{1:1d} ".format( - self.intercellt[k], self.laycon[k] - ) - ) + 
f_bcf.write(f"{self.intercellt[k]:1d}{self.laycon[k]:1d} ") else: - f_bcf.write("0{0:1d} ".format(self.laycon[k])) + f_bcf.write(f"0{self.laycon[k]:1d} ") else: if self.intercellt[k] > 0: - f_bcf.write( - "{0:1d}{1:1d}".format( - self.intercellt[k], self.laycon[k] - ) - ) + f_bcf.write(f"{self.intercellt[k]:1d}{self.laycon[k]:1d}") else: - f_bcf.write("0{0:1d}".format(self.laycon[k])) + f_bcf.write(f"0{self.laycon[k]:1d}") f_bcf.write("\n") f_bcf.write(self.trpy.get_file_entry()) transient = not dis.steady.all() @@ -463,7 +455,7 @@ def load(cls, f, model, ext_unit_dict=None): # sf1 if transient: if model.verbose: - print(" loading sf1 layer {0:3d}...".format(k + 1)) + print(f" loading sf1 layer {k + 1:3d}...") t = Util2d.load( f, model, (nrow, ncol), np.float32, "sf1", ext_unit_dict ) @@ -472,14 +464,14 @@ def load(cls, f, model, ext_unit_dict=None): # tran or hy if (laycon[k] == 0) or (laycon[k] == 2): if model.verbose: - print(" loading tran layer {0:3d}...".format(k + 1)) + print(f" loading tran layer {k + 1:3d}...") t = Util2d.load( f, model, (nrow, ncol), np.float32, "tran", ext_unit_dict ) tran[k] = t else: if model.verbose: - print(" loading hy layer {0:3d}...".format(k + 1)) + print(f" loading hy layer {k + 1:3d}...") t = Util2d.load( f, model, (nrow, ncol), np.float32, "hy", ext_unit_dict ) @@ -488,7 +480,7 @@ def load(cls, f, model, ext_unit_dict=None): # vcont if k < (nlay - 1): if model.verbose: - print(" loading vcont layer {0:3d}...".format(k + 1)) + print(f" loading vcont layer {k + 1:3d}...") t = Util2d.load( f, model, (nrow, ncol), np.float32, "vcont", ext_unit_dict ) @@ -497,7 +489,7 @@ def load(cls, f, model, ext_unit_dict=None): # sf2 if transient and ((laycon[k] == 2) or (laycon[k] == 3)): if model.verbose: - print(" loading sf2 layer {0:3d}...".format(k + 1)) + print(f" loading sf2 layer {k + 1:3d}...") t = Util2d.load( f, model, (nrow, ncol), np.float32, "sf2", ext_unit_dict ) @@ -506,7 +498,7 @@ def load(cls, f, model, ext_unit_dict=None): # wetdry if (iwdflg != 0) and ((laycon[k] == 1) or (laycon[k] == 3)): if model.verbose: - print(" loading sf2 layer {0:3d}...".format(k + 1)) + print(f" loading sf2 layer {k + 1:3d}...") t = Util2d.load( f, model, (nrow, ncol), np.float32, "wetdry", ext_unit_dict ) diff --git a/flopy/modflow/mfchd.py b/flopy/modflow/mfchd.py index 3cdddffa3..e001c5737 100644 --- a/flopy/modflow/mfchd.py +++ b/flopy/modflow/mfchd.py @@ -110,7 +110,7 @@ def __init__( extension="chd", unitnumber=None, filenames=None, - **kwargs + **kwargs, ): # set default unit number if one is not specified @@ -181,8 +181,8 @@ def write_file(self): """ f_chd = open(self.fn_path, "w") - f_chd.write("{0:s}\n".format(self.heading)) - f_chd.write(" {0:9d}".format(self.stress_period_data.mxact)) + f_chd.write(f"{self.heading}\n") + f_chd.write(f" {self.stress_period_data.mxact:9d}") for option in self.options: f_chd.write(" {}".format(option)) f_chd.write("\n") @@ -193,7 +193,7 @@ def add_record(self, kper, index, values): try: self.stress_period_data.add_record(kper, index, values) except Exception as e: - raise Exception("mfchd error adding record to list: " + str(e)) + raise Exception(f"mfchd error adding record to list: {e!s}") @staticmethod def get_empty(ncells=0, aux_names=None, structured=True): diff --git a/flopy/modflow/mfde4.py b/flopy/modflow/mfde4.py index 66ea3f555..0753a9f40 100644 --- a/flopy/modflow/mfde4.py +++ b/flopy/modflow/mfde4.py @@ -195,31 +195,31 @@ def write_file(self): """ # Open file for writing f = open(self.fn_path, "w") - 
f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") ifrfm = self.parent.get_ifrefm() if ifrfm: - f.write("{} ".format(self.itmx)) - f.write("{} ".format(self.mxup)) - f.write("{} ".format(self.mxlow)) - f.write("{} ".format(self.mxbw)) + f.write(f"{self.itmx} ") + f.write(f"{self.mxup} ") + f.write(f"{self.mxlow} ") + f.write(f"{self.mxbw} ") f.write("\n") - f.write("{} ".format(self.ifreq)) - f.write("{} ".format(self.mutd4)) - f.write("{} ".format(self.accl)) - f.write("{} ".format(self.hclose)) - f.write("{} ".format(self.iprd4)) + f.write(f"{self.ifreq} ") + f.write(f"{self.mutd4} ") + f.write(f"{self.accl} ") + f.write(f"{self.hclose} ") + f.write(f"{self.iprd4} ") f.write("\n") else: - f.write("{:10d}".format(self.itmx)) - f.write("{:10d}".format(self.mxup)) - f.write("{:10d}".format(self.mxlow)) - f.write("{:10d}".format(self.mxbw)) + f.write(f"{self.itmx:10d}") + f.write(f"{self.mxup:10d}") + f.write(f"{self.mxlow:10d}") + f.write(f"{self.mxbw:10d}") f.write("\n") - f.write("{:10d}".format(self.ifreq)) - f.write("{:10d}".format(self.mutd4)) - f.write("{:9.4e} ".format(self.accl)) - f.write("{:9.4e} ".format(self.hclose)) - f.write("{:10d}".format(self.iprd4)) + f.write(f"{self.ifreq:10d}") + f.write(f"{self.mutd4:10d}") + f.write(f"{self.accl:9.4e} ") + f.write(f"{self.hclose:9.4e} ") + f.write(f"{self.iprd4:10d}") f.write("\n") f.close() diff --git a/flopy/modflow/mfdis.py b/flopy/modflow/mfdis.py index 424da51ce..d895e466b 100644 --- a/flopy/modflow/mfdis.py +++ b/flopy/modflow/mfdis.py @@ -423,8 +423,7 @@ def get_totim_from_kper_toffset( kper = 0.0 if kper >= self.nper: raise ValueError( - "kper ({}) must be less than " - "to nper ({}).".format(kper, self.nper) + f"kper ({kper}) must be less than to nper ({self.nper})." ) totim = self.get_totim(use_cached_totim) @@ -614,14 +613,14 @@ def write_file(self, check=True): check ): # allows turning off package checks when writing files at model level self.check( - f="{}.chk".format(self.name[0]), + f=f"{self.name[0]}.chk", verbose=self.parent.verbose, level=1, ) # Open file for writing f_dis = open(self.fn_path, "w") # Item 0: heading - f_dis.write("{0:s}\n".format(self.heading)) + f_dis.write(f"{self.heading}\n") # Item 1: NLAY, NROW, NCOL, NPER, ITMUNI, LENUNI f_dis.write( "{0:10d}{1:10d}{2:10d}{3:10d}{4:10d}{5:10d}\n".format( @@ -635,7 +634,7 @@ def write_file(self, check=True): ) # Item 2: LAYCBD for l in range(0, self.nlay): - f_dis.write("{0:3d}".format(self.laycbd[l])) + f_dis.write(f"{self.laycbd[l]:3d}") f_dis.write("\n") # Item 3: DELR f_dis.write(self.delr.get_file_entry()) @@ -649,9 +648,7 @@ def write_file(self, check=True): # Item 6: NPER, NSTP, TSMULT, Ss/tr for t in range(self.nper): f_dis.write( - "{0:14f}{1:14d}{2:10f} ".format( - self.perlen[t], self.nstp[t], self.tsmult[t] - ) + f"{self.perlen[t]:14f}{self.nstp[t]:14d}{self.tsmult[t]:10f} " ) if self.steady[t]: f_dis.write(" {0:3s}\n".format("SS")) @@ -793,43 +790,35 @@ def load(cls, f, model, ext_unit_dict=None, check=True): xul = float(item.split(":")[1]) except: if model.verbose: - print(" could not parse xul in {}".format(filename)) + print(f" could not parse xul in {filename}") dep = True elif "yul" in item.lower(): try: yul = float(item.split(":")[1]) except: if model.verbose: - print(" could not parse yul in {}".format(filename)) + print(f" could not parse yul in {filename}") dep = True elif "rotation" in item.lower(): try: rotation = float(item.split(":")[1]) except: if model.verbose: - print( - " could not parse rotation " - "in 
{}".format(filename) - ) + print(f" could not parse rotation in {filename}") dep = True elif "proj4_str" in item.lower(): try: proj4_str = ":".join(item.split(":")[1:]).strip() except: if model.verbose: - print( - " could not parse proj4_str " - "in {}".format(filename) - ) + print(f" could not parse proj4_str in {filename}") dep = True elif "start" in item.lower(): try: start_datetime = item.split(":")[1].strip() except: if model.verbose: - print( - " could not parse start in {}".format(filename) - ) + print(f" could not parse start in {filename}") dep = True if dep: warnings.warn( @@ -888,9 +877,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): ncbd = laycbd.sum() if model.verbose: print(" loading botm...") - print( - " for {} layers and {} confining beds".format(nlay, ncbd) - ) + print(f" for {nlay} layers and {ncbd} confining beds") if nlay > 1: botm = Util3d.load( f, @@ -907,7 +894,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # dataset 7 -- stress period info if model.verbose: print(" loading stress period data...") - print(" for {} stress periods".format(nper)) + print(f" for {nper} stress periods") perlen = [] nstp = [] tsmult = [] @@ -966,7 +953,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): ) if check: dis.check( - f="{}.chk".format(dis.name[0]), + f=f"{dis.name[0]}.chk", verbose=dis.parent.verbose, level=0, ) diff --git a/flopy/modflow/mfdisu.py b/flopy/modflow/mfdisu.py index c0ea5ec62..20f77fa3f 100644 --- a/flopy/modflow/mfdisu.py +++ b/flopy/modflow/mfdisu.py @@ -598,19 +598,19 @@ def load(cls, f, model, ext_unit_dict=None, check=True): else: idsymrd = 0 if model.verbose: - print(" NODES {}".format(nodes)) - print(" NLAY {}".format(nlay)) - print(" NJAG {}".format(njag)) - print(" IVSD {}".format(ivsd)) - print(" NPER {}".format(nper)) - print(" ITMUNI {}".format(itmuni)) - print(" LENUNI {}".format(lenuni)) - print(" IDSYMRD {}".format(idsymrd)) + print(f" NODES {nodes}") + print(f" NLAY {nlay}") + print(f" NJAG {njag}") + print(f" IVSD {ivsd}") + print(f" NPER {nper}") + print(f" ITMUNI {itmuni}") + print(f" LENUNI {lenuni}") + print(f" IDSYMRD {idsymrd}") # Calculate njags njags = int((njag - nodes) / 2) if model.verbose: - print(" NJAGS calculated as {}".format(njags)) + print(f" NJAGS calculated as {njags}") # dataset 2 -- laycbd if model.verbose: @@ -618,7 +618,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): laycbd = np.empty((nlay,), np.int32) laycbd = read1d(f, laycbd) if model.verbose: - print(" LAYCBD {}".format(laycbd)) + print(f" LAYCBD {laycbd}") # dataset 3 -- nodelay if model.verbose: @@ -627,7 +627,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): f, model, (nlay,), np.int32, "nodelay", ext_unit_dict ) if model.verbose: - print(" NODELAY {}".format(nodelay)) + print(f" NODELAY {nodelay}") # dataset 4 -- top if model.verbose: @@ -640,7 +640,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): top[k] = tpk if model.verbose: for k, tpk in enumerate(top): - print(" TOP layer {}: {}".format(k, tpk.array)) + print(f" TOP layer {k}: {tpk.array}") # dataset 5 -- bot if model.verbose: @@ -653,7 +653,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): bot[k] = btk if model.verbose: for k, btk in enumerate(bot): - print(" BOT layer {}: {}".format(k, btk.array)) + print(f" BOT layer {k}: {btk.array}") # dataset 6 -- area if model.verbose: @@ -671,21 +671,21 @@ def load(cls, f, model, ext_unit_dict=None, check=True): area[k] = ak if model.verbose: for k, ak in enumerate(area): - 
print(" AREA layer {}: {}".format(k, ak)) + print(f" AREA layer {k}: {ak}") # dataset 7 -- iac if model.verbose: print(" loading IAC...") iac = Util2d.load(f, model, (nodes,), np.int32, "iac", ext_unit_dict) if model.verbose: - print(" IAC {}".format(iac)) + print(f" IAC {iac}") # dataset 8 -- ja if model.verbose: print(" loading JA...") ja = Util2d.load(f, model, (njag,), np.int32, "ja", ext_unit_dict) if model.verbose: - print(" JA {}".format(ja)) + print(f" JA {ja}") # dataset 9 -- ivc ivc = None @@ -696,7 +696,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): f, model, (njag,), np.int32, "ivc", ext_unit_dict ) if model.verbose: - print(" IVC {}".format(ivc)) + print(f" IVC {ivc}") # dataset 10a -- cl1 cl1 = None @@ -707,7 +707,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): f, model, (njags,), np.float32, "cl1", ext_unit_dict ) if model.verbose: - print(" CL1 {}".format(cl1)) + print(f" CL1 {cl1}") # dataset 10b -- cl2 cl2 = None @@ -718,7 +718,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): f, model, (njags,), np.float32, "cl2", ext_unit_dict ) if model.verbose: - print(" CL2 {}".format(cl2)) + print(f" CL2 {cl2}") # dataset 11 -- cl12 cl12 = None @@ -729,7 +729,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): f, model, (njag,), np.float32, "cl12", ext_unit_dict ) if model.verbose: - print(" CL12 {}".format(cl12)) + print(f" CL12 {cl12}") # dataset 12 -- fahl fahl = None @@ -741,7 +741,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): print(" loading FAHL...") fahl = Util2d.load(f, model, (n,), np.float32, "fahl", ext_unit_dict) if model.verbose: - print(" FAHL {}".format(fahl)) + print(f" FAHL {fahl}") # dataset 7 -- stress period info if model.verbose: @@ -765,10 +765,10 @@ def load(cls, f, model, ext_unit_dict=None, check=True): tsmult.append(a3) steady.append(a4) if model.verbose: - print(" PERLEN {}".format(perlen)) - print(" NSTP {}".format(nstp)) - print(" TSMULT {}".format(tsmult)) - print(" STEADY {}".format(steady)) + print(f" PERLEN {perlen}") + print(f" NSTP {nstp}") + print(f" TSMULT {tsmult}") + print(f" STEADY {steady}") if openfile: f.close() @@ -828,7 +828,7 @@ def write_file(self): f_dis = open(self.fn_path, "w") # Item 0: heading - f_dis.write("{0:s}\n".format(self.heading)) + f_dis.write(f"{self.heading}\n") # Item 1: NODES NLAY NJAG IVSD NPER ITMUNI LENUNI IDSYMRD s = "" @@ -847,7 +847,7 @@ def write_file(self): # Item 2: LAYCBD for k in range(self.nlay): - f_dis.write("{0:3d}".format(self.laycbd[k])) + f_dis.write(f"{self.laycbd[k]:3d}") f_dis.write("\n") # Item 3: NODELAY @@ -890,9 +890,7 @@ def write_file(self): # Item 13: NPER, NSTP, TSMULT, Ss/tr for t in range(self.nper): f_dis.write( - "{0:14f}{1:14d}{2:10f} ".format( - self.perlen[t], self.nstp[t], self.tsmult[t] - ) + f"{self.perlen[t]:14f}{self.nstp[t]:14d}{self.tsmult[t]:10f} " ) if self.steady[t]: f_dis.write(" {0:3s}\n".format("SS")) diff --git a/flopy/modflow/mfdrn.py b/flopy/modflow/mfdrn.py index edfa877bc..cf3728a85 100644 --- a/flopy/modflow/mfdrn.py +++ b/flopy/modflow/mfdrn.py @@ -117,7 +117,7 @@ def __init__( unitnumber=None, options=None, filenames=None, - **kwargs + **kwargs, ): # set default unit number of one is not specified @@ -250,16 +250,13 @@ def write_file(self, check=True): check ): # allows turning off package checks when writing files at model level self.check( - f="{}.chk".format(self.name[0]), + f=f"{self.name[0]}.chk", verbose=self.parent.verbose, level=1, ) f_drn = open(self.fn_path, "w") - 
f_drn.write("{0}\n".format(self.heading)) - # f_drn.write('%10i%10i\n' % (self.mxactd, self.idrncb)) - line = "{0:10d}{1:10d}".format( - self.stress_period_data.mxact, self.ipakcb - ) + f_drn.write(f"{self.heading}\n") + line = f"{self.stress_period_data.mxact:10d}{self.ipakcb:10d}" if self.is_drt: line += "{0:10d}{0:10d}".format(0) @@ -274,7 +271,7 @@ def add_record(self, kper, index, values): try: self.stress_period_data.add_record(kper, index, values) except Exception as e: - raise Exception("mfdrn error adding record to list: " + str(e)) + raise Exception(f"mfdrn error adding record to list: {e!s}") @staticmethod def get_empty(ncells=0, aux_names=None, structured=True, is_drt=False): diff --git a/flopy/modflow/mfdrt.py b/flopy/modflow/mfdrt.py index dceee07cf..97dfdaebd 100644 --- a/flopy/modflow/mfdrt.py +++ b/flopy/modflow/mfdrt.py @@ -115,7 +115,7 @@ def __init__( unitnumber=None, options=None, filenames=None, - **kwargs + **kwargs, ): # set default unit number of one is not specified @@ -245,16 +245,13 @@ def write_file(self, check=True): check ): # allows turning off package checks when writing files at model level self.check( - f="{}.chk".format(self.name[0]), + f=f"{self.name[0]}.chk", verbose=self.parent.verbose, level=1, ) f_drn = open(self.fn_path, "w") - f_drn.write("{0}\n".format(self.heading)) - # f_drn.write('%10i%10i\n' % (self.mxactd, self.idrncb)) - line = "{0:10d}{1:10d}{2:10d}{3:10d}".format( - self.stress_period_data.mxact, self.ipakcb, 0, 0 - ) + f_drn.write(f"{self.heading}\n") + line = f"{self.stress_period_data.mxact:10d}{self.ipakcb:10d}{0:10d}{0:10d}" for opt in self.options: line += " " + str(opt) line += "\n" @@ -266,7 +263,7 @@ def add_record(self, kper, index, values): try: self.stress_period_data.add_record(kper, index, values) except Exception as e: - raise Exception("mfdrt error adding record to list: " + str(e)) + raise Exception(f"mfdrt error adding record to list: {e!s}") @staticmethod def get_empty(ncells=0, aux_names=None, structured=True, is_drt=False): diff --git a/flopy/modflow/mfevt.py b/flopy/modflow/mfevt.py index 65f616aa7..fc413c507 100644 --- a/flopy/modflow/mfevt.py +++ b/flopy/modflow/mfevt.py @@ -199,8 +199,8 @@ def write_file(self, f=None): f_evt = f else: f_evt = open(self.fn_path, "w") - f_evt.write("{0:s}\n".format(self.heading)) - f_evt.write("{0:10d}{1:10d}\n".format(self.nevtop, self.ipakcb)) + f_evt.write(f"{self.heading}\n") + f_evt.write(f"{self.nevtop:10d}{self.ipakcb:10d}\n") if self.nevtop == 2 and not self.parent.structured: mxndevt = np.max( [ @@ -208,7 +208,7 @@ def write_file(self, f=None): for kper, u2d in self.ievt.transient_2ds.items() ] ) - f_evt.write("{0:10d}\n".format(mxndevt)) + f_evt.write(f"{mxndevt:10d}\n") for n in range(nper): insurf, surf = self.surf.get_kper_entry(n) @@ -217,13 +217,9 @@ def write_file(self, f=None): inievt, ievt = self.ievt.get_kper_entry(n) if self.nevtop == 2 and not self.parent.structured: inievt = self.ievt[n].array.size - comment = "Evapotranspiration dataset 5 for stress period " + str( - n + 1 - ) + comment = f"Evapotranspiration dataset 5 for stress period {n + 1}" f_evt.write( - "{0:10d}{1:10d}{2:10d}{3:10d} # {4:s}\n".format( - insurf, inevtr, inexdp, inievt, comment - ) + f"{insurf:10d}{inevtr:10d}{inexdp:10d}{inievt:10d} # {comment}\n" ) if insurf >= 0: f_evt.write(surf) @@ -342,11 +338,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): if insurf >= 0: if model.verbose: - print( - " loading surf stress period {0:3d}...".format( - iper + 1 - ) - ) + print(f" loading 
surf stress period {iper + 1:3d}...") t = Util2d.load( f, model, u2d_shape, np.float32, "surf", ext_unit_dict ) @@ -357,9 +349,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): if npar == 0: if model.verbose: print( - " loading evtr stress period {0:3d}...".format( - iper + 1 - ) + f" loading evtr stress period {iper + 1:3d}..." ) t = Util2d.load( f, @@ -396,11 +386,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): evtr[iper] = current_evtr if inexdp >= 0: if model.verbose: - print( - " loading exdp stress period {0:3d}...".format( - iper + 1 - ) - ) + print(f" loading exdp stress period {iper + 1:3d}...") t = Util2d.load( f, model, u2d_shape, np.float32, "exdp", ext_unit_dict ) @@ -410,9 +396,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): if inievt >= 0: if model.verbose: print( - " loading ievt stress period {0:3d}...".format( - iper + 1 - ) + f" loading ievt stress period {iper + 1:3d}..." ) t = Util2d.load( f, model, u2d_shape, np.int32, "ievt", ext_unit_dict diff --git a/flopy/modflow/mffhb.py b/flopy/modflow/mffhb.py index fb42c8ec6..bb642ad36 100644 --- a/flopy/modflow/mffhb.py +++ b/flopy/modflow/mffhb.py @@ -264,8 +264,7 @@ def __init__( if self.nflw > 0: if self.ds5 is None: raise TypeError( - "dataset 5 is not specified but " - "nflw > 0 ({})".format(self.nflw) + f"dataset 5 is not specified but nflw > 0 ({self.nflw})" ) if self.ds5.shape[0] != self.nflw: @@ -287,8 +286,7 @@ def __init__( if self.nhed > 0: if self.ds7 is None: raise TypeError( - "dataset 7 is not specified but " - "nhed > 0 ({})".format(self.nhed) + f"dataset 7 is not specified but nhed > 0 ({self.nhed})" ) if self.ds7.shape[0] != self.nhed: raise ValueError( @@ -325,9 +323,9 @@ def get_default_dtype(nbdtim=1, structured=True, head=False): dtype.append(("iaux", int)) for n in range(nbdtim): if head: - name = "sbhed{}".format(n + 1) + name = f"sbhed{n + 1}" else: - name = "flwrat{}".format(n + 1) + name = f"flwrat{n + 1}" dtype.append((name, np.float32)) return np.dtype(dtype) @@ -357,34 +355,34 @@ def write_file(self): # f.write('{0:s}\n'.format(self.heading)) # Data set 1 - f.write("{} ".format(self.nbdtim)) - f.write("{} ".format(self.nflw)) - f.write("{} ".format(self.nhed)) - f.write("{} ".format(self.ifhbss)) - f.write("{} ".format(self.ipakcb)) - f.write("{} ".format(self.nfhbx1)) - f.write("{}\n".format(self.nfhbx2)) + f.write(f"{self.nbdtim} ") + f.write(f"{self.nflw} ") + f.write(f"{self.nhed} ") + f.write(f"{self.ifhbss} ") + f.write(f"{self.ipakcb} ") + f.write(f"{self.nfhbx1} ") + f.write(f"{self.nfhbx2}\n") # Dataset 2 - flow auxiliary names # Dataset 3 - head auxiliary names # Dataset 4a IFHBUN CNSTM IFHBPT - f.write("{} ".format(self.unit_number[0])) - f.write("{} ".format(self.bdtimecnstm)) - f.write("{}\n".format(self.ifhbpt)) + f.write(f"{self.unit_number[0]} ") + f.write(f"{self.bdtimecnstm} ") + f.write(f"{self.ifhbpt}\n") # Dataset 4b for n in range(self.nbdtim): - f.write("{} ".format(self.bdtime[n])) + f.write(f"{self.bdtime[n]} ") f.write("\n") # Dataset 5 and 6 if self.nflw > 0: # Dataset 5a IFHBUN CNSTM IFHBPT - f.write("{} ".format(self.unit_number[0])) - f.write("{} ".format(self.cnstm5)) - f.write("{}\n".format(self.ifhbpt)) + f.write(f"{self.unit_number[0]} ") + f.write(f"{self.cnstm5} ") + f.write(f"{self.ifhbpt}\n") # Dataset 5b for n in range(self.nflw): @@ -392,7 +390,7 @@ def write_file(self): v = self.ds5[n][name] if name in ["k", "i", "j", "node"]: v += 1 - f.write("{} ".format(v)) + f.write(f"{v} ") f.write("\n") # Dataset 6a and 6b - flow 
auxiliary data @@ -402,9 +400,9 @@ def write_file(self): # Dataset 7 if self.nhed > 0: # Dataset 7a IFHBUN CNSTM IFHBPT - f.write("{} ".format(self.unit_number[0])) - f.write("{} ".format(self.cnstm7)) - f.write("{}\n".format(self.ifhbpt)) + f.write(f"{self.unit_number[0]} ") + f.write(f"{self.cnstm7} ") + f.write(f"{self.ifhbpt}\n") # Dataset 7b IFHBUN CNSTM IFHBPT for n in range(self.nhed): @@ -412,7 +410,7 @@ def write_file(self): v = self.ds7[n][name] if name in ["k", "i", "j", "node"]: v += 1 - f.write("{} ".format(v)) + f.write(f"{v} ") f.write("\n") # Dataset 8a and 8b - head auxiliary data @@ -601,8 +599,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): for naux in range(nfhbx1): if model.verbose: sys.stdout.write( - "loading fhb dataset 6a - aux " - "{}\n".format(naux + 1) + f"loading fhb dataset 6a - aux {naux + 1}\n" ) sys.stdout.write( "dataset 6a will not be preserved in " @@ -623,8 +620,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): if model.verbose: sys.stdout.write( - "loading fhb dataset 6b - aux " - "{}\n".format(naux + 1) + f"loading fhb dataset 6b - aux {naux + 1}\n" ) sys.stdout.write( "dataset 6b will not be preserved in " @@ -690,8 +686,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): for naux in range(nfhbx1): if model.verbose: sys.stdout.write( - "loading fhb dataset 8a - aux " - "{}\n".format(naux + 1) + f"loading fhb dataset 8a - aux {naux + 1}\n" ) sys.stdout.write( "dataset 8a will not be preserved in " @@ -713,8 +708,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): if model.verbose: sys.stdout.write( - "loading fhb dataset 8b - aux " - "{}\n".format(naux + 1) + f"loading fhb dataset 8b - aux {naux + 1}\n" ) sys.stdout.write( "dataset 8b will not be preserved in " diff --git a/flopy/modflow/mfflwob.py b/flopy/modflow/mfflwob.py index f4642b915..4051da3d2 100755 --- a/flopy/modflow/mfflwob.py +++ b/flopy/modflow/mfflwob.py @@ -315,35 +315,33 @@ def write_file(self): f_fbob = open(self.fn_path, "w") # write header - f_fbob.write("{}\n".format(self.heading)) + f_fbob.write(f"{self.heading}\n") # write sections 1 and 2 : NOTE- what about NOPRINT? 
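Aside: the fixed-width specs in the hunks below (e.g. ":10d") use the same format mini-language under str.format and f-strings, so conversions of this kind are byte-for-byte equivalent; a minimal runnable sketch, with "nqfb" as a made-up stand-in for the package attributes:

    nqfb = 42
    old = "{:10d}".format(nqfb)  # str.format spelling
    new = f"{nqfb:10d}"          # f-string spelling; same spec after the colon
    assert old == new == "        42"  # right-justified in a 10-character field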
- line = "{:10d}".format(self.nqfb) - line += "{:10d}".format(self.nqcfb) - line += "{:10d}".format(self.nqtfb) - line += "{:10d}".format(self.iufbobsv) + line = f"{self.nqfb:10d}" + line += f"{self.nqcfb:10d}" + line += f"{self.nqtfb:10d}" + line += f"{self.iufbobsv:10d}" if self.no_print or "NOPRINT" in self.options: line += "{: >10}".format("NOPRINT") line += "\n" f_fbob.write(line) - f_fbob.write("{:10e}\n".format(self.tomultfb)) + f_fbob.write(f"{self.tomultfb:10e}\n") # write sections 3-5 looping through observations groups c = 0 for i in range(self.nqfb): # while (i < self.nqfb): # write section 3 - f_fbob.write( - "{:10d}{:10d}\n".format(self.nqobfb[i], self.nqclfb[i]) - ) + f_fbob.write(f"{self.nqobfb[i]:10d}{self.nqclfb[i]:10d}\n") # Loop through observation times for the groups for j in range(self.nqobfb[i]): # write section 4 - line = "{:12}".format(self.obsnam[c]) - line += "{:8d}".format(self.irefsp[c] + 1) - line += "{:16.10g}".format(self.toffset[c]) - line += " {:10.4g}\n".format(self.flwobs[c]) + line = f"{self.obsnam[c]:12}" + line += f"{self.irefsp[c] + 1:8d}" + line += f"{self.toffset[c]:16.10g}" + line += f" {self.flwobs[c]:10.4g}\n" f_fbob.write(line) c += 1 # index variable @@ -353,12 +351,12 @@ def write_file(self): # set factor to 1.0 for all cells in group if self.nqclfb[i] < 0: self.factor[i, :] = 1.0 - line = "{:10d}".format(self.layer[i, j] + 1) - line += "{:10d}".format(self.row[i, j] + 1) - line += "{:10d}".format(self.column[i, j] + 1) + line = f"{self.layer[i, j] + 1:10d}" + line += f"{self.row[i, j] + 1:10d}" + line += f"{self.column[i, j] + 1:10d}" line += " ".format(self.factor[i, j]) # note is 10f good enough here? - line += "{:10f}\n".format(self.factor[i, j]) + line += f"{self.factor[i, j]:10f}\n" f_fbob.write(line) f_fbob.close() @@ -371,9 +369,9 @@ def write_file(self): # write header f_ins = open(sfname, "w") f_ins.write("jif @\n") - f_ins.write("StandardFile 0 1 {}\n".format(self.nqtfb)) + f_ins.write(f"StandardFile 0 1 {self.nqtfb}\n") for i in range(0, self.nqtfb): - f_ins.write("{}\n".format(self.obsnam[i])) + f_ins.write(f"{self.obsnam[i]}\n") f_ins.close() # swm: END hack for writing standard file @@ -610,10 +608,7 @@ def _get_ftype_from_filename(fn, ext_unit_dict=None): elif "rv" in ext.lower(): ftype = "RVOB" - msg = ( - "ModflowFlwob: filetype cannot be inferred " - "from file name {}".format(fn) - ) + msg = f"ModflowFlwob: filetype cannot be inferred from file name {fn}" if ftype is None: raise AssertionError(msg) diff --git a/flopy/modflow/mfgage.py b/flopy/modflow/mfgage.py index 5da93de38..4f79f1e61 100644 --- a/flopy/modflow/mfgage.py +++ b/flopy/modflow/mfgage.py @@ -87,7 +87,7 @@ def __init__( extension="gage", unitnumber=None, filenames=None, - **kwargs + **kwargs, ): """ Package constructor. 
@@ -115,15 +115,13 @@ def __init__( if files is None: files = [] for idx in range(numgage): - files.append( - "{}.gage{}.go".format(model.name, idx + 1) - ) + files.append(f"{model.name}.gage{idx + 1}.go") if isinstance(files, np.ndarray): files = files.flatten().tolist() elif isinstance(files, str): files = [files] elif isinstance(files, int) or isinstance(files, float): - files = ["{}.go".format(files)] + files = [f"{files}.go"] if len(files) < numgage: raise Exception( "a filename needs to be provided for {} gages - {} " diff --git a/flopy/modflow/mfghb.py b/flopy/modflow/mfghb.py index 9dc5f249f..014280aab 100644 --- a/flopy/modflow/mfghb.py +++ b/flopy/modflow/mfghb.py @@ -213,15 +213,13 @@ def write_file(self, check=True): check ): # allows turning off package checks when writing files at model level self.check( - f="{}.chk".format(self.name[0]), + f=f"{self.name[0]}.chk", verbose=self.parent.verbose, level=1, ) f_ghb = open(self.fn_path, "w") - f_ghb.write("{}\n".format(self.heading)) - f_ghb.write( - "{:10d}{:10d}".format(self.stress_period_data.mxact, self.ipakcb) - ) + f_ghb.write(f"{self.heading}\n") + f_ghb.write(f"{self.stress_period_data.mxact:10d}{self.ipakcb:10d}") for option in self.options: f_ghb.write(" {}".format(option)) f_ghb.write("\n") @@ -232,7 +230,7 @@ def add_record(self, kper, index, values): try: self.stress_period_data.add_record(kper, index, values) except Exception as e: - raise Exception("mfghb error adding record to list: " + str(e)) + raise Exception(f"mfghb error adding record to list: {e!s}") @staticmethod def get_empty(ncells=0, aux_names=None, structured=True): diff --git a/flopy/modflow/mfgmg.py b/flopy/modflow/mfgmg.py index 6c29ad520..1327fce41 100644 --- a/flopy/modflow/mfgmg.py +++ b/flopy/modflow/mfgmg.py @@ -292,26 +292,22 @@ def write_file(self): """ f_gmg = open(self.fn_path, "w") - f_gmg.write("%s\n" % self.heading) + f_gmg.write(f"{self.heading}\n") # dataset 0 f_gmg.write( - "{} {} {} {}\n".format( - self.rclose, self.iiter, self.hclose, self.mxiter - ) + f"{self.rclose} {self.iiter} {self.hclose} {self.mxiter}\n" ) # dataset 1 f_gmg.write( - "{} {} {} {}\n".format( - self.damp, self.iadamp, self.ioutgmg, self.iunitmhc - ) + f"{self.damp} {self.iadamp} {self.ioutgmg} {self.iunitmhc}\n" ) # dataset 2 - f_gmg.write("{} {} ".format(self.ism, self.isc)) + f_gmg.write(f"{self.ism} {self.isc} ") if self.iadamp == 2: - f_gmg.write("{} {} {}".format(self.dup, self.dlow, self.chglimit)) + f_gmg.write(f"{self.dup} {self.dlow} {self.chglimit}") f_gmg.write("\n") # dataset 3 - f_gmg.write("{}\n".format(self.relax)) + f_gmg.write(f"{self.relax}\n") f_gmg.close() @classmethod diff --git a/flopy/modflow/mfhfb.py b/flopy/modflow/mfhfb.py index 2f3bff0e4..4b1288ce0 100644 --- a/flopy/modflow/mfhfb.py +++ b/flopy/modflow/mfhfb.py @@ -199,10 +199,8 @@ def write_file(self): """ f_hfb = open(self.fn_path, "w") - f_hfb.write("{}\n".format(self.heading)) - f_hfb.write( - "{:10d}{:10d}{:10d}".format(self.nphfb, self.mxfb, self.nhfbnp) - ) + f_hfb.write(f"{self.heading}\n") + f_hfb.write(f"{self.nphfb:10d}{self.mxfb:10d}{self.nhfbnp:10d}") for option in self.options: f_hfb.write(" {}".format(option)) f_hfb.write("\n") @@ -212,7 +210,7 @@ def write_file(self): a[0] + 1, a[1] + 1, a[2] + 1, a[3] + 1, a[4] + 1, a[5] ) ) - f_hfb.write("{:10d}".format(self.nacthfb)) + f_hfb.write(f"{self.nacthfb:10d}") f_hfb.close() @staticmethod diff --git a/flopy/modflow/mfhob.py b/flopy/modflow/mfhob.py index 78d47ef80..5d0a9a89a 100755 --- a/flopy/modflow/mfhob.py +++ 
b/flopy/modflow/mfhob.py @@ -194,8 +194,8 @@ def _set_dimensions(self): for idx, obs in enumerate(self.obs_data): if not isinstance(obs, HeadObservation): msg += ( - "ModflowHob: obs_data entry {} ".format(idx) - + "is not a HeadObservation instance.\n" + f"ModflowHob: obs_data entry {idx} " + "is not a HeadObservation instance.\n" ) continue self.nh += obs.nobs @@ -222,20 +222,20 @@ def write_file(self): f = open(self.fn_path, "w") # write dataset 0 - f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") # write dataset 1 - f.write("{:10d}".format(self.nh)) - f.write("{:10d}".format(self.mobs)) - f.write("{:10d}".format(self.maxm)) - f.write("{:10d}".format(self.iuhobsv)) - f.write("{:10.4g}".format(self.hobdry)) + f.write(f"{self.nh:10d}") + f.write(f"{self.mobs:10d}") + f.write(f"{self.maxm:10d}") + f.write(f"{self.iuhobsv:10d}") + f.write(f"{self.hobdry:10.4g}") if self.no_print or "NOPRINT" in self.options: f.write("{: >10}".format("NOPRINT")) f.write("\n") # write dataset 2 - f.write("{:10.4g}\n".format(self.tomulth)) + f.write(f"{self.tomulth:10.4g}\n") # write datasets 3-6 for idx, obs in enumerate(self.obs_data): @@ -243,44 +243,44 @@ def write_file(self): obsname = obs.obsname if isinstance(obsname, bytes): obsname = obsname.decode("utf-8") - line = "{:12s} ".format(obsname) + line = f"{obsname:12s} " layer = obs.layer if layer >= 0: layer += 1 - line += "{:10d} ".format(layer) - line += "{:10d} ".format(obs.row + 1) - line += "{:10d} ".format(obs.column + 1) + line += f"{layer:10d} " + line += f"{obs.row + 1:10d} " + line += f"{obs.column + 1:10d} " irefsp = obs.irefsp if irefsp >= 0: irefsp += 1 - line += "{:10d} ".format(irefsp) + line += f"{irefsp:10d} " if obs.nobs == 1: toffset = obs.time_series_data[0]["toffset"] hobs = obs.time_series_data[0]["hobs"] else: toffset = 0.0 hobs = 0.0 - line += "{:20} ".format(toffset) - line += "{:10.4f} ".format(obs.roff) - line += "{:10.4f} ".format(obs.coff) - line += "{:10.4f} ".format(hobs) - line += " # DATASET 3 - Observation {}".format(idx + 1) - f.write("{}\n".format(line)) + line += f"{toffset:20} " + line += f"{obs.roff:10.4f} " + line += f"{obs.coff:10.4f} " + line += f"{hobs:10.4f} " + line += f" # DATASET 3 - Observation {idx + 1}" + f.write(f"{line}\n") # dataset 4 if len(obs.mlay.keys()) > 1: line = "" for key, value in iter(obs.mlay.items()): - line += "{:5d}{:10.4f}".format(key + 1, value) - line += " # DATASET 4 - Observation {}".format(idx + 1) - f.write("{}\n".format(line)) + line += f"{key + 1:5d}{value:10.4f}" + line += f" # DATASET 4 - Observation {idx + 1}" + f.write(f"{line}\n") # dataset 5 if irefsp < 0: - line = "{:10d}".format(obs.itt) + line = f"{obs.itt:10d}" line += 103 * " " - line += " # DATASET 5 - Observation {}".format(idx + 1) - f.write("{}\n".format(line)) + line += f" # DATASET 5 - Observation {idx + 1}" + f.write(f"{line}\n") # dataset 6: if obs.nobs > 1: @@ -288,15 +288,13 @@ def write_file(self): obsname = t["obsname"] if isinstance(obsname, bytes): obsname = obsname.decode("utf-8") - line = "{:12s} ".format(obsname) - line += "{:10d} ".format(t["irefsp"] + 1) - line += "{:20} ".format(t["toffset"]) - line += "{:10.4f} ".format(t["hobs"]) + line = f"{obsname:12s} " + line += f"{t['irefsp'] + 1:10d} " + line += f"{t['toffset']:20} " + line += f"{t['hobs']:10.4f} " line += 55 * " " - line += " # DATASET 6 - Observation {}.{}".format( - idx + 1, jdx + 1 - ) - f.write("{}\n".format(line)) + line += f" # DATASET 6 - Observation {idx + 1}.{jdx + 1}" + f.write(f"{line}\n") # close the hob 
package file f.close() @@ -662,7 +660,7 @@ def __init__( else: names = [] for idx in range(self.nobs): - names.append("{}.{}".format(obsname, idx + 1)) + names.append(f"{obsname}.{idx + 1}") # make sure the length of names is greater than or equal to nobs else: if isinstance(names, str): diff --git a/flopy/modflow/mfhyd.py b/flopy/modflow/mfhyd.py index 18f8abbc3..e14efa502 100644 --- a/flopy/modflow/mfhyd.py +++ b/flopy/modflow/mfhyd.py @@ -184,10 +184,10 @@ def __init__( obs = ModflowHyd.get_empty(nhyd) if isinstance(obsdata, list): if len(obsdata) != nhyd: - e = "ModflowHyd: nhyd ({}) does not equal ".format( - nhyd - ) + "length of obsdata ({}).".format(len(obsdata)) - raise RuntimeError(e) + raise RuntimeError( + f"ModflowHyd: nhyd ({nhyd}) does not equal " + f"length of obsdata ({len(obsdata)})." + ) for idx in range(nhyd): obs["pckg"][idx] = obsdata[idx][0] obs["arr"][idx] = obsdata[idx][1] @@ -240,21 +240,17 @@ def write_file(self): f = open(self.fn_path, "w") # write dataset 1 - f.write( - "{} {} {} {}\n".format( - self.nhyd, self.ihydun, self.hydnoh, self.heading - ) - ) + f.write(f"{self.nhyd} {self.ihydun} {self.hydnoh} {self.heading}\n") # write dataset 2 for idx in range(self.nhyd): - f.write("{} ".format(self.obsdata["pckg"][idx].decode())) - f.write("{} ".format(self.obsdata["arr"][idx].decode())) - f.write("{} ".format(self.obsdata["intyp"][idx].decode())) - f.write("{} ".format(self.obsdata["klay"][idx] + 1)) - f.write("{} ".format(self.obsdata["xl"][idx])) - f.write("{} ".format(self.obsdata["yl"][idx])) - f.write("{} ".format(self.obsdata["hydlbl"][idx].decode())) + f.write(f"{self.obsdata['pckg'][idx].decode()} ") + f.write(f"{self.obsdata['arr'][idx].decode()} ") + f.write(f"{self.obsdata['intyp'][idx].decode()} ") + f.write(f"{self.obsdata['klay'][idx] + 1} ") + f.write(f"{self.obsdata['xl'][idx]} ") + f.write(f"{self.obsdata['yl'][idx]} ") + f.write(f"{self.obsdata['hydlbl'][idx].decode()} ") f.write("\n") # close hydmod file diff --git a/flopy/modflow/mflak.py b/flopy/modflow/mflak.py index c2942e7ad..c60aec531 100644 --- a/flopy/modflow/mflak.py +++ b/flopy/modflow/mflak.py @@ -387,9 +387,7 @@ def __init__( elif isinstance(stages, list): stages = np.array(stages) if stages.shape[0] != nlakes: - err = "stages shape should be ({}) but is only ({}).".format( - nlakes, stages.shape[0] - ) + err = f"stages shape should be ({nlakes}) but is only ({stages.shape[0]})." 
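Aside: attribute access and indexing also work inside the braces, which is why the message above can interpolate "stages.shape[0]" directly; a minimal sketch with a hypothetical array:

    import numpy as np

    stages, nlakes = np.zeros(3), 5
    msg = f"stages shape should be ({nlakes}) but is only ({stages.shape[0]})."
    assert msg == "stages shape should be (5) but is only (3)."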
raise Exception(err) self.stages = stages if stage_range is None: @@ -401,8 +399,7 @@ def __init__( stage_range = np.array(stage_range) elif isinstance(stage_range, float): raise Exception( - "stage_range should be a list or " - "array of size ({}, 2)".format(nlakes) + f"stage_range should be a list or array of size ({nlakes}, 2)" ) if self.parent.dis.steady[0]: if stage_range.shape != (nlakes, 2): @@ -451,8 +448,7 @@ def __init__( flux_data[key] = td if len(list(flux_data.keys())) != nlakes: raise Exception( - "flux_data dictionary must " - "have {} entries".format(nlakes) + f"flux_data dictionary must have {nlakes} entries" ) elif isinstance(value, float) or isinstance(value, int): td = {} @@ -570,7 +566,7 @@ def write_file(self): else: tmplwrt = self.lwrt t = [itmp, itmp2, tmplwrt] - comment = "Stress period {}".format(kper + 1) + comment = f"Stress period {kper + 1}" f.write( write_fixed_var( t, free=self.parent.free_format_input, comment=comment @@ -760,8 +756,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): for iper in range(nper): if model.verbose: print( - " reading lak dataset 4 - " - "for stress period {}".format(iper + 1) + f" reading lak dataset 4 - for stress period {iper + 1}" ) line = f.readline().rstrip() if model.array_free_format: @@ -774,19 +769,17 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): if itmp > 0: if model.verbose: print( - " reading lak dataset 5 - " - "for stress period {}".format(iper + 1) + f" reading lak dataset 5 - for stress period {iper + 1}" ) - name = "LKARR_StressPeriod_{}".format(iper) + name = f"LKARR_StressPeriod_{iper}" lakarr = Util3d.load( f, model, (nlay, nrow, ncol), np.int32, name, ext_unit_dict ) if model.verbose: print( - " reading lak dataset 6 - " - "for stress period {}".format(iper + 1) + f" reading lak dataset 6 - for stress period {iper + 1}" ) - name = "BDLKNC_StressPeriod_{}".format(iper) + name = f"BDLKNC_StressPeriod_{iper}" bdlknc = Util3d.load( f, model, @@ -801,8 +794,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): if model.verbose: print( - " reading lak dataset 7 - " - "for stress period {}".format(iper + 1) + f" reading lak dataset 7 - for stress period {iper + 1}" ) line = f.readline().rstrip() t = line.split() @@ -811,8 +803,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): if nslms > 0: if model.verbose: print( - " reading lak dataset 8 - " - "for stress period {}".format(iper + 1) + f" reading lak dataset 8 - for stress period {iper + 1}" ) for i in range(nslms): line = f.readline().rstrip() @@ -838,8 +829,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): if itmp1 >= 0: if model.verbose: print( - " reading lak dataset 9 - " - "for stress period {}".format(iper + 1) + f" reading lak dataset 9 - for stress period {iper + 1}" ) ds9 = {} for n in range(nlakes): diff --git a/flopy/modflow/mflmt.py b/flopy/modflow/mflmt.py index 8a915c948..b837dd992 100644 --- a/flopy/modflow/mflmt.py +++ b/flopy/modflow/mflmt.py @@ -136,19 +136,11 @@ def write_file(self): """ f = open(self.fn_path, "w") - f.write("{}\n".format(self.heading)) - f.write("{:20s}\n".format("OUTPUT_FILE_NAME " + self.output_file_name)) - f.write( - "{:20s} {:10d}\n".format( - "OUTPUT_FILE_UNIT ", self.output_file_unit - ) - ) - f.write( - "{:20s}\n".format("OUTPUT_FILE_HEADER " + self.output_file_header) - ) - f.write( - "{:20s}\n".format("OUTPUT_FILE_FORMAT " + self.output_file_format) - ) + f.write(f"{self.heading}\n") + f.write(f"OUTPUT_FILE_NAME {self.output_file_name:20s}\n") + 
f.write(f"OUTPUT_FILE_UNIT {self.output_file_unit:10d}\n") + f.write(f"OUTPUT_FILE_HEADER {self.output_file_header:20s}\n") + f.write(f"OUTPUT_FILE_FORMAT {self.output_file_format:20s}\n") if self.package_flows: # check that the list is not empty # Generate a string to write pckgs = "" @@ -161,8 +153,7 @@ def write_file(self): if "all" in [x.lower() for x in self.package_flows]: pckgs += "ALL" - line = "PACKAGE_FLOWS " + pckgs - f.write("%s\n" % (line)) + f.write(f"PACKAGE_FLOWS {pckgs}\n") f.close() @@ -214,9 +205,9 @@ def load(cls, f, model, ext_unit_dict=None): # set default values if filename: prefix = os.path.splitext(os.path.basename(filename))[0] - output_file_name = prefix + ".ftl" + output_file_name = f"{prefix}.ftl" else: - output_file_name = model.name + ".ftl" + output_file_name = f"{model.name}.ftl" output_file_unit = 333 output_file_header = "standard" output_file_format = "unformatted" diff --git a/flopy/modflow/mflpf.py b/flopy/modflow/mflpf.py index dd3065f3f..ab90b9337 100644 --- a/flopy/modflow/mflpf.py +++ b/flopy/modflow/mflpf.py @@ -375,7 +375,7 @@ def write_file(self, check=True, f=None): # allows turning off package checks when writing files at model level if check: self.check( - f="{}.chk".format(self.name[0]), + f=f"{self.name[0]}.chk", verbose=self.parent.verbose, level=1, ) @@ -391,7 +391,7 @@ def write_file(self, check=True, f=None): f = open(self.fn_path, "w") # Item 0: text - f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") # Item 1: IBCFCB, HDRY, NPLPF, , OPTIONS if self.parent.version == "mfusg" and self.parent.structured == False: @@ -406,9 +406,7 @@ def write_file(self, check=True, f=None): ) else: f.write( - "{0:10d}{1:10.6G}{2:10d} {3:s}\n".format( - self.ipakcb, self.hdry, self.nplpf, self.options - ) + f"{self.ipakcb:10d}{self.hdry:10.6G}{self.nplpf:10d} {self.options}\n" ) # LAYTYP array f.write(self.laytyp.string) @@ -423,11 +421,7 @@ def write_file(self, check=True, f=None): # Item 7: WETFCT, IWETIT, IHDWET iwetdry = self.laywet.sum() if iwetdry > 0: - f.write( - "{0:10f}{1:10d}{2:10d}\n".format( - self.wetfct, self.iwetit, self.ihdwet - ) - ) + f.write(f"{self.wetfct:10f}{self.iwetit:10d}{self.ihdwet:10d}\n") transient = not dis.steady.all() for k in range(nlay): f.write(self.hk[k].get_file_entry()) @@ -600,7 +594,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # hk if model.verbose: - print(" loading hk layer {0:3d}...".format(k + 1)) + print(f" loading hk layer {k + 1:3d}...") if "hk" not in par_types: t = Util2d.load( f, model, (nrow, ncol), np.float32, "hk", ext_unit_dict @@ -615,7 +609,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # hani if chani[k] <= 0.0: if model.verbose: - print(" loading hani layer {0:3d}...".format(k + 1)) + print(f" loading hani layer {k + 1:3d}...") if "hani" not in par_types: t = Util2d.load( f, @@ -634,7 +628,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # vka if model.verbose: - print(" loading vka layer {0:3d}...".format(k + 1)) + print(f" loading vka layer {k + 1:3d}...") key = "vk" if layvka[k] != 0: key = "vani" @@ -657,7 +651,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # ss if model.verbose: - print(" loading ss layer {0:3d}...".format(k + 1)) + print(f" loading ss layer {k + 1:3d}...") if "ss" not in par_types: t = Util2d.load( f, model, (nrow, ncol), np.float32, "ss", ext_unit_dict @@ -672,7 +666,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # sy if laytyp[k] != 0: if model.verbose: - print(" loading sy layer 
{0:3d}...".format(k + 1)) + print(f" loading sy layer {k + 1:3d}...") if "sy" not in par_types: t = Util2d.load( f, @@ -692,7 +686,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # vkcb if dis.laycbd[k] > 0: if model.verbose: - print(" loading vkcb layer {0:3d}...".format(k + 1)) + print(f" loading vkcb layer {k + 1:3d}...") if "vkcb" not in par_types: t = Util2d.load( f, @@ -712,7 +706,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # wetdry if laywet[k] != 0 and laytyp[k] != 0: if model.verbose: - print(" loading wetdry layer {0:3d}...".format(k + 1)) + print(f" loading wetdry layer {k + 1:3d}...") t = Util2d.load( f, model, (nrow, ncol), np.float32, "wetdry", ext_unit_dict ) @@ -765,7 +759,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): ) if check: lpf.check( - f="{}.chk".format(lpf.name[0]), + f=f"{lpf.name[0]}.chk", verbose=lpf.parent.verbose, level=0, ) diff --git a/flopy/modflow/mfmlt.py b/flopy/modflow/mfmlt.py index a0f571afd..5f1f2870f 100644 --- a/flopy/modflow/mfmlt.py +++ b/flopy/modflow/mfmlt.py @@ -192,9 +192,7 @@ def load(cls, f, model, nrow=None, ncol=None, ext_unit_dict=None): else: mltnam = t[0].lower() if model.verbose: - sys.stdout.write( - ' reading data for "{:<10s}" mult\n'.format(mltnam) - ) + sys.stdout.write(f' reading data for "{mltnam:<10s}" mult\n') readArray = True kwrd = None if len(t) > 1: @@ -268,8 +266,7 @@ def mult_function(mult_dict, line): elif op == "^": multarray = multarray ** atemp else: - s = "Invalid MULT operation {}".format(op) - raise Exception(s) + raise Exception(f"Invalid MULT operation {op}") return multarray @staticmethod diff --git a/flopy/modflow/mfmnw2.py b/flopy/modflow/mfmnw2.py index 30ecd31e8..c9b077a58 100644 --- a/flopy/modflow/mfmnw2.py +++ b/flopy/modflow/mfmnw2.py @@ -847,16 +847,14 @@ def _getloc(n): if self.pumploc > 0: f_mnw.write( indent - + "{:.0f} {:.0f} {:.0f}\n".format( - self.pumplay, self.pumprow, self.pumpcol - ) + + f"{self.pumplay:.0f} {self.pumprow:.0f} {self.pumpcol:.0f}\n" ) elif self.pumploc < 0: - fmt = indent + "{}\n".format(float_format) + fmt = indent + f"{float_format}\n" f_mnw.write(fmt.format(self.zpump)) # dataset 2f if self.qlimit > 0: - fmt = indent + "{} ".format(float_format) + "{:.0f}" + fmt = indent + f"{float_format} " + "{:.0f}" f_mnw.write(fmt.format(self.hlim, self.qcut)) if self.qcut != 0: fmt = " {0} {0}".format(float_format) @@ -1764,13 +1762,13 @@ def _write_1(self, f_mnw): None """ - f_mnw.write("{:.0f} ".format(self.mnwmax)) + f_mnw.write(f"{self.mnwmax:.0f} ") if self.mnwmax < 0: - f_mnw.write("{:.0f} ".format(self.nodtot)) - f_mnw.write("{:.0f} {:.0f}".format(self.ipakcb, self.mnwprnt)) + f_mnw.write(f"{self.nodtot:.0f} ") + f_mnw.write(f"{self.ipakcb:.0f} {self.mnwprnt:.0f}") if len(self.aux) > 0: for abc in self.aux: - f_mnw.write(" aux {}".format(abc)) + f_mnw.write(f" aux {abc}") f_mnw.write("\n") def write_file( @@ -1801,7 +1799,7 @@ def write_file( f_mnw = open(self.fn_path, "w") # dataset 0 (header) - f_mnw.write("{0}\n".format(self.heading)) + f_mnw.write(f"{self.heading}\n") # dataset 1 self._write_1(f_mnw) @@ -1820,11 +1818,7 @@ def write_file( # dataset 3 for per in range(self.nper): - f_mnw.write( - "{:.0f} Stress Period {:.0f}\n".format( - self.itmp[per], per + 1 - ) - ) + f_mnw.write(f"{self.itmp[per]:.0f} Stress Period {per + 1}\n") if self.itmp[per] > 0: for n in range(self.itmp[per]): diff --git a/flopy/modflow/mfmnwi.py b/flopy/modflow/mfmnwi.py index 48a61c036..53026e888 100644 --- a/flopy/modflow/mfmnwi.py +++ 
b/flopy/modflow/mfmnwi.py @@ -137,7 +137,7 @@ def __init__( model.add_output_file( iu, fname=fname, - extension="{:04d}.mnwobs".format(iu), + extension=f"{iu:04d}.mnwobs", binflag=False, package=ModflowMnwi._ftype(), ) @@ -344,13 +344,13 @@ def write_file(self): # f.write('{}\n'.format(self.heading)) # dataset 1 - WEL1flag QSUMflag SYNDflag - line = "{:10d}".format(self.wel1flag) - line += "{:10d}".format(self.qsumflag) - line += "{:10d}\n".format(self.byndflag) + line = f"{self.wel1flag:10d}" + line += f"{self.qsumflag:10d}" + line += f"{self.byndflag:10d}\n" f.write(line) # dataset 2 - MNWOBS - f.write("{:10d}\n".format(self.mnwobs)) + f.write(f"{self.mnwobs:10d}\n") # dataset 3 - WELLID UNIT QNDflag QBHflag {CONCflag} # (Repeat MNWOBS times) @@ -366,10 +366,10 @@ def write_file(self): assert ( qhbflag >= 0 ), "QHBflag must be greater than or equal to zero." - line = "{:20s} ".format(wellid) - line += "{:5d} ".format(unit) - line += "{:5d} ".format(qndflag) - line += "{:5d} ".format(qhbflag) + line = f"{wellid:20s} " + line += f"{unit:5d} " + line += f"{qndflag:5d} " + line += f"{qhbflag:5d} " if nitems == 5: concflag = t[4] assert ( @@ -378,7 +378,7 @@ def write_file(self): assert isinstance( concflag, int ), "CONCflag must be an integer between 0 and 3." - line += "{:5d} ".format(concflag) + line += f"{concflag:5d} " line += "\n" f.write(line) diff --git a/flopy/modflow/mfnwt.py b/flopy/modflow/mfnwt.py index ed9e9edfd..7d33905f5 100644 --- a/flopy/modflow/mfnwt.py +++ b/flopy/modflow/mfnwt.py @@ -323,7 +323,7 @@ def write_file(self): """ # Open file for writing f = open(self.fn_path, "w") - f.write("%s\n" % self.heading) + f.write(f"{self.heading}\n") f.write( "{:10.3e}{:10.3e}{:10d}{:10.3e}{:10d}{:10d}{:10d}".format( self.headtol, @@ -341,33 +341,33 @@ def write_file(self): if option.lower() == "specified": isspecified = True if isspecified: - f.write("{0:10.4g}".format(self.dbdtheta)) - f.write("{0:10.4g}".format(self.dbdkappa)) - f.write("{0:10.4g}".format(self.dbdgamma)) - f.write("{0:10.4g}".format(self.momfact)) - f.write("{0:10d}".format(self.backflag)) + f.write(f"{self.dbdtheta:10.4g}") + f.write(f"{self.dbdkappa:10.4g}") + f.write(f"{self.dbdgamma:10.4g}") + f.write(f"{self.momfact:10.4g}") + f.write(f"{self.backflag:10d}") if self.backflag > 0: - f.write("{0:10d}".format(self.maxbackiter)) - f.write("{0:10.4g}".format(self.backtol)) - f.write("{0:10.4g}".format(self.backreduce)) + f.write(f"{self.maxbackiter:10d}") + f.write(f"{self.backtol:10.4g}") + f.write(f"{self.backreduce:10.4g}") f.write("\n") if self.linmeth == 1: - f.write("{0:10d}".format(self.maxitinner)) - f.write("{0:10d}".format(self.ilumethod)) - f.write("{0:10d}".format(self.levfill)) - f.write("{0:10.4g}".format(self.stoptol)) - f.write("{0:10d}".format(self.msdr)) + f.write(f"{self.maxitinner:10d}") + f.write(f"{self.ilumethod:10d}") + f.write(f"{self.levfill:10d}") + f.write(f"{self.stoptol:10.4g}") + f.write(f"{self.msdr:10d}") elif self.linmeth == 2: - f.write("{0:10d}".format(self.iacl)) - f.write("{0:10d}".format(self.norder)) - f.write("{0:10d}".format(self.level)) - f.write("{0:10d}".format(self.north)) - f.write("{0:10d}".format(self.iredsys)) - f.write("{0:10.4g}".format(self.rrctols)) - f.write("{0:10d}".format(self.idroptol)) - f.write("{0:10.4g}".format(self.epsrn)) - f.write("{0:10.4g}".format(self.hclosexmd)) - f.write("{0:10d}".format(self.mxiterxmd)) + f.write(f"{self.iacl:10d}") + f.write(f"{self.norder:10d}") + f.write(f"{self.level:10d}") + f.write(f"{self.north:10d}") + 
f.write(f"{self.iredsys:10d}") + f.write(f"{self.rrctols:10.4g}") + f.write(f"{self.idroptol:10d}") + f.write(f"{self.epsrn:10.4g}") + f.write(f"{self.hclosexmd:10.4g}") + f.write(f"{self.mxiterxmd:10d}") f.write("\n") diff --git a/flopy/modflow/mfoc.py b/flopy/modflow/mfoc.py index 7650455e0..d45f99f86 100644 --- a/flopy/modflow/mfoc.py +++ b/flopy/modflow/mfoc.py @@ -155,7 +155,7 @@ def __init__( unitnumber=None, filenames=None, label="LABEL", - **kwargs + **kwargs, ): """ @@ -393,15 +393,13 @@ def check(self, f=None, verbose=True, level=1, checktype=None): chk._add_to_summary( "Warning", package="OC", # value=kperkstp, - desc="action {!r} ignored; too few words".format( - action - ), + desc=f"action {action!r} ignored; too few words", ) elif words[0:2] not in expected_actions: chk._add_to_summary( "Warning", package="OC", # value=kperkstp, - desc="action {!r} ignored".format(action), + desc=f"action {action!r} ignored", ) # TODO: check data list of layers for some actions for kperkstp in keys: @@ -425,37 +423,31 @@ def write_file(self): """ f_oc = open(self.fn_path, "w") - f_oc.write("{}\n".format(self.heading)) + f_oc.write(f"{self.heading}\n") # write options - line = "HEAD PRINT FORMAT {0:3.0f}\n".format(self.ihedfm) + line = f"HEAD PRINT FORMAT {self.ihedfm:3.0f}\n" f_oc.write(line) if self.chedfm is not None: - line = "HEAD SAVE FORMAT {0:20s} {1}\n".format( - self.chedfm, self.label - ) + line = f"HEAD SAVE FORMAT {self.chedfm:20s} {self.label}\n" f_oc.write(line) if self.savehead: - line = "HEAD SAVE UNIT {0:5.0f}\n".format(self.iuhead) + line = f"HEAD SAVE UNIT {self.iuhead:5.0f}\n" f_oc.write(line) - f_oc.write("DRAWDOWN PRINT FORMAT {0:3.0f}\n".format(self.iddnfm)) + f_oc.write(f"DRAWDOWN PRINT FORMAT {self.iddnfm:3.0f}\n") if self.cddnfm is not None: - line = "DRAWDOWN SAVE FORMAT {0:20s} {1}\n".format( - self.cddnfm, self.label - ) + line = f"DRAWDOWN SAVE FORMAT {self.cddnfm:20s} {self.label}\n" f_oc.write(line) if self.saveddn: - line = "DRAWDOWN SAVE UNIT {0:5.0f}\n".format(self.iuddn) + line = f"DRAWDOWN SAVE UNIT {self.iuddn:5.0f}\n" f_oc.write(line) if self.saveibnd: if self.cboufm is not None: - line = "IBOUND SAVE FORMAT {0:20s} {1}\n".format( - self.cboufm, self.label - ) + line = f"IBOUND SAVE FORMAT {self.cboufm:20s} {self.label}\n" f_oc.write(line) - line = "IBOUND SAVE UNIT {0:5.0f}\n".format(self.iuibnd) + line = f"IBOUND SAVE UNIT {self.iuibnd:5.0f}\n" f_oc.write(line) if self.compact: @@ -491,13 +483,9 @@ def write_file(self): if "DDREFERENCE" in item.upper(): ddnref = item.lower() else: - lines += " {}\n".format(item) + lines += f" {item}\n" if len(lines) > 0: - f_oc.write( - "period {} step {} {}\n".format( - kper + 1, kstp + 1, ddnref - ) - ) + f_oc.write(f"period {kper + 1} step {kstp + 1} {ddnref}\n") f_oc.write(lines) f_oc.write("\n") ddnref = "" @@ -807,10 +795,10 @@ def load( # validate the size of nstp if len(nstp) != nper: - msg = "nstp must be a list with {} entries, ".format( - nper - ) + "provided nstp list has {} entries.".format(len(nstp)) - raise IOError(msg) + raise IOError( + f"nstp must be a list with {nper} entries, " + f"provided nstp list has {len(nstp)} entries." 
+ ) # initialize ihedfm = 0 @@ -903,21 +891,21 @@ def load( hdpr, ddpr = int(lnlst[0]), int(lnlst[1]) hdsv, ddsv = int(lnlst[2]), int(lnlst[3]) if hdpr != 0: - headprint += " {}".format(k + 1) + headprint += f" {k + 1}" if ddpr != 0: - ddnprint += " {}".format(k + 1) + ddnprint += f" {k + 1}" if hdsv != 0: - headsave += " {}".format(k + 1) + headsave += f" {k + 1}" if ddsv != 0: - ddnsave += " {}".format(k + 1) + ddnsave += f" {k + 1}" if len(headprint) > 0: - lines.append("PRINT HEAD" + headprint) + lines.append(f"PRINT HEAD{headprint}") if len(ddnprint) > 0: - lines.append("PRINT DRAWDOWN" + ddnprint) + lines.append(f"PRINT DRAWDOWN{ddnprint}") if len(headsave) > 0: - lines.append("SAVE HEAD" + headsave) + lines.append(f"SAVE HEAD{headsave}") if len(ddnsave) > 0: - lines.append("SAVE DRAWDOWN" + ddnsave) + lines.append(f"SAVE DRAWDOWN{ddnsave}") stress_period_data[(iperoc, itsoc)] = list(lines) else: iperoc, itsoc = 0, 0 @@ -1024,13 +1012,9 @@ def load( stress_period_data[kperkstp] = [] # dataset 3 elif "PRINT" in lnlst[0].upper(): - lines.append( - "{} {}".format(lnlst[0].lower(), lnlst[1].lower()) - ) + lines.append(f"{lnlst[0].lower()} {lnlst[1].lower()}") elif "SAVE" in lnlst[0].upper(): - lines.append( - "{} {}".format(lnlst[0].lower(), lnlst[1].lower()) - ) + lines.append(f"{lnlst[0].lower()} {lnlst[1].lower()}") else: print("Error encountered in OC import.") print("Creating default OC package.") diff --git a/flopy/modflow/mfpar.py b/flopy/modflow/mfpar.py index d8b93b649..a1b6155e4 100644 --- a/flopy/modflow/mfpar.py +++ b/flopy/modflow/mfpar.py @@ -77,16 +77,12 @@ def set_zone(self, model, ext_unit_dict): ) if model.verbose: sys.stdout.write( - " {} package load...success\n".format( - self.zone.name[0] - ) + f" {self.zone.name[0]} package load...success\n" ) ext_unit_dict.pop(zone_key) model.remove_package("ZONE") except BaseException as o: - sys.stdout.write( - " {} package load...failed\n {!s}".format("ZONE", o) - ) + sys.stdout.write(f" ZONE package load...failed\n {o!s}") return def set_mult(self, model, ext_unit_dict): @@ -128,16 +124,12 @@ def set_mult(self, model, ext_unit_dict): ) if model.verbose: sys.stdout.write( - " {} package load...success\n".format( - self.mult.name[0] - ) + f" {self.mult.name[0]} package load...success\n" ) ext_unit_dict.pop(mult_key) model.remove_package("MULT") except BaseException as o: - sys.stdout.write( - " {} package load...failed\n {!s}".format("MULT", o) - ) + sys.stdout.write(f" MULT package load...failed\n {o!s}") return @@ -180,16 +172,12 @@ def set_pval(self, model, ext_unit_dict): ) if model.verbose: sys.stdout.write( - " {} package load...success\n".format( - self.pval.name[0] - ) + f" {self.pval.name[0]} package load...success\n" ) ext_unit_dict.pop(pval_key) model.remove_package("PVAL") except BaseException as o: - sys.stdout.write( - " {} package load...failed\n {!s}".format("PVAL", o) - ) + sys.stdout.write(f" PVAL package load...failed\n {o!s}") return @@ -229,7 +217,7 @@ def load(f, npar, verbose=False): t = line.strip().split() parnam = t[0].lower() if verbose: - print(' loading parameter "{}"...'.format(parnam)) + print(f' loading parameter "{parnam}"...') partyp = t[1].lower() if partyp not in par_types: par_types.append(partyp) diff --git a/flopy/modflow/mfparbc.py b/flopy/modflow/mfparbc.py index f8f665716..87671f9ab 100644 --- a/flopy/modflow/mfparbc.py +++ b/flopy/modflow/mfparbc.py @@ -82,7 +82,7 @@ def load(cls, f, npar, dt, model, ext_unit_dict=None, verbose=False): if parnam.endswith("'"): parnam = parnam[:-1] if 
verbose: - print(' loading parameter "{}"...'.format(parnam)) + print(f' loading parameter "{parnam}"...') partyp = t[1].lower() parval = t[2] nlst = int(t[3]) @@ -153,7 +153,7 @@ def loadarray(f, npar, verbose=False): t = line.strip().split() parnam = t[0].lower() if verbose: - print(' loading parameter "{}"...'.format(parnam)) + print(f' loading parameter "{parnam}"...') partyp = t[1].lower() parval = t[2] nclu = int(t[3]) diff --git a/flopy/modflow/mfpbc.py b/flopy/modflow/mfpbc.py index fdfb915ee..b3c8d72ec 100644 --- a/flopy/modflow/mfpbc.py +++ b/flopy/modflow/mfpbc.py @@ -103,18 +103,15 @@ def write_file(self): ctmp = c.shape[0] else: ctmp = -1 - f_pbc.write("{:10d}{:10d}{:10d}\n".format(itmp, ctmp, self.np)) + f_pbc.write(f"{itmp:10d}{ctmp:10d}{self.np:10d}\n") if n < len(self.layer_row_column_data): for b in a: - line = "{:10d}{:10d}{:10d}{:10d}{:10d}\n".format( - b[0], b[1], b[2], b[3], b[4] + f_pbc.write( + f"{b[0]:10d}{b[1]:10d}{b[2]:10d}{b[3]:10d}{b[4]:10d}\n" ) - f_pbc.write(line) if n < len(self.cosines): for d in c: - f_pbc.write( - "{:10g}{:10g}{:10g}\n".format(d[0], d[1], d[2]) - ) + f_pbc.write(f"{d[0]:10g}{d[1]:10g}{d[2]:10g}\n") f_pbc.close() @staticmethod diff --git a/flopy/modflow/mfpcg.py b/flopy/modflow/mfpcg.py index 055ae3998..11867309a 100644 --- a/flopy/modflow/mfpcg.py +++ b/flopy/modflow/mfpcg.py @@ -179,39 +179,39 @@ def write_file(self): """ f = open(self.fn_path, "w") - f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") ifrfm = self.parent.get_ifrefm() if ifrfm: - f.write("{} ".format(self.mxiter)) - f.write("{} ".format(self.iter1)) - f.write("{} ".format(self.npcond)) - f.write("{}".format(self.ihcofadd)) + f.write(f"{self.mxiter} ") + f.write(f"{self.iter1} ") + f.write(f"{self.npcond} ") + f.write(f"{self.ihcofadd}") f.write("\n") - f.write("{} ".format(self.hclose)) - f.write("{} ".format(self.rclose)) - f.write("{} ".format(self.relax)) - f.write("{} ".format(self.nbpol)) - f.write("{} ".format(self.iprpcg)) - f.write("{} ".format(self.mutpcg)) - f.write("{} ".format(self.damp)) + f.write(f"{self.hclose} ") + f.write(f"{self.rclose} ") + f.write(f"{self.relax} ") + f.write(f"{self.nbpol} ") + f.write(f"{self.iprpcg} ") + f.write(f"{self.mutpcg} ") + f.write(f"{self.damp} ") if self.damp < 0: - f.write("{}".format(self.dampt)) + f.write(f"{self.dampt}") f.write("\n") else: - f.write(" {0:9d}".format(self.mxiter)) - f.write(" {0:9d}".format(self.iter1)) - f.write(" {0:9d}".format(self.npcond)) - f.write(" {0:9d}".format(self.ihcofadd)) + f.write(f" {self.mxiter:9d}") + f.write(f" {self.iter1:9d}") + f.write(f" {self.npcond:9d}") + f.write(f" {self.ihcofadd:9d}") f.write("\n") - f.write(" {0:9.3e}".format(self.hclose)) - f.write(" {0:9.3e}".format(self.rclose)) - f.write(" {0:9.3e}".format(self.relax)) - f.write(" {0:9d}".format(self.nbpol)) - f.write(" {0:9d}".format(self.iprpcg)) - f.write(" {0:9d}".format(self.mutpcg)) - f.write(" {0:9.3e}".format(self.damp)) + f.write(f" {self.hclose:9.3e}") + f.write(f" {self.rclose:9.3e}") + f.write(f" {self.relax:9.3e}") + f.write(f" {self.nbpol:9d}") + f.write(f" {self.iprpcg:9d}") + f.write(f" {self.mutpcg:9d}") + f.write(f" {self.damp:9.3e}") if self.damp < 0: - f.write(" {0:9.3e}".format(self.dampt)) + f.write(f" {self.dampt:9.3e}") f.write("\n") f.close() diff --git a/flopy/modflow/mfpcgn.py b/flopy/modflow/mfpcgn.py index d645cda49..9e6441983 100644 --- a/flopy/modflow/mfpcgn.py +++ b/flopy/modflow/mfpcgn.py @@ -279,10 +279,10 @@ def __init__( # check if a valid model version has been 
specified if model.version == "mfusg": - err = "Error: cannot use {} package ".format( - self.name - ) + "with model version {}".format(model.version) - raise Exception(err) + raise Exception( + f"Error: cannot use {self.name} package " + f"with model version {model.version}" + ) self._generate_heading() self.url = "pcgn.htm" @@ -324,38 +324,38 @@ def write_file(self): """ # Open file for writing f = open(self.fn_path, "w") - f.write("{0:s}\n".format(self.heading)) + f.write(f"{self.heading}\n") ifrfm = self.parent.get_ifrefm() if ifrfm: # dataset 1 - line = "{} ".format(self.iter_mo) - line += "{} ".format(self.iter_mi) - line += "{} ".format(self.close_r) - line += "{}\n".format(self.close_h) + line = f"{self.iter_mo} " + line += f"{self.iter_mi} " + line += f"{self.close_r} " + line += f"{self.close_h}\n" f.write(line) # dataset 2 - line = "{} ".format(self.relax) - line += "{} ".format(self.ifill) - line += "{} ".format(self.unit_pc) - line += "{}\n".format(self.unit_ts) + line = f"{self.relax} " + line += f"{self.ifill} " + line += f"{self.unit_pc} " + line += f"{self.unit_ts}\n" f.write(line) # dataset 3 - line = "{} ".format(self.adamp) - line += "{} ".format(self.damp) - line += "{} ".format(self.damp_lb) - line += "{} ".format(self.rate_d) - line += "{}\n".format(self.chglimit) + line = f"{self.adamp} " + line += f"{self.damp} " + line += f"{self.damp_lb} " + line += f"{self.rate_d} " + line += f"{self.chglimit}\n" f.write(line) # dataset 4 - line = "{} ".format(self.acnvg) - line += "{} ".format(self.cnvg_lb) - line += "{} ".format(self.mcnvg) - line += "{} ".format(self.rate_c) - line += "{}\n".format(self.ipunit) + line = f"{self.acnvg} " + line += f"{self.cnvg_lb} " + line += f"{self.mcnvg} " + line += f"{self.rate_c} " + line += f"{self.ipunit}\n" f.write(line) else: diff --git a/flopy/modflow/mfpks.py b/flopy/modflow/mfpks.py index 4244fe972..4a9844ce2 100644 --- a/flopy/modflow/mfpks.py +++ b/flopy/modflow/mfpks.py @@ -184,39 +184,39 @@ def write_file(self): """ # Open file for writing f = open(self.fn_path, "w") - f.write("%s\n" % self.heading) - f.write("MXITER {0}\n".format(self.mxiter)) - f.write("INNERIT {0}\n".format(self.innerit)) - f.write("ISOLVER {0}\n".format(self.isolver)) - f.write("NPC {0}\n".format(self.npc)) - f.write("ISCL {0}\n".format(self.iscl)) - f.write("IORD {0}\n".format(self.iord)) + f.write(f"{self.heading}\n") + f.write(f"MXITER {self.mxiter}\n") + f.write(f"INNERIT {self.innerit}\n") + f.write(f"ISOLVER {self.isolver}\n") + f.write(f"NPC {self.npc}\n") + f.write(f"ISCL {self.iscl}\n") + f.write(f"IORD {self.iord}\n") if self.ncoresm > 1: - f.write("NCORESM {0}\n".format(self.ncoresm)) + f.write(f"NCORESM {self.ncoresm}\n") if self.ncoresv > 1: - f.write("NCORESV {0}\n".format(self.ncoresv)) - f.write("DAMP {0}\n".format(self.damp)) - f.write("DAMPT {0}\n".format(self.dampt)) + f.write(f"NCORESV {self.ncoresv}\n") + f.write(f"DAMP {self.damp}\n") + f.write(f"DAMPT {self.dampt}\n") if self.npc > 0: - f.write("RELAX {0}\n".format(self.relax)) + f.write(f"RELAX {self.relax}\n") if self.npc == 3: - f.write("IFILL {0}\n".format(self.ifill)) - f.write("DROPTOL {0}\n".format(self.droptol)) - f.write("HCLOSEPKS {0}\n".format(self.hclose)) - f.write("RCLOSEPKS {0}\n".format(self.rclose)) + f.write(f"IFILL {self.ifill}\n") + f.write(f"DROPTOL {self.droptol}\n") + f.write(f"HCLOSEPKS {self.hclose}\n") + f.write(f"RCLOSEPKS {self.rclose}\n") if self.l2norm != None: if self.l2norm.lower() == "l2norm" or self.l2norm == "1": f.write("L2NORM\n") elif 
self.l2norm.lower() == "rl2norm" or self.l2norm == "2": f.write("RELATIVE-L2NORM\n") - f.write("IPRPKS {0}\n".format(self.iprpks)) - f.write("MUTPKS {0}\n".format(self.mutpks)) + f.write(f"IPRPKS {self.iprpks}\n") + f.write(f"MUTPKS {self.mutpks}\n") # MPI if self.mpi: - f.write("PARTOPT {0}\n".format(self.partopt)) - f.write("NOVLAPIMPSOL {0}\n".format(self.novlapimpsol)) - f.write("STENIMPSOL {0}\n".format(self.stenimpsol)) - f.write("VERBOSE {0}\n".format(self.verbose)) + f.write(f"PARTOPT {self.partopt}\n") + f.write(f"NOVLAPIMPSOL {self.novlapimpsol}\n") + f.write(f"STENIMPSOL {self.stenimpsol}\n") + f.write(f"VERBOSE {self.verbose}\n") if self.partopt == 1 | 2: pass # to be implemented diff --git a/flopy/modflow/mfpval.py b/flopy/modflow/mfpval.py index 84b6bf199..d349fea3c 100644 --- a/flopy/modflow/mfpval.py +++ b/flopy/modflow/mfpval.py @@ -188,7 +188,7 @@ def load(cls, f, model, ext_unit_dict=None): if model.verbose: sys.stdout.write( - ' reading parameter values from "{:<10s}"\n'.format(filename) + f' reading parameter values from "{filename:<10s}"\n' ) # read PVAL data diff --git a/flopy/modflow/mfrch.py b/flopy/modflow/mfrch.py index abb601b93..31f9387b6 100644 --- a/flopy/modflow/mfrch.py +++ b/flopy/modflow/mfrch.py @@ -274,7 +274,7 @@ def check( type="Warning", value=R_T.min(), desc=txt ) chk.remove_passed( - "Mean R/T is between {} and {}".format(RTmin, RTmax) + f"Mean R/T is between {RTmin} and {RTmax}" ) if len(greaterthan) > 0: @@ -287,11 +287,11 @@ def check( type="Warning", value=R_T.max(), desc=txt ) chk.remove_passed( - "Mean R/T is between {} and {}".format(RTmin, RTmax) + f"Mean R/T is between {RTmin} and {RTmax}" ) elif len(lessthan) == 0 and len(greaterthan) == 0: chk.append_passed( - "Mean R/T is between {} and {}".format(RTmin, RTmax) + f"Mean R/T is between {RTmin} and {RTmax}" ) # check for NRCHOP values != 3 @@ -334,7 +334,7 @@ def write_file(self, check=True, f=None): # allows turning off package checks when writing files at model level if check: self.check( - f="{}.chk".format(self.name[0]), + f=f"{self.name[0]}.chk", verbose=self.parent.verbose, level=1, ) @@ -344,8 +344,8 @@ def write_file(self, check=True, f=None): f_rch = f else: f_rch = open(self.fn_path, "w") - f_rch.write("{0:s}\n".format(self.heading)) - f_rch.write("{0:10d}{1:10d}\n".format(self.nrchop, self.ipakcb)) + f_rch.write(f"{self.heading}\n") + f_rch.write(f"{self.nrchop:10d}{self.ipakcb:10d}\n") if self.nrchop == 2: irch = {} @@ -365,7 +365,7 @@ def write_file(self, check=True, f=None): for kper, u2d in self.irch.transient_2ds.items() ] ) - f_rch.write("{0:10d}\n".format(mxndrch)) + f_rch.write(f"{mxndrch:10d}\n") for kper in range(nper): inrech, file_entry_rech = self.rech.get_kper_entry(kper) @@ -376,9 +376,7 @@ def write_file(self, check=True, f=None): else: inirch = -1 f_rch.write( - "{0:10d}{1:10d} # {2:s}\n".format( - inrech, inirch, "Stress period " + str(kper + 1) - ) + f"{inrech:10d}{inirch:10d} # Stress period {kper + 1}\n" ) if inrech >= 0: f_rch.write(file_entry_rech) @@ -444,8 +442,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None, check=True): if npar > 0: if model.verbose: print( - " Parameters detected. " - "Number of parameters = {}".format(npar) + f" Parameters detected. 
Number of parameters = {npar}" ) line = f.readline() # dataset 2 @@ -495,8 +492,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None, check=True): if npar == 0: if model.verbose: print( - " loading rech stress " - "period {0:3d}...".format(iper + 1) + f" loading rech stress period {iper + 1:3d}..." ) t = Util2d.load( f, @@ -532,8 +528,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None, check=True): if inirch >= 0: if model.verbose: print( - " loading irch stress " - "period {0:3d}...".format(iper + 1) + f" loading irch stress period {iper + 1:3d}..." ) t = Util2d.load( f, model, u2d_shape, np.int32, "irch", ext_unit_dict @@ -571,7 +566,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None, check=True): ) if check: rch.check( - f="{}.chk".format(rch.name[0]), + f=f"{rch.name[0]}.chk", verbose=rch.parent.verbose, level=0, ) diff --git a/flopy/modflow/mfriv.py b/flopy/modflow/mfriv.py index b26927128..b5e1a26c4 100644 --- a/flopy/modflow/mfriv.py +++ b/flopy/modflow/mfriv.py @@ -123,7 +123,7 @@ def __init__( options=None, unitnumber=None, filenames=None, - **kwargs + **kwargs, ): """ Package constructor. @@ -240,7 +240,7 @@ def check(self, f=None, verbose=True, level=1, checktype=None): botms = self.parent.disu.bot.array[inds] for elev in ["stage", "rbot"]: - txt = "{} below cell bottom".format(elev) + txt = f"{elev} below cell bottom" chk.stress_period_data_values( spd, spd[elev] < botms, @@ -327,15 +327,13 @@ def write_file(self, check=True): # allows turning off package checks when writing files at model level if check: self.check( - f="{}.chk".format(self.name[0]), + f=f"{self.name[0]}.chk", verbose=self.parent.verbose, level=1, ) f_riv = open(self.fn_path, "w") - f_riv.write("{0}\n".format(self.heading)) - line = "{0:10d}{1:10d}".format( - self.stress_period_data.mxact, self.ipakcb - ) + f_riv.write(f"{self.heading}\n") + line = f"{self.stress_period_data.mxact:10d}{self.ipakcb:10d}" for opt in self.options: line += " " + str(opt) line += "\n" @@ -347,7 +345,7 @@ def add_record(self, kper, index, values): try: self.stress_period_data.add_record(kper, index, values) except Exception as e: - raise Exception("mfriv error adding record to list: " + str(e)) + raise Exception(f"mfriv error adding record to list: {e!s}") @classmethod def load(cls, f, model, nper=None, ext_unit_dict=None, check=True): diff --git a/flopy/modflow/mfsfr2.py b/flopy/modflow/mfsfr2.py index d953e3f67..444aa5014 100644 --- a/flopy/modflow/mfsfr2.py +++ b/flopy/modflow/mfsfr2.py @@ -381,7 +381,7 @@ def __init__( ext = "bin" fname = filenames[2] if fname is None: - fname = model.name + ".sfr.{}".format(ext) + fname = f"{model.name}.sfr.{ext}" model.add_output_file( abs(istcb2), fname=fname, @@ -1123,7 +1123,7 @@ def check(self, f=None, verbose=True, level=1, checktype=None): if isinstance(f, str): pth = os.path.join(self.parent.model_ws, f) f = open(pth, "w") - f.write("{}\n".format(chk.txt)) + f.write(f"{chk.txt}\n") # f.close() return chk @@ -1470,7 +1470,7 @@ def get_variable_by_stress_period(self, varname): for per in range(self.nper): inds = self.segment_data[per].nseg - 1 all_data[inds, per] = self.segment_data[per][varname] - dtype.append(("{}{}".format(varname, per), float)) + dtype.append((f"{varname}{per}", float)) isvar = all_data.sum(axis=1) != 0 ra = np.core.records.fromarrays( all_data[isvar].transpose().copy(), dtype=dtype @@ -1639,7 +1639,7 @@ def plot_path(self, start_seg=None, end_seg=0, plot_segment_lines=True): ax.plot(dist, tops, label="Model top") ax.plot(dist, tmp.strtop, 
label="Streambed top") ax.set_xlabel("Distance along path, in miles") - ax.set_ylabel("Elevation, in {}".format(mfunits)) + ax.set_ylabel(f"Elevation, in {mfunits}") ymin, ymax = ax.get_ylim() plt.autoscale(False) @@ -1781,28 +1781,22 @@ def _write_1c(self, f_sfr): self.nstrm = abs( self.nstrm ) # see explanation for dataset 1c in online guide - f_sfr.write("{:.0f} ".format(self.isfropt)) + f_sfr.write(f"{self.isfropt:.0f} ") if self.isfropt > 1: f_sfr.write( - "{:.0f} {:.0f} {:.0f} ".format( - self.nstrail, self.isuzn, self.nsfrsets - ) + f"{self.nstrail:.0f} {self.isuzn:.0f} {self.nsfrsets:.0f} " ) if self.nstrm < 0: - f_sfr.write("{:.0f} ".format(self.isfropt)) + f_sfr.write(f"{self.isfropt:.0f} ") if self.isfropt > 1: f_sfr.write( - "{:.0f} {:.0f} {:.0f} ".format( - self.nstrail, self.isuzn, self.nsfrsets - ) + f"{self.nstrail:.0f} {self.isuzn:.0f} {self.nsfrsets:.0f} " ) if self.nstrm < 0 or self.transroute: - f_sfr.write("{:.0f} ".format(self.irtflg)) + f_sfr.write(f"{self.irtflg:.0f} ") if self.irtflg > 0: f_sfr.write( - "{:.0f} {:.8f} {:.8f} ".format( - self.numtim, self.weight, self.flwtol - ) + f"{self.numtim:.0f} {self.weight:.8f} {self.flwtol:.8f} " ) f_sfr.write("\n") @@ -2028,7 +2022,7 @@ def write_file(self, filename=None): f_sfr = open(self.fn_path, "w") # Item 0 -- header - f_sfr.write("{0}\n".format(self.heading)) + f_sfr.write(f"{self.heading}\n") # Item 1 if ( @@ -2078,14 +2072,14 @@ def write_file(self, filename=None): for d in self.channel_geometry_data[i][nseg][ k ]: - f_sfr.write("{:.2f} ".format(d)) + f_sfr.write(f"{d:.2f} ") f_sfr.write("\n") if icalc == 4: # nstrpts = self.segment_data[i][j][5] for k in range(3): for d in self.channel_flow_data[i][nseg][k]: - f_sfr.write("{:.2f} ".format(d)) + f_sfr.write(f"{d:.2f} ") f_sfr.write("\n") if self.tabfiles and i == 0: for j in sorted(self.tabfiles_dict.keys()): @@ -2269,7 +2263,7 @@ def __init__(self, sfrpackage, verbose=True, level=1): self.passed = [] self.warnings = [] self.errors = [] - self.txt = "\n{} ERRORS:\n".format(self.sfr.name[0]) + self.txt = f"\n{self.sfr.name[0]} ERRORS:\n" self.summary_array = None def _boolean_compare( @@ -2393,15 +2387,15 @@ def for_nans(self): isnan = np.any(np.isnan(np.array(self.reach_data.tolist())), axis=1) nanreaches = self.reach_data[isnan] if np.any(isnan): - txt += "Found {} reachs with nans:\n".format(len(nanreaches)) + txt += f"Found {len(nanreaches)} reachs with nans:\n" if self.level == 1: txt += _print_rec_array(nanreaches, delimiter=" ") for per, sd in self.segment_data.items(): isnan = np.any(np.isnan(np.array(sd.tolist())), axis=1) nansd = sd[isnan] if np.any(isnan): - txt += "Per {}: found {} segments with nans:\n".format( - per, len(nanreaches) + txt += ( + f"Per {per}: found {len(nanreaches)} segments with nans:\n" ) if self.level == 1: txt += _print_rec_array(nansd, delimiter=" ") @@ -2438,7 +2432,7 @@ def numbering(self): len(reaches), reaches, level=self.level, datatype="reach" ) if len(t) > 0: - txt += "Segment {} has {}".format(segment, t) + txt += f"Segment {segment} has {t}" if txt == "": passed = True self._txt_footer( @@ -2468,7 +2462,7 @@ def numbering(self): txt += "nseg outseg\n" t = "" for nseg, outseg in decreases: - t += "{} {}\n".format(nseg, outseg) + t += f"{nseg} {outseg}\n" txt += t # '\n'.join(textwrap.wrap(t, width=10)) if len(t) == 0: passed = True @@ -2501,7 +2495,7 @@ def routing(self): np.savetxt( f, circular_segs, fmt="%d", delimiter=",", header=txt ) - txt += "See {} for details.".format(f) + txt += f"See {f} for details." 
if self.verbose: print(txt) self._txt_footer(headertxt, txt, "circular routing", warning=False) @@ -2550,8 +2544,8 @@ def routing(self): segments_with_breaks = set(breaks_reach_data.iseg) if len(breaks) > 0: txt += ( - "{0} segments ".format(len(segments_with_breaks)) - + "with non-adjacent reaches found.\n" + f"{len(segments_with_breaks)} segments " + "with non-adjacent reaches found.\n" ) if self.level == 1: txt += "At segments:\n" @@ -2564,7 +2558,7 @@ def routing(self): with open(fpath, "w") as fp: fp.write(",".join(rd.dtype.names) + "\n") np.savetxt(fp, rd, "%s", ",") - txt += "See {} for details.".format(fpath) + txt += f"See {fpath} for details." if self.verbose: print(txt) self._txt_footer( @@ -2637,10 +2631,9 @@ def overlapping_conductance(self, tol=1e-6): if len(nodes_with_multiple_conductance) > 0: txt += ( - "{} model cells with multiple non-zero SFR conductances found.\n" - "This may lead to circular routing between collocated reaches.\n".format( - len(nodes_with_multiple_conductance) - ) + f"{len(nodes_with_multiple_conductance)} model cells with " + "multiple non-zero SFR conductances found.\n" + "This may lead to circular routing between collocated reaches.\n" ) if self.level == 1: txt += "Nodes with overlapping conductances:\n" @@ -2692,8 +2685,7 @@ def elevations(self, min_strtop=-10, max_strtop=15000): """ headertxt = ( - "Checking for streambed tops of less " - "than {}...\n".format(min_strtop) + f"Checking for streambed tops of less than {min_strtop}...\n" ) txt = "" if self.verbose: @@ -2716,15 +2708,12 @@ def elevations(self, min_strtop=-10, max_strtop=15000): if len(txt) == 0: passed = True else: - txt += "strtop not specified for isfropt={}\n".format( - self.sfr.isfropt - ) + txt += f"strtop not specified for isfropt={self.sfr.isfropt}\n" passed = True self._txt_footer(headertxt, txt, "minimum streambed top", passed) headertxt = ( - "Checking for streambed tops of " - "greater than {}...\n".format(max_strtop) + f"Checking for streambed tops of greater than {max_strtop}...\n" ) txt = "" if self.verbose: @@ -2742,8 +2731,8 @@ def elevations(self, min_strtop=-10, max_strtop=15000): if np.any(is_greater): above_max = self.reach_data[is_greater] txt += ( - "{} instances ".format(len(above_max)) - + "of streambed top above the maximum found.\n" + f"{len(above_max)} instances " + "of streambed top above the maximum found.\n" ) if self.level == 1: txt += "Reaches with high strtop:\n" @@ -2751,9 +2740,7 @@ def elevations(self, min_strtop=-10, max_strtop=15000): if len(txt) == 0: passed = True else: - txt += "strtop not specified for isfropt={}\n".format( - self.sfr.isfropt - ) + txt += f"strtop not specified for isfropt={self.sfr.isfropt}\n" passed = True self._txt_footer(headertxt, txt, "maximum streambed top", passed) @@ -2802,7 +2789,7 @@ def elevations(self, min_strtop=-10, max_strtop=15000): ], col1="d_elev", col2=np.zeros(len(segment_data)), - level0txt="Stress Period {}: ".format(per + 1) + level0txt=f"Stress Period {per + 1}: " + "{} segments encountered with elevdn > elevup.", level1txt="Backwards segments:", ) @@ -2840,7 +2827,7 @@ def elevations(self, min_strtop=-10, max_strtop=15000): ], col1="d_elev2", col2=np.zeros(len(non_outlets_seg_data)), - level0txt="Stress Period {}: ".format(per + 1) + level0txt=f"Stress Period {per + 1}: " + "{} segments encountered with segments encountered " "with outseg elevup > elevdn.", level1txt="Backwards segment connections:", @@ -3123,9 +3110,7 @@ def slope(self, minimum_slope=1e-4, maximum_slope=1.0): where stage is computed. 
""" headertxt = ( - "Checking for streambed slopes of less than {}...\n".format( - minimum_slope - ) + f"Checking for streambed slopes of less than {minimum_slope}...\n" ) txt = "" if self.verbose: @@ -3150,17 +3135,11 @@ def slope(self, minimum_slope=1e-4, maximum_slope=1.0): if len(txt) == 0: passed = True else: - txt += "slope not specified for isfropt={}\n".format( - self.sfr.isfropt - ) + txt += f"slope not specified for isfropt={self.sfr.isfropt}\n" passed = True self._txt_footer(headertxt, txt, "minimum slope", passed) - headertxt = ( - "Checking for streambed slopes of greater than {}...\n".format( - maximum_slope - ) - ) + headertxt = f"Checking for streambed slopes of greater than {maximum_slope}...\n" txt = "" if self.verbose: print(headertxt.strip()) @@ -3185,9 +3164,7 @@ def slope(self, minimum_slope=1e-4, maximum_slope=1.0): if len(txt) == 0: passed = True else: - txt += "slope not specified for isfropt={}\n".format( - self.sfr.isfropt - ) + txt += f"slope not specified for isfropt={self.sfr.isfropt}\n" passed = True self._txt_footer(headertxt, txt, "maximum slope", passed) @@ -3213,14 +3190,14 @@ def _check_numbers(n, numbers, level=1, datatype="reach"): txt = "" num_range = np.arange(1, n + 1) if not np.array_equal(num_range, numbers): - txt += "Invalid {} numbering\n".format(datatype) + txt += f"Invalid {datatype} numbering\n" if level == 1: # consistent dimension for boolean array non_consecutive = np.append(np.diff(numbers) != 1, False) gaps = num_range[non_consecutive] + 1 if len(gaps) > 0: gapstr = " ".join(map(str, gaps)) - txt += "Gaps in numbering at positions {}\n".format(gapstr) + txt += f"Gaps in numbering at positions {gapstr}\n" return txt @@ -3331,9 +3308,7 @@ def _fmt_string_list(array, float_format="{!s}"): "recarray to file - change to 'object' type".format(name) ) else: - raise ValueError( - "unknown dtype for {!r}: {!r}".format(name, vtype) - ) + raise ValueError(f"unknown dtype for {name!r}: {vtype!r}") return fmt_list diff --git a/flopy/modflow/mfsip.py b/flopy/modflow/mfsip.py index 20f6b7572..d3168a508 100644 --- a/flopy/modflow/mfsip.py +++ b/flopy/modflow/mfsip.py @@ -132,10 +132,10 @@ def __init__( # check if a valid model version has been specified if model.version == "mfusg": - err = "Error: cannot use {} package ".format( - self.name - ) + "with model version {}".format(model.version) - raise Exception(err) + raise Exception( + f"Error: cannot use {self.name} package " + f"with model version {model.version}" + ) self._generate_heading() self.url = "sip.htm" @@ -160,21 +160,15 @@ def write_file(self): """ # Open file for writing f = open(self.fn_path, "w") - f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") ifrfm = self.parent.get_ifrefm() if ifrfm: - f.write("{} {}\n".format(self.mxiter, self.nparm)) + f.write(f"{self.mxiter} {self.nparm}\n") f.write( - "{} {} {} {} {}\n".format( - self.accl, - self.hclose, - self.ipcalc, - self.wseed, - self.iprsip, - ) + f"{self.accl} {self.hclose} {self.ipcalc} {self.wseed} {self.iprsip}\n" ) else: - f.write("{:10d}{:10d}\n".format(self.mxiter, self.nparm)) + f.write(f"{self.mxiter:10d}{self.nparm:10d}\n") f.write( "{:10.3f}{:10.3g}{:10d}{:10.3f}{:10d}\n".format( self.accl, diff --git a/flopy/modflow/mfsms.py b/flopy/modflow/mfsms.py index 64e951a72..909f8c733 100644 --- a/flopy/modflow/mfsms.py +++ b/flopy/modflow/mfsms.py @@ -353,7 +353,7 @@ def write_file(self): """ f = open(self.fn_path, "w") - f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") nopt = len(self.options) 
if nopt > 0: f.write(" ".join(self.options) + "\n") @@ -487,13 +487,13 @@ def load(cls, f, model, ext_unit_dict=None): nonlinmeth = int(ll.pop(0)) linmeth = int(ll.pop(0)) if model.verbose: - print(" HCLOSE {}".format(hclose)) - print(" HICLOSE {}".format(hiclose)) - print(" MXITER {}".format(mxiter)) - print(" ITER1 {}".format(iter1)) - print(" IPRSMS {}".format(iprsms)) - print(" NONLINMETH {}".format(nonlinmeth)) - print(" LINMETH {}".format(linmeth)) + print(f" HCLOSE {hclose}") + print(f" HICLOSE {hiclose}") + print(f" MXITER {mxiter}") + print(f" ITER1 {iter1}") + print(f" IPRSMS {iprsms}") + print(f" NONLINMETH {nonlinmeth}") + print(f" LINMETH {linmeth}") # Record 2 theta = None @@ -524,14 +524,14 @@ def load(cls, f, model, ext_unit_dict=None): breduc = float(ll.pop(0)) reslim = float(ll.pop(0)) if model.verbose: - print(" THETA {}".format(theta)) - print(" AKAPPA {}".format(akappa)) - print(" GAMMA {}".format(gamma)) - print(" AMOMENTUM {}".format(amomentum)) - print(" NUMTRACK {}".format(numtrack)) - print(" BTOL {}".format(btol)) - print(" BREDUC {}".format(breduc)) - print(" RESLIM {}".format(reslim)) + print(f" THETA {theta}") + print(f" AKAPPA {akappa}") + print(f" GAMMA {gamma}") + print(f" AMOMENTUM {amomentum}") + print(f" NUMTRACK {numtrack}") + print(f" BTOL {btol}") + print(f" BREDUC {breduc}") + print(f" RESLIM {reslim}") iacl = None norder = None @@ -561,14 +561,14 @@ def load(cls, f, model, ext_unit_dict=None): idroptol = int(ll.pop(0)) epsrn = float(ll.pop(0)) if model.verbose: - print(" IACL {}".format(iacl)) - print(" NORDER {}".format(norder)) - print(" LEVEL {}".format(level)) - print(" NORTH {}".format(north)) - print(" IREDSYS {}".format(iredsys)) - print(" RRCTOL {}".format(rrctol)) - print(" IDROPTOL {}".format(idroptol)) - print(" EPSRN {}".format(epsrn)) + print(f" IACL {iacl}") + print(f" NORDER {norder}") + print(f" LEVEL {level}") + print(f" NORTH {north}") + print(f" IREDSYS {iredsys}") + print(f" RRCTOL {rrctol}") + print(f" IDROPTOL {idroptol}") + print(f" EPSRN {epsrn}") clin = None ipc = None @@ -593,12 +593,12 @@ def load(cls, f, model, ext_unit_dict=None): if len(ll) > 0: relaxpcgu = float(ll.pop(0)) if model.verbose: - print(" CLIN {}".format(clin)) - print(" IPC {}".format(ipc)) - print(" ISCL {}".format(iscl)) - print(" IORD {}".format(iord)) - print(" RCLOSEPCGU {}".format(rclosepcgu)) - print(" RELAXPCGU {}".format(relaxpcgu)) + print(f" CLIN {clin}") + print(f" IPC {ipc}") + print(f" ISCL {iscl}") + print(f" IORD {iord}") + print(f" RCLOSEPCGU {rclosepcgu}") + print(f" RELAXPCGU {relaxpcgu}") if openfile: f.close() diff --git a/flopy/modflow/mfsor.py b/flopy/modflow/mfsor.py index f090959a7..678081d76 100644 --- a/flopy/modflow/mfsor.py +++ b/flopy/modflow/mfsor.py @@ -116,10 +116,10 @@ def __init__( # check if a valid model version has been specified if model.version != "mf2k": - err = "Error: cannot use {} ".format( - self.name - ) + "package with model version {}".format(model.version) - raise Exception(err) + raise Exception( + f"Error: cannot use {self.name} " + f"package with model version {model.version}" + ) self._generate_heading() self.url = "sor.htm" @@ -140,11 +140,9 @@ def write_file(self): """ # Open file for writing f = open(self.fn_path, "w") - f.write("{}\n".format(self.heading)) - f.write("{:10d}\n".format(self.mxiter)) - line = "{:10.4g}{:10.4g}{:10d}\n".format( - self.accl, self.hclose, self.iprsor - ) + f.write(f"{self.heading}\n") + f.write(f"{self.mxiter:10d}\n") + line = 
f"{self.accl:10.4g}{self.hclose:10.4g}{self.iprsor:10d}\n" f.write(line) f.close() diff --git a/flopy/modflow/mfstr.py b/flopy/modflow/mfstr.py index 09b2cea24..1ba44296c 100644 --- a/flopy/modflow/mfstr.py +++ b/flopy/modflow/mfstr.py @@ -242,7 +242,7 @@ def __init__( unitnumber=None, filenames=None, options=None, - **kwargs + **kwargs, ): """ Package constructor. @@ -417,10 +417,7 @@ def __init__( "stress period" ) elif d == 0: - print( - " no str data for stress " - "period {}".format(key) - ) + print(f" no str data for stress period {key}") else: raise Exception( "ModflowStr error: unsupported data type: " @@ -454,8 +451,7 @@ def __init__( ) elif d == 0: print( - " no str segment data for " - "stress period {}".format(key) + f" no str segment data for stress period {key}" ) else: raise Exception( @@ -562,7 +558,7 @@ def write_file(self): f_str = open(self.fn_path, "w") # dataset 0 - f_str.write("{0}\n".format(self.heading)) + f_str.write(f"{self.heading}\n") # dataset 1 - parameters not supported on write @@ -761,14 +757,14 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): model.add_pop_key_list(istcb1) except: if model.verbose: - print(" could not remove unit number {}".format(istcb1)) + print(f" could not remove unit number {istcb1}") try: if istcb2 != 0: ipakcb = 53 model.add_pop_key_list(istcb2) except: if model.verbose: - print(" could not remove unit number {}".format(istcb2)) + print(f" could not remove unit number {istcb2}") options = [] aux_names = [] @@ -801,9 +797,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): segment_data = {} for iper in range(nper): if model.verbose: - print( - " loading {} for kper {:5d}".format(ModflowStr, iper + 1) - ) + print(f" loading {ModflowStr} for kper {iper + 1:5d}") line = f.readline() if line == "": break @@ -849,8 +843,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): except: if model.verbose: print( - " implicit static instance for " - "parameter {}".format(pname) + f" implicit static instance for parameter {pname}" ) par_dict, current_dict = pak_parms.get(pname) diff --git a/flopy/modflow/mfsub.py b/flopy/modflow/mfsub.py index 448bceaaa..9a665b929 100644 --- a/flopy/modflow/mfsub.py +++ b/flopy/modflow/mfsub.py @@ -500,39 +500,30 @@ def write_file(self, check=False, f=None): if f is None: f = open(self.fn_path, "w") # First line: heading - f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") # write dataset 1 f.write( - "{} {} {} {} {} {} ".format( - self.ipakcb, - self.isuboc, - self.nndb, - self.ndb, - self.nmz, - self.nn, - ) + f"{self.ipakcb} {self.isuboc} {self.nndb} {self.ndb} {self.nmz} {self.nn} " ) f.write( - "{} {} {} {} {}".format( - self.ac1, self.ac2, self.itmin, self.idsave, self.idrest - ) + f"{self.ac1} {self.ac2} {self.itmin} {self.idsave} {self.idrest}" ) line = "" if self.idbit is not None: - line += " {}".format(self.idbit) + line += f" {self.idbit}" line += "\n" f.write(line) if self.nndb > 0: t = self.ln.array for tt in t: - f.write("{} ".format(tt + 1)) + f.write(f"{tt + 1} ") f.write("\n") if self.ndb > 0: t = self.ldn.array for tt in t: - f.write("{} ".format(tt + 1)) + f.write(f"{tt + 1} ") f.write("\n") # write dataset 4 @@ -551,9 +542,10 @@ def write_file(self, check=False, f=None): # write dataset 9 if self.ndb > 0: for k in range(self.nmz): - line = "{:15.6g} {:15.6g} {:15.6g}".format( - self.dp[k, 0], self.dp[k, 1], self.dp[k, 2] - ) + " #material zone {} data\n".format(k + 1) + line = ( + f"{self.dp[k, 0]:15.6g} {self.dp[k, 1]:15.6g} " + f"{self.dp[k, 
2]:15.6g} #material zone {k + 1} data\n" + ) f.write(line) # write dataset 10 to 14 if self.ndb > 0: @@ -568,7 +560,7 @@ def write_file(self, check=False, f=None): if self.isuboc > 0: # dataset 15 for i in self.ids15: - f.write("{} ".format(i)) + f.write(f"{i} ") f.write(" #dataset 15\n") # dataset 16 @@ -576,8 +568,8 @@ def write_file(self, check=False, f=None): t = self.ids16[k, :] t[0:4] += 1 for i in t: - f.write("{} ".format(i)) - f.write(" #dataset 16 isuboc {}\n".format(k + 1)) + f.write(f"{i} ") + f.write(f" #dataset 16 isuboc {k + 1}\n") # close sub file f.close() @@ -676,7 +668,7 @@ def load(cls, f, model, ext_unit_dict=None): model, (nrow, ncol), np.float32, - "rnb delay bed {}".format(k + 1), + f"rnb delay bed {k + 1}", ext_unit_dict, ) rnb[k] = t @@ -694,56 +686,56 @@ def load(cls, f, model, ext_unit_dict=None): # hc if model.verbose: sys.stdout.write( - " loading sub dataset 5 for layer {}\n".format(kk) + f" loading sub dataset 5 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "hc layer {}".format(kk), + f"hc layer {kk}", ext_unit_dict, ) hc[k] = t # sfe if model.verbose: sys.stdout.write( - " loading sub dataset 6 for layer {}\n".format(kk) + f" loading sub dataset 6 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "sfe layer {}".format(kk), + f"sfe layer {kk}", ext_unit_dict, ) sfe[k] = t # sfv if model.verbose: sys.stdout.write( - " loading sub dataset 7 for layer {}\n".format(kk) + f" loading sub dataset 7 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "sfv layer {}".format(kk), + f"sfv layer {kk}", ext_unit_dict, ) sfv[k] = t # com if model.verbose: sys.stdout.write( - " loading sub dataset 8 for layer {}\n".format(kk) + f" loading sub dataset 8 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "com layer {}".format(kk), + f"com layer {kk}", ext_unit_dict, ) com[k] = t @@ -755,8 +747,7 @@ def load(cls, f, model, ext_unit_dict=None): for k in range(nmz): if model.verbose: sys.stdout.write( - " loading sub dataset 9 for material " - "zone {}\n".format(k + 1) + f" loading sub dataset 9 for material zone {k + 1}\n" ) line = f.readline() t = line.strip().split() @@ -778,70 +769,70 @@ def load(cls, f, model, ext_unit_dict=None): # dstart if model.verbose: sys.stdout.write( - " loading sub dataset 10 for layer {}\n".format(kk) + f" loading sub dataset 10 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "dstart layer {}".format(kk), + f"dstart layer {kk}", ext_unit_dict, ) dstart[k] = t # dhc if model.verbose: sys.stdout.write( - " loading sub dataset 11 for layer {}\n".format(kk) + f" loading sub dataset 11 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "dhc layer {}".format(kk), + f"dhc layer {kk}", ext_unit_dict, ) dhc[k] = t # dcom if model.verbose: sys.stdout.write( - " loading sub dataset 12 for layer {}\n".format(kk) + f" loading sub dataset 12 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "dcom layer {}".format(kk), + f"dcom layer {kk}", ext_unit_dict, ) dcom[k] = t # dz if model.verbose: sys.stdout.write( - " loading sub dataset 13 for layer {}\n".format(kk) + f" loading sub dataset 13 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "dz layer {}".format(kk), + f"dz layer {kk}", ext_unit_dict, ) dz[k] = t # nz if model.verbose: sys.stdout.write( - " loading sub dataset 14 for layer {}\n".format(kk) + f" loading sub dataset 14 for layer {kk}\n" ) t = 
Util2d.load( f, model, (nrow, ncol), np.int32, - "nz layer {}".format(kk), + f"nz layer {kk}", ext_unit_dict, ) nz[k] = t @@ -851,9 +842,7 @@ def load(cls, f, model, ext_unit_dict=None): if isuboc > 0: # dataset 15 if model.verbose: - sys.stdout.write( - " loading sub dataset 15 for layer {}\n".format(kk) - ) + sys.stdout.write(f" loading sub dataset 15 for layer {kk}\n") ids15 = np.empty(12, dtype=np.int32) ids15 = read1d(f, ids15) # dataset 16 @@ -861,8 +850,7 @@ def load(cls, f, model, ext_unit_dict=None): for k in range(isuboc): if model.verbose: sys.stdout.write( - " loading sub dataset 16 for " - "isuboc {}\n".format(k + 1) + f" loading sub dataset 16 for isuboc {k + 1}\n" ) t = np.empty(17, dtype=np.int32) t = read1d(f, t) diff --git a/flopy/modflow/mfswi2.py b/flopy/modflow/mfswi2.py index a1dd392ad..a8113e3c6 100644 --- a/flopy/modflow/mfswi2.py +++ b/flopy/modflow/mfswi2.py @@ -282,7 +282,7 @@ def __init__( if obsnam is None: obsnam = [] for n in range(nobs): - obsnam.append("Obs{:03}".format(n + 1)) + obsnam.append(f"Obs{n + 1:03}") else: if not isinstance(obsnam, list): obsnam = [obsnam] @@ -398,7 +398,7 @@ def __init__( (nlay, nrow, ncol), np.float32, zeta[i], - name="zeta_" + str(i + 1), + name=f"zeta_{i + 1}", ) ) self.ssz = Util3d( @@ -434,9 +434,7 @@ def write_file(self, check=True, f=None): f = open(self.fn_path, "w") # First line: heading - f.write( - "{}\n".format(self.heading) - ) # Writing heading not allowed in SWI??? + f.write(f"{self.heading}\n") # Writing heading not allowed in SWI??? # write dataset 1 f.write("# Dataset 1\n") @@ -459,39 +457,33 @@ def write_file(self, check=True, f=None): # write dataset 2a f.write("# Dataset 2a\n") - f.write( - "{:10d}{:10d}{:10d}\n".format( - self.nsolver, self.iprsol, self.mutsol - ) - ) + f.write(f"{self.nsolver:10d}{self.iprsol:10d}{self.mutsol:10d}\n") # write dataset 2b if self.nsolver == 2: f.write("# Dataset 2b\n") - f.write("{:10d}".format(self.solver2params["mxiter"])) - f.write("{:10d}".format(self.solver2params["iter1"])) - f.write("{:10d}".format(self.solver2params["npcond"])) - f.write("{:14.6g}".format(self.solver2params["zclose"])) - f.write("{:14.6g}".format(self.solver2params["rclose"])) - f.write("{:14.6g}".format(self.solver2params["relax"])) - f.write("{:10d}".format(self.solver2params["nbpol"])) - f.write("{:14.6g}".format(self.solver2params["damp"])) - f.write("{:14.6g}\n".format(self.solver2params["dampt"])) + f.write(f"{self.solver2params['mxiter']:10d}") + f.write(f"{self.solver2params['iter1']:10d}") + f.write(f"{self.solver2params['npcond']:10d}") + f.write(f"{self.solver2params['zclose']:14.6g}") + f.write(f"{self.solver2params['rclose']:14.6g}") + f.write(f"{self.solver2params['relax']:14.6g}") + f.write(f"{self.solver2params['nbpol']:10d}") + f.write(f"{self.solver2params['damp']:14.6g}") + f.write(f"{self.solver2params['dampt']:14.6g}\n") # write dataset 3a f.write("# Dataset 3a\n") - f.write("{:14.6g}{:14.6g}".format(self.toeslope, self.tipslope)) + f.write(f"{self.toeslope:14.6g}{self.tipslope:14.6g}") if self.alpha is not None: - f.write("{:14.6g}{:14.6g}".format(self.alpha, self.beta)) + f.write(f"{self.alpha:14.6g}{self.beta:14.6g}") f.write("\n") # write dataset 3b if self.adaptive is True: f.write("# Dataset 3b\n") f.write( - "{:10d}{:10d}{:14.6g}\n".format( - self.nadptmx, self.nadptmn, self.adptfct - ) + f"{self.nadptmx:10d}{self.nadptmn:10d}{self.adptfct:14.6g}\n" ) # write dataset 4 f.write("# Dataset 4\n") @@ -516,9 +508,9 @@ def write_file(self, check=True, f=None): f.write("# Dataset 
8\n") for i in range(self.nobs): # f.write(self.obsnam[i] + 3 * '%10i' % self.obslrc + '\n') - f.write("{} ".format(self.obsnam[i])) + f.write(f"{self.obsnam[i]} ") for v in self.obslrc[i, :]: - f.write("{:10d}".format(v + 1)) + f.write(f"{v + 1:10d}") f.write("\n") # close swi2 file @@ -690,7 +682,7 @@ def load(cls, f, model, ext_unit_dict=None): break zeta = [] for n in range(nsrf): - ctxt = "zeta_surf{:02d}".format(n + 1) + ctxt = f"zeta_surf{n + 1:02d}" zeta.append( Util3d.load( f, diff --git a/flopy/modflow/mfswt.py b/flopy/modflow/mfswt.py index 852580bc1..3153e6c09 100644 --- a/flopy/modflow/mfswt.py +++ b/flopy/modflow/mfswt.py @@ -243,7 +243,7 @@ def write_file(self, f=None): if f is None: f = open(self.fn_path, "w") # First line: heading - f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") # write dataset 1 f.write( "{} {} {} {} {} {} {}\n".format( @@ -259,7 +259,7 @@ def write_file(self, f=None): # write dataset 2 t = self.lnwt.array for tt in t: - f.write("{} ".format(tt + 1)) + f.write(f"{tt + 1} ") f.write("\n") # write dataset 3 @@ -310,7 +310,7 @@ def write_file(self, f=None): if self.iswtoc > 0: # dataset 16 for i in self.ids16: - f.write("{} ".format(i)) + f.write(f"{i} ") f.write(" #dataset 16\n") # dataset 17 @@ -318,8 +318,8 @@ def write_file(self, f=None): t = self.ids17[k, :].copy() t[0:4] += 1 for i in t: - f.write("{} ".format(i)) - f.write(" #dataset 17 iswtoc {}\n".format(k + 1)) + f.write(f"{i} ") + f.write(f" #dataset 17 iswtoc {k + 1}\n") # close swt file f.close() @@ -737,15 +737,13 @@ def load(cls, f, model, ext_unit_dict=None): kk = lnwt[k] + 1 # thick if model.verbose: - sys.stdout.write( - " loading swt dataset 7 for layer {}\n".format(kk) - ) + sys.stdout.write(f" loading swt dataset 7 for layer {kk}\n") t = Util2d.load( f, model, (nrow, ncol), np.float32, - "thick layer {}".format(kk), + f"thick layer {kk}", ext_unit_dict, ) thick[k] = t @@ -753,28 +751,28 @@ def load(cls, f, model, ext_unit_dict=None): # sse if model.verbose: sys.stdout.write( - " loading swt dataset 8 for layer {}\n".format(kk) + f" loading swt dataset 8 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "sse layer {}".format(kk), + f"sse layer {kk}", ext_unit_dict, ) sse[k] = t # ssv if model.verbose: sys.stdout.write( - " loading swt dataset 9 for layer {}\n".format(kk) + f" loading swt dataset 9 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "sse layer {}".format(kk), + f"ssv layer {kk}", ext_unit_dict, ) ssv[k] = t @@ -782,56 +780,52 @@ def load(cls, f, model, ext_unit_dict=None): # cr if model.verbose: sys.stdout.write( - " loading swt dataset 10 for layer {}\n".format(kk) + f" loading swt dataset 10 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "cr layer {}".format(kk), + f"cr layer {kk}", ext_unit_dict, ) cr[k] = t # cc if model.verbose: sys.stdout.write( - " loading swt dataset 11 for layer {}\n".format(kk) + f" loading swt dataset 11 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "cc layer {}".format(kk), + f"cc layer {kk}", ext_unit_dict, ) cc[k] = t # void if model.verbose: - sys.stdout.write( - " loading swt dataset 12 for layer {}\n".format(kk) - ) + sys.stdout.write(f" loading swt dataset 12 for layer {kk}\n") t = Util2d.load( f, model, (nrow, ncol), np.float32, - "void layer {}".format(kk), + f"void layer {kk}", ext_unit_dict, ) void[k] = t # sub if model.verbose: - sys.stdout.write( - " loading swt dataset 13 for layer {}\n".format(kk) - ) + 
sys.stdout.write(f" loading swt dataset 13 for layer {kk}\n") t = Util2d.load( f, model, (nrow, ncol), np.float32, - "sub layer {}".format(kk), + f"sub layer {kk}", ext_unit_dict, ) sub[k] = t @@ -847,28 +841,28 @@ def load(cls, f, model, ext_unit_dict=None): if istpcs != 0: if model.verbose: sys.stdout.write( - " loading swt dataset 14 for layer {}\n".format(kk) + f" loading swt dataset 14 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "pcsoff layer {}".format(k + 1), + f"pcsoff layer {k + 1}", ext_unit_dict, ) pcsoff[k] = t else: if model.verbose: sys.stdout.write( - " loading swt dataset 15 for layer {}\n".format(kk) + f" loading swt dataset 15 for layer {kk}\n" ) t = Util2d.load( f, model, (nrow, ncol), np.float32, - "pcs layer {}".format(k + 1), + f"pcs layer {k + 1}", ext_unit_dict, ) pcs[k] = t @@ -878,9 +872,7 @@ def load(cls, f, model, ext_unit_dict=None): if iswtoc > 0: # dataset 16 if model.verbose: - sys.stdout.write( - " loading swt dataset 15 for layer {}\n".format(kk) - ) + sys.stdout.write(f" loading swt dataset 16 for layer {kk}\n") ids16 = np.empty(26, dtype=np.int32) ids16 = read1d(f, ids16) # for k in range(1, 26, 2): @@ -891,8 +883,7 @@ def load(cls, f, model, ext_unit_dict=None): for k in range(iswtoc): if model.verbose: sys.stdout.write( - " loading swt dataset 17 for " - "iswtoc {}\n".format(k + 1) + f" loading swt dataset 17 for iswtoc {k + 1}\n" ) t = np.empty(30, dtype=np.int32) t = read1d(f, t) diff --git a/flopy/modflow/mfupw.py b/flopy/modflow/mfupw.py index 2f6f5c9bf..89e98bf46 100644 --- a/flopy/modflow/mfupw.py +++ b/flopy/modflow/mfupw.py @@ -303,7 +303,7 @@ def write_file(self, check=True, f=None): # allows turning off package checks when writing files at model level if check: self.check( - f="{}.chk".format(self.name[0]), + f=f"{self.name[0]}.chk", verbose=self.parent.verbose, level=1, ) @@ -313,7 +313,7 @@ def write_file(self, check=True, f=None): else: f_upw = open(self.fn_path, "w") # Item 0: text - f_upw.write("{}\n".format(self.heading)) + f_upw.write(f"{self.heading}\n") # Item 1: IBCFCB, HDRY, NPLPF f_upw.write( "{0:10d}{1:10.3G}{2:10d}{3:10d}{4:s}\n".format( @@ -478,7 +478,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # hk if model.verbose: - print(" loading hk layer {0:3d}...".format(k + 1)) + print(f" loading hk layer {k + 1:3d}...") if "hk" not in par_types: t = Util2d.load( f, model, (nrow, ncol), np.float32, "hk", ext_unit_dict ) @@ -493,7 +493,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # hani if chani[k] < 0: if model.verbose: - print(" loading hani layer {0:3d}...".format(k + 1)) + print(f" loading hani layer {k + 1:3d}...") if "hani" not in par_types: t = Util2d.load( f, @@ -512,7 +512,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # vka if model.verbose: - print(" loading vka layer {0:3d}...".format(k + 1)) + print(f" loading vka layer {k + 1:3d}...") key = "vk" if layvka[k] != 0: key = "vani" @@ -532,7 +532,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # ss if model.verbose: - print(" loading ss layer {0:3d}...".format(k + 1)) + print(f" loading ss layer {k + 1:3d}...") if "ss" not in par_types: t = Util2d.load( f, model, (nrow, ncol), np.float32, "ss", ext_unit_dict ) @@ -547,7 +547,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): # sy if laytyp[k] != 0: if model.verbose: - print(" loading sy layer {0:3d}...".format(k + 1)) + print(f" loading sy layer {k + 1:3d}...") if "sy" not in par_types: t = Util2d.load( f, @@ -567,7 +567,7 @@
def load(cls, f, model, ext_unit_dict=None, check=True): # vkcb if model.get_package("DIS").laycbd[k] > 0: if model.verbose: - print(" loading vkcb layer {0:3d}...".format(k + 1)) + print(f" loading vkcb layer {k + 1:3d}...") if "vkcb" not in par_types: t = Util2d.load( f, @@ -623,7 +623,7 @@ def load(cls, f, model, ext_unit_dict=None, check=True): ) if check: upw.check( - f="{}.chk".format(upw.name[0]), + f=f"{upw.name[0]}.chk", verbose=upw.parent.verbose, level=0, ) diff --git a/flopy/modflow/mfuzf1.py b/flopy/modflow/mfuzf1.py index 9c6ea5319..9cf1af356 100644 --- a/flopy/modflow/mfuzf1.py +++ b/flopy/modflow/mfuzf1.py @@ -449,7 +449,7 @@ def __init__( for key, value in uzgag.items(): fname = filenames[ipos] iu = abs(key) - uzgagext = "uzf{}.out".format(iu) + uzgagext = f"uzf{iu}.out" model.add_output_file( iu, fname=fname, @@ -729,7 +729,7 @@ def _write_1a(self, f_uzf): if self.nosurfleak > 0: specify_temp += "NOSURFLEAK" if (self.specifythtr + self.specifythti + self.nosurfleak) > 0: - f_uzf.write("{}\n".format(specify_temp)) + f_uzf.write(f"{specify_temp}\n") del specify_temp else: txt = "options\n" @@ -743,11 +743,11 @@ def _write_1a(self, f_uzf): ]: value = self.__dict__[var] if int(value) > 0: - txt += "{}\n".format(var) + txt += f"{var}\n" if self.etsquare: - txt += "etsquare {}\n".format(self.smoothfact) + txt += f"etsquare {self.smoothfact}\n" if self.netflux: - txt += "netflux {} {}\n".format(self.unitrech, self.unitdis) + txt += f"netflux {self.unitrech} {self.unitdis}\n" txt += "end\n" f_uzf.write(txt) @@ -769,7 +769,7 @@ def write_file(self, f=None): f_uzf = f else: f_uzf = open(self.fn_path, "w") - f_uzf.write("{}\n".format(self.heading)) + f_uzf.write(f"{self.heading}\n") # Dataset 1a if ( @@ -857,18 +857,18 @@ def write_file(self, f=None): comment = " #IUZROW IUZCOL IFTUNIT IUZOPT" values.insert(2, iftunit) for v in values: - f_uzf.write("{:10d}".format(v)) - f_uzf.write("{}\n".format(comment)) + f_uzf.write(f"{v:10d}") + f_uzf.write(f"{comment}\n") else: comment = " #IFTUNIT" - f_uzf.write("{:10d}".format(iftunit)) - f_uzf.write("{}\n".format(comment)) + f_uzf.write(f"{iftunit:10d}") + f_uzf.write(f"{comment}\n") def write_transient(name): invar, var = self.__dict__[name].get_kper_entry(n) - comment = " #{} for stress period ".format(name) + str(n + 1) - f_uzf.write("{0:10d}{1:20s}\n".format(invar, comment)) + comment = f" #{name} for stress period {n + 1}" + f_uzf.write(f"{invar:10d}{comment:20s}\n") if invar >= 0: f_uzf.write(var) @@ -1009,7 +1009,7 @@ def load(cls, f, model, ext_unit_dict=None, check=False): } def load_util2d(name, dtype, per=None): - print(" loading {} array...".format(name)) + print(f" loading {name} array...") if per is not None: arrays[name][per] = Util2d.load( f, model, (nrow, ncol), dtype, name, ext_unit_dict @@ -1063,7 +1063,7 @@ def load_util2d(name, dtype, per=None): # dataset 9 for per in range(nper): - print("stress period {}:".format(per + 1)) + print(f"stress period {per + 1}:") line = line_parse(f.readline()) nuzf1 = pop_item(line, int) @@ -1168,7 +1168,7 @@ def load_util2d(name, dtype, per=None): unitnumber=unitnumber, filenames=filenames, options=options, - **arrays + **arrays, ) @staticmethod diff --git a/flopy/modflow/mfwel.py b/flopy/modflow/mfwel.py index 1a3c8a063..cd44fde3a 100644 --- a/flopy/modflow/mfwel.py +++ b/flopy/modflow/mfwel.py @@ -247,7 +247,7 @@ def __init__( ladd = False break if ladd: - options.append("aux {} ".format(name)) + options.append(f"aux {name} ") if isinstance(self.options, OptionBlock): if not 
self.options.auxillary: @@ -294,7 +294,7 @@ def write_file(self, f=None): else: f_wel = open(self.fn_path, "w") - f_wel.write("%s\n" % self.heading) + f_wel.write(f"{self.heading}\n") if ( isinstance(self.options, OptionBlock) @@ -305,9 +305,7 @@ def write_file(self, f=None): if self.options.block: self.options.write_options(f_wel) - line = " {0:9d} {1:9d} ".format( - self.stress_period_data.mxact, self.ipakcb - ) + line = f" {self.stress_period_data.mxact:9d} {self.ipakcb:9d} " if isinstance(self.options, OptionBlock): if self.options.noprint: @@ -336,9 +334,7 @@ def write_file(self, f=None): else: if self.specify and self.parent.version == "mfnwt": f_wel.write( - "SPECIFY {0:10.5g} {1:10d}\n".format( - self.phiramp, self.iunitramp - ) + f"SPECIFY {self.phiramp:10.5g} {self.iunitramp:10d}\n" ) self.stress_period_data.write_transient(f_wel) @@ -348,7 +344,7 @@ def add_record(self, kper, index, values): try: self.stress_period_data.add_record(kper, index, values) except Exception as e: - raise Exception("mfwel error adding record to list: " + str(e)) + raise Exception(f"mfwel error adding record to list: {e!s}") @staticmethod def get_default_dtype(structured=True): diff --git a/flopy/modflow/mfzon.py b/flopy/modflow/mfzon.py index e65ae11a0..f976ee2a4 100644 --- a/flopy/modflow/mfzon.py +++ b/flopy/modflow/mfzon.py @@ -195,9 +195,7 @@ def load(cls, f, model, nrow=None, ncol=None, ext_unit_dict=None): else: zonnam = t[0].lower() if model.verbose: - sys.stdout.write( - ' reading data for "{:<10s}" zone\n'.format(zonnam) - ) + sys.stdout.write(f' reading data for "{zonnam:<10s}" zone\n') # load data t = Util2d.load( f, model, (nrow, ncol), np.int32, zonnam, ext_unit_dict diff --git a/flopy/modflowlgr/mflgr.py b/flopy/modflowlgr/mflgr.py index 4226091ba..009bf21bb 100644 --- a/flopy/modflowlgr/mflgr.py +++ b/flopy/modflowlgr/mflgr.py @@ -124,7 +124,7 @@ def __init__( model_ws=".", external_path=None, verbose=False, - **kwargs + **kwargs, ): super().__init__( modelname, @@ -133,7 +133,7 @@ def __init__( model_ws, structured=True, verbose=verbose, - **kwargs + **kwargs, ) self.version_types = {"mflgr": "MODFLOW-LGR"} @@ -183,11 +183,7 @@ def __init__( if external_path is not None: if os.path.exists(os.path.join(model_ws, external_path)): - print( - "Note: external_path " - + str(external_path) - + " already exists" - ) + print(f"Note: external_path {external_path} already exists") else: os.makedirs(os.path.join(model_ws, external_path)) self.external_path = external_path @@ -195,7 +191,7 @@ def __init__( return def __repr__(self): - return "MODFLOW-LGR model with {} grids".format(self.ngrids) + return f"MODFLOW-LGR model with {self.ngrids} grids" @property def ngrids(self): @@ -243,7 +239,7 @@ def _padline(self, line, comment=None, line_len=79): fmt = "{:" + str(line_len) + "s}" line = fmt.format(line) if comment is not None: - line += " # {}\n".format(comment) + line += f" # {comment}\n" return line def _get_path(self, bpth, pth, fpth=""): @@ -258,9 +254,9 @@ def _get_path(self, bpth, pth, fpth=""): "namefiles must be in the same directory as " "the lgr control file\n" ) - msg += "Control file path: {}\n".format(lpth) - msg += "Namefile path: {}\n".format(mpth) - msg += "Relative path: {}\n".format(rpth) + msg += f"Control file path: {lpth}\n" + msg += f"Namefile path: {mpth}\n" + msg += f"Relative path: {rpth}\n" raise ValueError(msg) return rpth @@ -294,7 +290,7 @@ def write_name_file(self): """ fn_path = os.path.join(self.model_ws, self.namefile) f = open(fn_path, "w") - 
f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") # dataset 1 line = self._padline("LGR", comment="data set 1") @@ -317,7 +313,7 @@ def write_name_file(self): f.write(line) # dataset 5 - line = "{} {}".format(self.iupbhsv, self.iupbfsv) + line = f"{self.iupbhsv} {self.iupbfsv}" line = self._padline(line, comment="data set 5 - iupbhsv, iupbfsv") f.write(line) @@ -329,12 +325,12 @@ def write_name_file(self): pth = self._get_path( self._model_ws, child._model_ws, fpth=child.namefile ) - comment = "data set 6 - child {} namefile".format(idx + 1) + comment = f"data set 6 - child {idx + 1} namefile" line = self._padline(pth, comment=comment) f.write(line) # dataset 7 - comment = "data set 7 - child {} gridstatus".format(idx + 1) + comment = f"data set 7 - child {idx + 1} gridstatus" line = self._padline("CHILDONLY", comment=comment) f.write(line) @@ -346,31 +342,26 @@ def write_name_file(self): child_data.iucbfsv, ) comment = ( - "data set 8 - child {} " - "ishflg, ibflg, iucbhsv, iucbfsv".format(idx + 1) + f"data set 8 - child {idx + 1} ishflg, ibflg, iucbhsv, iucbfsv" ) line = self._padline(line, comment=comment) f.write(line) # dataset 9 - line = "{} {}".format(child_data.mxlgriter, child_data.ioutlgr) - comment = "data set 9 - child {} mxlgriter, ioutlgr".format( - idx + 1 - ) + line = f"{child_data.mxlgriter} {child_data.ioutlgr}" + comment = f"data set 9 - child {idx + 1} mxlgriter, ioutlgr" line = self._padline(line, comment=comment) f.write(line) # dataset 10 - line = "{} {}".format(child_data.relaxh, child_data.relaxf) - comment = "data set 10 - child {} relaxh, relaxf".format(idx + 1) + line = f"{child_data.relaxh} {child_data.relaxf}" + comment = f"data set 10 - child {idx + 1} relaxh, relaxf" line = self._padline(line, comment=comment) f.write(line) # dataset 11 - line = "{} {}".format(child_data.hcloselgr, child_data.fcloselgr) - comment = "data set 11 - child {} hcloselgr, fcloselgr".format( - idx + 1 - ) + line = f"{child_data.hcloselgr} {child_data.fcloselgr}" + comment = f"data set 11 - child {idx + 1} hcloselgr, fcloselgr" line = self._padline(line, comment=comment) f.write(line) @@ -380,9 +371,7 @@ def write_name_file(self): child_data.nprbeg + 1, child_data.npcbeg + 1, ) - comment = "data set 12 - child {} nplbeg, nprbeg, npcbeg".format( - idx + 1 - ) + comment = f"data set 12 - child {idx + 1} nplbeg, nprbeg, npcbeg" line = self._padline(line, comment=comment) f.write(line) @@ -392,23 +381,21 @@ def write_name_file(self): child_data.nprend + 1, child_data.npcend + 1, ) - comment = "data set 13 - child {} nplend, nprend, npcend".format( - idx + 1 - ) + comment = f"data set 13 - child {idx + 1} nplend, nprend, npcend" line = self._padline(line, comment=comment) f.write(line) # dataset 14 line = str(child_data.ncpp) - comment = "data set 14 - child {} ncpp".format(idx + 1) + comment = f"data set 14 - child {idx + 1} ncpp" line = self._padline(line, comment=comment) f.write(line) # dataset 15 line = "" for ndx in child_data.ncppl: - line += "{} ".format(ndx) - comment = "data set 15 - child {} ncppl".format(idx + 1) + line += f"{ndx} " + comment = f"data set 15 - child {idx + 1} ncppl" line = self._padline(line, comment=comment) f.write(line) @@ -439,7 +426,7 @@ def change_model_ws(self, new_pth=None, reset_external=False): if not os.path.exists(new_pth): try: sys.stdout.write( - "\ncreating model workspace...\n {}\n".format(new_pth) + f"\ncreating model workspace...\n {new_pth}\n" ) os.makedirs(new_pth) except: @@ -452,7 +439,7 @@ def change_model_ws(self, 
new_pth=None, reset_external=False): # --reset the model workspace old_pth = self._model_ws self._model_ws = new_pth - line = "\nchanging model workspace...\n {}\n".format(new_pth) + line = f"\nchanging model workspace...\n {new_pth}\n" sys.stdout.write(line) # reset model_ws for the parent @@ -541,7 +528,7 @@ def load( # dataset 1 ds1 = line.split()[0].lower() msg = "LGR must be entered as the first item in dataset 1\n" - msg += " {}\n".format(header) + msg += f" {header}\n" assert ds1 == "lgr", msg # dataset 2 diff --git a/flopy/modpath/mp6.py b/flopy/modpath/mp6.py index cdd424bf5..87e7c119c 100644 --- a/flopy/modpath/mp6.py +++ b/flopy/modpath/mp6.py @@ -99,8 +99,8 @@ def __init__( self.__mf = modflowmodel self.lst = Modpath6List(self, listunit=listunit) - self.mpnamefile = "{}.{}".format(self.name, namefile_ext) - self.mpbas_file = "{}.mpbas".format(modelname) + self.mpnamefile = f"{self.name}.{namefile_ext}" + self.mpbas_file = f"{modelname}.mpbas" if self.__mf is not None: # ensure that user-specified files are used iu = self.__mf.oc.iuhead @@ -189,17 +189,17 @@ def write_name_file(self): """ fn_path = os.path.join(self.model_ws, self.mpnamefile) f_nam = open(fn_path, "w") - f_nam.write("%s\n" % (self.heading)) + f_nam.write(f"{self.heading}\n") if self.mpbas_file is not None: - f_nam.write("%s %3i %s\n" % ("MPBAS", 86, self.mpbas_file)) + f_nam.write(f"MPBAS 86 {self.mpbas_file}\n") if self.dis_file is not None: - f_nam.write("%s %3i %s\n" % ("DIS", self.dis_unit, self.dis_file)) + f_nam.write(f"DIS {self.dis_unit:3} {self.dis_file}\n") if self.head_file is not None: - f_nam.write("%s %3i %s\n" % ("HEAD", 88, self.head_file)) + f_nam.write(f"HEAD 88 {self.head_file}\n") if self.budget_file is not None: - f_nam.write("%s %3i %s\n" % ("BUDGET", 89, self.budget_file)) + f_nam.write(f"BUDGET 89 {self.budget_file}\n") for u, f in zip(self.external_units, self.external_fnames): - f_nam.write("DATA {0:3d} ".format(u) + f + "\n") + f_nam.write(f"DATA {u:3d} {f}\n") f_nam.close() sim = property(getsim) # Property has no setter, so read-only @@ -323,7 +323,7 @@ def create_mpsim( for j in range(ncol): if arr[k, i, j] < 1: continue - group_name.append("wc{}".format(icnt)) + group_name.append(f"wc{icnt}") group_placement.append( [ Grid, @@ -364,7 +364,7 @@ def append_node(ifaces_well, wellid, node_number, k, i, j): else: ifaces.append(default_ifaces) face_ct.append(len(default_ifaces)) - group_name.append("{}{}".format(wellid, node_number)) + group_name.append(f"{wellid}{node_number}") group_placement.append( [ Grid, @@ -444,9 +444,7 @@ def append_node(ifaces_well, wellid, node_number, k, i, j): ParticleGenerationOption = 2 strt_file = package else: - raise Exception( - "package '{0}' not supported".format(package) - ) + raise Exception(f"package '{package}' not supported") SimulationType = 1 if simtype.lower() == "endpoint": diff --git a/flopy/modpath/mp6bas.py b/flopy/modpath/mp6bas.py index 829bca41d..9e682c612 100644 --- a/flopy/modpath/mp6bas.py +++ b/flopy/modpath/mp6bas.py @@ -137,13 +137,13 @@ def write_file(self): ModflowDis = self.parent.mf.get_package("DIS") # Open file for writing f_bas = open(self.fn_path, "w") - f_bas.write("#{0:s}\n#{1:s}\n".format(self.heading1, self.heading2)) - f_bas.write("{0:16.6f} {1:16.6f}\n".format(self.hnoflo, self.hdry)) - f_bas.write("{0:4d}\n".format(self.def_face_ct)) + f_bas.write(f"#{self.heading1}\n#{self.heading2}\n") + f_bas.write(f"{self.hnoflo:16.6f} {self.hdry:16.6f}\n") + f_bas.write(f"{self.def_face_ct:4d}\n") if self.def_face_ct > 0: for i 
in range(self.def_face_ct): - f_bas.write("{0:20s}\n".format(self.bud_label[i])) - f_bas.write("{0:2d}\n".format(self.def_iface[i])) + f_bas.write(f"{self.bud_label[i]:20s}\n") + f_bas.write(f"{self.def_iface[i]:2d}\n") # f_bas.write('\n') flow_package = self.parent.mf.get_package("BCF6") diff --git a/flopy/modpath/mp6sim.py b/flopy/modpath/mp6sim.py index 5630896a7..15c34e7db 100644 --- a/flopy/modpath/mp6sim.py +++ b/flopy/modpath/mp6sim.py @@ -88,8 +88,8 @@ def __init__( self.heading1 = "# MPSIM for Modpath, generated by Flopy." self.heading2 = "#" - self.mp_name_file = "{}.{}".format(model.name, "mpnam") - self.mp_list_file = "{}.{}".format(model.name, "mplst") + self.mp_name_file = f"{model.name}.mpnam" + self.mp_list_file = f"{model.name}.mplst" options_list = [ "SimulationType", "TrackingDirection", @@ -107,10 +107,10 @@ def __init__( self.option_flags = option_flags options_dict = dict(list(zip(options_list, option_flags))) self.options_dict = options_dict - self.endpoint_file = "{}.{}".format(model.name, "mpend") - self.pathline_file = "{}.{}".format(model.name, "mppth") - self.time_ser_file = "{}.{}".format(model.name, "mp.tim_ser") - self.advobs_file = "{}.{}".format(model.name, ".mp.advobs") + self.endpoint_file = f"{model.name}.mpend" + self.pathline_file = f"{model.name}.mppth" + self.time_ser_file = f"{model.name}.mp.tim_ser" + self.advobs_file = f"{model.name}.mp.advobs" self.ref_time = ref_time self.ref_time_per_stp = ref_time_per_stp self.stop_time = stop_time @@ -125,7 +125,7 @@ def __init__( self.face_ct = face_ct self.ifaces = ifaces self.part_ct = part_ct - self.strt_file = "{}.{}".format(model.name, "loc") + self.strt_file = f"{model.name}.loc" if strt_file is not None: self.strt_file = strt_file self.time_ct = time_ct @@ -134,7 +134,7 @@ def __init__( self.particle_cell_cnt = particle_cell_cnt self.cell_bd_ct = cell_bd_ct self.bud_loc = bud_loc - self.trace_file = "{}.{}".format(model.name, "trace_file.txt") + self.trace_file = f"{model.name}.trace_file.txt" self.trace_id = trace_id self.stop_zone = stop_zone self.zone = Util3d( @@ -218,53 +218,49 @@ def write_file(self): f_sim = open(self.fn_path, "w") # item 0 - f_sim.write("#{0:s}\n#{1:s}\n".format(self.heading1, self.heading2)) + f_sim.write(f"#{self.heading1}\n#{self.heading2}\n") # item 1 - f_sim.write("{0:s}\n".format(self.mp_name_file)) + f_sim.write(f"{self.mp_name_file}\n") # item 2 - f_sim.write("{0:s}\n".format(self.mp_list_file)) + f_sim.write(f"{self.mp_list_file}\n") # item 3 for i in range(12): - f_sim.write("{0:4d}".format(self.option_flags[i])) + f_sim.write(f"{self.option_flags[i]:4d}") f_sim.write("\n") # item 4 - f_sim.write("{0:s}\n".format(self.endpoint_file)) + f_sim.write(f"{self.endpoint_file}\n") # item 5 if self.options_dict["SimulationType"] == 2: - f_sim.write("{0:s}\n".format(self.pathline_file)) + f_sim.write(f"{self.pathline_file}\n") # item 6 if self.options_dict["SimulationType"] == 3: - f_sim.write("{0:s}\n".format(self.time_ser_file)) + f_sim.write(f"{self.time_ser_file}\n") # item 7 if ( self.options_dict["AdvectiveObservationsOption"] == 2 and self.option_dict["SimulationType"] == 3 ): - f_sim.write("{0:s}\n".format(self.advobs_file)) + f_sim.write(f"{self.advobs_file}\n") # item 8 if self.options_dict["ReferenceTimeOption"] == 1: - f_sim.write("{0:f}\n".format(self.ref_time)) + f_sim.write(f"{self.ref_time:f}\n") # item 9 if self.options_dict["ReferenceTimeOption"] == 2: Period, Step, TimeFraction = self.ref_time_per_stp - f_sim.write( - "{0:d} {1:d} {2:f}\n".format( - Period 
+ 1, Step + 1, TimeFraction - ) - ) + f_sim.write(f"{Period + 1} {Step + 1} {TimeFraction:f}\n") # item 10 if self.options_dict["StopOption"] == 3: - f_sim.write("{0:f}\n".format(self.stop_time)) + f_sim.write(f"{self.stop_time:f}\n") if self.options_dict["ParticleGenerationOption"] == 1: # item 11 - f_sim.write("{0:d}\n".format(self.group_ct)) + f_sim.write(f"{self.group_ct}\n") for i in range(self.group_ct): # item 12 - f_sim.write("{0:s}\n".format(self.group_name[i])) + f_sim.write(f"{self.group_name[i]}\n") # item 13 ( Grid, @@ -291,9 +287,7 @@ def write_file(self): ReleaseEventCount, ) = self.release_times[i] f_sim.write( - "{0:f} {1:d}\n".format( - ReleasePeriodLength, ReleaseEventCount - ) + f"{ReleasePeriodLength:f} {ReleaseEventCount}\n" ) # item 15 if GridCellRegionOption == 1: @@ -320,12 +314,12 @@ def write_file(self): f_sim.write(self.mask_nlay[i].get_file_entry()) # item 17 if GridCellRegionOption == 3: - f_sim.write("{0:s}\n".format(self.mask_layer[i])) + f_sim.write(f"{self.mask_layer[i]}\n") # item 18 f_sim.write(self.mask_1lay[i].get_file_entry()) # item 19 and 20 if PlacementOption == 1: - f_sim.write("{0:d}\n".format(self.face_ct[i])) + f_sim.write(f"{self.face_ct[i]}\n") # item 20 for j in range(self.face_ct[i]): ( @@ -334,9 +328,7 @@ def write_file(self): ParticleColumnCount, ) = self.ifaces[i][j] f_sim.write( - "{0:d} {1:d} {2:d} \n".format( - IFace, ParticleRowCount, ParticleColumnCount - ) + f"{IFace} {ParticleRowCount} {ParticleColumnCount}\n" ) # item 21 elif PlacementOption == 2: @@ -355,7 +347,7 @@ def write_file(self): # item 22 if self.options_dict["ParticleGenerationOption"] == 2: - f_sim.write("{0:s}\n".format(self.strt_file)) + f_sim.write(f"{self.strt_file}\n") if self.options_dict["TimePointOption"] != 1: # item 23 @@ -363,14 +355,14 @@ def write_file(self): self.options_dict["TimePointOption"] == 2 or self.options_dict["TimePointOption"] == 3 ): - f_sim.write("{0:d}\n".format(self.time_ct)) + f_sim.write(f"{self.time_ct}\n") # item 24 if self.options_dict["TimePointOption"] == 2: - f_sim.write("{0:f}\n".format(self.release_time_incr)) + f_sim.write(f"{self.release_time_incr:f}\n") # item 25 if self.options_dict["TimePointOption"] == 3: for r in range(self.time_ct): - f_sim.write("{0:f}\n".format(self.time_pts[r])) + f_sim.write(f"{self.time_pts[r]:f}\n") if ( self.options_dict["BudgetOutputOption"] != 1 @@ -378,24 +370,22 @@ def write_file(self): ): # item 26 if self.options_dict["BudgetOutputOption"] == 3: - f_sim.write("{0:d}\n".format(self.cell_bd_ct)) + f_sim.write(f"{self.cell_bd_ct}\n") # item 27 for k in range(self.cell_bd_ct): Grid, Layer, Row, Column = self.bud_loc[k] f_sim.write( - "{0:d} {1:d} {2:d} {3:d} \n".format( - Grid, Layer + 1, Row + 1, Column + 1 - ) + f"{Grid} {Layer + 1} {Row + 1} {Column + 1} \n" ) if self.options_dict["BudgetOutputOption"] == 4: # item 28 - f_sim.write("{0:s}\n".format(self.trace_file)) + f_sim.write(f"{self.trace_file}\n") # item 29 - f_sim.write("{0:s}\n".format(self.trace_id)) + f_sim.write(f"{self.trace_id}\n") if self.options_dict["ZoneArrayOption"] != 1: # item 30 - f_sim.write("{0:d}\n".format(self.stop_zone)) + f_sim.write(f"{self.stop_zone}\n") # item 31 f_sim.write(self.zone.get_file_entry()) @@ -500,18 +490,18 @@ def write_file(self, data=None, float_format="{:.8f}"): data["i0"] += 1 data["j0"] += 1 with open(self.fn_path, "w") as output: - output.write("{}\n".format(self.heading)) - output.write("{:d}\n".format(self.input_style)) + output.write(f"{self.heading}\n") + 
output.write(f"{self.input_style}\n") groups = np.unique(data.groupname) ngroups = len(groups) - output.write("{:d}\n".format(ngroups)) + output.write(f"{ngroups}\n") for g in groups: npt = len(data[data.groupname == g]) - output.write("{}\n{:d}\n".format(g.decode(), npt)) + output.write(f"{g.decode()}\n{npt}\n") txt = "" for p in data: txt += "{:d} {:d} {:d} {:d} {:d} {:d}".format(*list(p)[:6]) fmtstr = " {0} {0} {0} {0} ".format(float_format) txt += fmtstr.format(*list(p)[6:10]) - txt += "{}\n".format(p[10].decode()) + txt += f"{p[10].decode()}\n" output.write(txt) diff --git a/flopy/modpath/mp7.py b/flopy/modpath/mp7.py index d9dc819af..d94040320 100644 --- a/flopy/modpath/mp7.py +++ b/flopy/modpath/mp7.py @@ -108,8 +108,8 @@ def __init__( self.lst = Modpath7List(self) - self.mpnamefile = "{}.{}".format(self.name, namefile_ext) - self.mpbas_file = "{}.mpbas".format(modelname) + self.mpnamefile = f"{self.name}.{namefile_ext}" + self.mpbas_file = f"{modelname}.mpbas" if not isinstance(flowmodel, (Modflow, MFModel)): raise TypeError( @@ -180,8 +180,8 @@ def __init__( # set dis and grbdis file name dis_file = None - grbdis_file = dis.filename + ".grb" - grbtag = "GRB{}".format(dis.package_name.upper()) + grbdis_file = f"{dis.filename}.grb" + grbtag = f"GRB{dis.package_name.upper()}" tdis = self.flowmodel.simulation.get_package("TDIS") if tdis is None: @@ -378,19 +378,19 @@ def write_name_file(self): """ fpth = os.path.join(self.model_ws, self.mpnamefile) f = open(fpth, "w") - f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") if self.mpbas_file is not None: - f.write("{:10s} {}\n".format("MPBAS", self.mpbas_file)) + f.write(f"MPBAS {self.mpbas_file}\n") if self.dis_file is not None: - f.write("{:10s} {}\n".format("DIS", self.dis_file)) + f.write(f"DIS {self.dis_file}\n") if self.grbdis_file is not None: - f.write("{:10s} {}\n".format(self.grbtag, self.grbdis_file)) + f.write(f"{self.grbtag:10s} {self.grbdis_file}\n") if self.tdis_file is not None: - f.write("{:10s} {}\n".format("TDIS", self.tdis_file)) + f.write(f"TDIS {self.tdis_file}\n") if self.headfilename is not None: - f.write("{:10s} {}\n".format("HEAD", self.headfilename)) + f.write(f"HEAD {self.headfilename}\n") if self.budgetfilename is not None: - f.write("{:10s} {}\n".format("BUDGET", self.budgetfilename)) + f.write(f"BUDGET {self.budgetfilename}\n") f.close() @classmethod diff --git a/flopy/modpath/mp7bas.py b/flopy/modpath/mp7bas.py index 4c09bdebf..f47b424d6 100644 --- a/flopy/modpath/mp7bas.py +++ b/flopy/modpath/mp7bas.py @@ -134,18 +134,16 @@ def write_file(self, check=False): """ # Open file for writing f = open(self.fn_path, "w") - f.write("# {}\n".format(self.heading)) + f.write(f"# {self.heading}\n") if self.parent.flowmodel.version != "mf6": - f.write("{:g} {:g}\n".format(self.parent.hnoflo, self.parent.hdry)) + f.write(f"{self.parent.hnoflo:g} {self.parent.hdry:g}\n") # default IFACE - f.write( - "{:<20d}{}\n".format(self.defaultifacecount, "# DEFAULTIFACECOUNT") - ) + f.write(f"{self.defaultifacecount:<20d}# DEFAULTIFACECOUNT\n") if self.defaultifacecount > 0: for key, value in self.defaultiface.items(): - f.write("{:20s}{}\n".format(key, "# PACKAGE LABEL")) - f.write("{:<20d}{}\n".format(value, "# DEFAULT IFACE VALUE")) + f.write(f"{key:20s}# PACKAGE LABEL\n") + f.write(f"{value:<20d}# DEFAULT IFACE VALUE\n") # laytyp if self.parent.flow_version != "mf6": diff --git a/flopy/modpath/mp7particledata.py b/flopy/modpath/mp7particledata.py index 2b8cdb738..d63c66646 100644 --- 
a/flopy/modpath/mp7particledata.py +++ b/flopy/modpath/mp7particledata.py @@ -172,8 +172,7 @@ def __init__( partlocs = np.array(partlocs, dtype=dtype) else: raise ValueError( - "{}: partlocs must be a list or " - "tuple with lists or tuples".format(self.name) + f"{self.name}: partlocs must be a list or tuple with lists or tuples" ) # localx @@ -439,8 +438,7 @@ def _fmt_string(self): ) else: raise TypeError( - "MfList.fmt_string error: unknown vtype in " - "field: {}".format(field) + f"MfList.fmt_string error: unknown vtype in field: {field}" ) return " " + " ".join(fmts) @@ -803,18 +801,12 @@ def write(self, f=None): ) # item 2 - f.write( - "{} {}\n".format( - self.particletemplatecount, self.totalcellregioncount - ) - ) + f.write(f"{self.particletemplatecount} {self.totalcellregioncount}\n") for sd, lrcregion in zip(self.subdivisiondata, self.lrcregions): # item 3 f.write( - "{} {} {}\n".format( - sd.templatesubdivisiontype, lrcregion.shape[0], sd.drape - ) + f"{sd.templatesubdivisiontype} {lrcregion.shape[0]} {sd.drape}\n" ) # item 4 or 5 @@ -824,7 +816,7 @@ def write(self, f=None): for row in lrcregion: line = "" for lrc in row: - line += "{} ".format(lrc + 1) + line += f"{lrc + 1} " line += "\n" f.write(line) @@ -968,16 +960,12 @@ def write(self, f=None): ) # item 2 - f.write( - "{} {}\n".format(self.particletemplatecount, self.totalcellcount) - ) + f.write(f"{self.particletemplatecount} {self.totalcellcount}\n") for sd, nodes in zip(self.subdivisiondata, self.nodedata): # item 3 f.write( - "{} {} {}\n".format( - sd.templatesubdivisiontype, nodes.shape[0], sd.drape - ) + f"{sd.templatesubdivisiontype} {nodes.shape[0]} {sd.drape}\n" ) # item 4 or 5 @@ -986,7 +974,7 @@ def write(self, f=None): # item 6 line = "" for idx, node in enumerate(nodes): - line += " {}".format(node + 1) + line += f" {node + 1}" lineend = False if idx > 0: if idx % 10 == 0 or idx == nodes.shape[0] - 1: diff --git a/flopy/modpath/mp7particlegroup.py b/flopy/modpath/mp7particlegroup.py index 7fad64874..cfa4dc159 100644 --- a/flopy/modpath/mp7particlegroup.py +++ b/flopy/modpath/mp7particlegroup.py @@ -126,14 +126,14 @@ def write(self, fp=None, ws="."): ) # item 26 - fp.write("{}\n".format(self.particlegroupname)) + fp.write(f"{self.particlegroupname}\n") # item 27 - fp.write("{}\n".format(self.releaseoption)) + fp.write(f"{self.releaseoption}\n") if self.releaseoption == 1: # item 28 - fp.write("{}\n".format(self.releasetimes[0])) + fp.write(f"{self.releasetimes[0]}\n") elif self.releaseoption == 2: # item 29 fp.write( @@ -145,7 +145,7 @@ def write(self, fp=None, ws="."): ) elif self.releaseoption == 3: # item 30 - fp.write("{}\n".format(self.releasetimecount)) + fp.write(f"{self.releasetimecount}\n") # item 31 tp = self.releasetimes v = Util2d( @@ -155,7 +155,7 @@ def write(self, fp=None, ws="."): # item 32 if self.external: - line = "EXTERNAL {}\n".format(self.filename) + line = f"EXTERNAL {self.filename}\n" else: line = "INTERNAL\n" fp.write(line) @@ -225,10 +225,10 @@ def __init__( # convert particledata to a list if a ParticleData type if not isinstance(particledata, ParticleData): - msg = "{}: particledata must be a".format( - self.name - ) + " ParticleData instance not a {}".format(type(particledata)) - raise TypeError(msg) + raise TypeError( + f"{self.name}: particledata must be a " + f"ParticleData instance not a {type(particledata)}" + ) # set attributes self.inputstyle = 1 @@ -266,13 +266,13 @@ def write(self, fp=None, ws="."): f = fp # particle data item 1 - f.write("{}\n".format(self.inputstyle)) + 
f.write(f"{self.inputstyle}\n") # particle data item 2 - f.write("{}\n".format(self.locationstyle)) + f.write(f"{self.locationstyle}\n") # particle data item 3 - f.write("{} {}\n".format(self.particlecount, self.particleidoption)) + f.write(f"{self.particlecount} {self.particleidoption}\n") # particle data item 4 and 5 # call the write method in ParticleData @@ -406,7 +406,7 @@ def write(self, fp=None, ws="."): f = fp # item 1 - f.write("{}\n".format(self.inputstyle)) + f.write(f"{self.inputstyle}\n") # items 2, 3, 4 or 5, and 6 self.particledata.write(f) @@ -504,7 +504,7 @@ def write(self, fp=None, ws="."): f = fp # item 1 - f.write("{}\n".format(self.inputstyle)) + f.write(f"{self.inputstyle}\n") # items 2, 3, 4 or 5, and 6 self.particledata.write(f) diff --git a/flopy/modpath/mp7sim.py b/flopy/modpath/mp7sim.py index a05cd024e..8a0491576 100644 --- a/flopy/modpath/mp7sim.py +++ b/flopy/modpath/mp7sim.py @@ -36,11 +36,11 @@ def sim_enum_error(v, s, e): ------- """ - msg = "Invalid {} ({}). Valid types are ".format(v, s) + msg = f"Invalid {v} ({s}). Valid types are " for i, c in enumerate(e): if i > 0: msg += ", " - msg += '"{}"'.format(c.name) + msg += f'"{c.name}"' raise ValueError(msg) @@ -282,22 +282,22 @@ def __init__( # set file names if mpnamefilename is None: - mpnamefilename = "{}.{}".format(model.name, "mpnam") + mpnamefilename = f"{model.name}.mpnam" self.mp_name_file = mpnamefilename if listingfilename is None: - listingfilename = "{}.{}".format(model.name, "mplst") + listingfilename = f"{model.name}.mplst" self.listingfilename = listingfilename if endpointfilename is None: - endpointfilename = "{}.{}".format(model.name, "mpend") + endpointfilename = f"{model.name}.mpend" self.endpointfilename = endpointfilename if pathlinefilename is None: - pathlinefilename = "{}.{}".format(model.name, "mppth") + pathlinefilename = f"{model.name}.mppth" self.pathlinefilename = pathlinefilename if timeseriesfilename is None: - timeseriesfilename = "{}.{}".format(model.name, "timeseries") + timeseriesfilename = f"{model.name}.timeseries" self.timeseriesfilename = timeseriesfilename if tracefilename is None: - tracefilename = "{}.{}".format(model.name, "trace") + tracefilename = f"{model.name}.trace" self.tracefilename = tracefilename try: @@ -510,8 +510,7 @@ def __init__( stopzone = -1 if stopzone < -1: raise ValueError( - "Specified stopzone value ({}) " - "must be greater than 0.".format(stopzone) + f"Specified stopzone value ({stopzone}) must be greater than 0." 
) self.stopzone = stopzone if zones is None: @@ -583,11 +582,11 @@ def write_file(self, check=False): f = open(self.fn_path, "w") # item 0 - f.write("{}\n".format(self.heading)) + f.write(f"{self.heading}\n") # item 1 - f.write("{}\n".format(self.mp_name_file)) + f.write(f"{self.mp_name_file}\n") # item 2 - f.write("{}\n".format(self.listingfilename)) + f.write(f"{self.listingfilename}\n") # item 3 f.write( "{} {} {} {} {} {}\n".format( @@ -600,23 +599,21 @@ def write_file(self, check=False): ) ) # item 4 - f.write("{}\n".format(self.endpointfilename)) + f.write(f"{self.endpointfilename}\n") # item 5 if self.simulationtype == 2 or self.simulationtype == 4: - f.write("{}\n".format(self.pathlinefilename)) + f.write(f"{self.pathlinefilename}\n") # item 6 if self.simulationtype == 3 or self.simulationtype == 4: - f.write("{}\n".format(self.timeseriesfilename)) + f.write(f"{self.timeseriesfilename}\n") # item 7 and 8 if self.tracemode == 1: - f.write("{}\n".format(self.tracefilename)) + f.write(f"{self.tracefilename}\n") f.write( - "{} {}\n".format( - self.traceparticlegroup + 1, self.traceparticleid + 1 - ) + f"{self.traceparticlegroup + 1} {self.traceparticleid + 1}\n" ) # item 9 - f.write("{}\n".format(self.BudgetCellCount)) + f.write(f"{self.BudgetCellCount}\n") # item 10 if self.BudgetCellCount > 0: v = Util2d( @@ -630,10 +627,10 @@ def write_file(self, check=False): f.write(v.string) # item 11 - f.write("{}\n".format(self.referencetimeOption)) + f.write(f"{self.referencetimeOption}\n") if self.referencetimeOption == 1: # item 12 - f.write("{:g}\n".format(self.referencetime[0])) + f.write(f"{self.referencetime[0]:g}\n") elif self.referencetimeOption == 2: # item 13 f.write( @@ -644,24 +641,22 @@ def write_file(self, check=False): ) ) # item 14 - f.write("{}\n".format(self.stoptimeoption)) + f.write(f"{self.stoptimeoption}\n") if self.stoptimeoption == 3: # item 15 - f.write("{:g}\n".format(self.stoptime + 1)) + f.write(f"{self.stoptime + 1:g}\n") # item 16 if self.simulationtype == 3 or self.simulationtype == 4: - f.write("{}\n".format(self.timepointoption)) + f.write(f"{self.timepointoption}\n") if self.timepointoption == 1: # item 17 f.write( - "{} {}\n".format( - self.timepointdata[0], self.timepointdata[1][0] - ) + f"{self.timepointdata[0]} {self.timepointdata[1][0]}\n" ) elif self.timepointoption == 2: # item 18 - f.write("{}\n".format(self.timepointdata[0])) + f.write(f"{self.timepointdata[0]}\n") # item 19 tp = self.timepointdata[1] v = Util2d( @@ -675,21 +670,21 @@ def write_file(self, check=False): f.write(v.string) # item 20 - f.write("{}\n".format(self.zonedataoption)) + f.write(f"{self.zonedataoption}\n") if self.zonedataoption == 2: # item 21 - f.write("{}\n".format(self.stopzone)) + f.write(f"{self.stopzone}\n") # item 22 f.write(self.zones.get_file_entry()) # item 23 - f.write("{}\n".format(self.retardationfactoroption)) + f.write(f"{self.retardationfactoroption}\n") if self.retardationfactoroption == 2: # item 24 f.write(self.retardation.get_file_entry()) # item 25 - f.write("{}\n".format(len(self.particlegroups))) + f.write(f"{len(self.particlegroups)}\n") for pg in self.particlegroups: pg.write(f, ws=self.parent.model_ws) diff --git a/flopy/mt3d/mt.py b/flopy/mt3d/mt.py index 3d2a7a8df..91b53054a 100644 --- a/flopy/mt3d/mt.py +++ b/flopy/mt3d/mt.py @@ -149,12 +149,10 @@ def __init__( # Check whether specified ftlfile exists in model directory; if not, # warn user if os.path.isfile( - os.path.join(self.model_ws, str(modelname + "." 
+ namefile_ext)) + os.path.join(self.model_ws, f"{modelname}.{namefile_ext}") ): with open( - os.path.join( - self.model_ws, str(modelname + "." + namefile_ext) - ) + os.path.join(self.model_ws, f"{modelname}.{namefile_ext}") ) as nm_file: for line in nm_file: if line[0:3] == "FTL": @@ -188,8 +186,7 @@ def __init__( "file format" ) print( - "Switching ftlfree from " - "{} to {}".format(self.ftlfree, not self.ftlfree) + f"Switching ftlfree from {self.ftlfree} to {not self.ftlfree}" ) self.ftlfree = not self.ftlfree # Flip the bool @@ -209,11 +206,7 @@ def __init__( # external_path = os.path.join(model_ws, external_path) if os.path.exists(external_path): - print( - "Note: external_path " - + str(external_path) - + " already exists" - ) + print(f"Note: external_path {external_path} already exists") # assert os.path.exists(external_path),'external_path does not exist' else: os.mkdir(external_path) @@ -407,7 +400,7 @@ def write_name_file(self): """ fn_path = os.path.join(self.model_ws, self.namefile) f_nam = open(fn_path, "w") - f_nam.write("{}\n".format(self.heading)) + f_nam.write(f"{self.heading}\n") f_nam.write( "{:14s} {:5d} {}\n".format( self.lst.name[0], @@ -420,16 +413,14 @@ def write_name_file(self): if self.ftlfree: ftlfmt = "FREE" f_nam.write( - "{:14s} {:5d} {} {}\n".format( - "FTL", self.ftlunit, self.ftlfilename, ftlfmt - ) + f"{'FTL':14s} {self.ftlunit:5d} {self.ftlfilename} {ftlfmt}\n" ) # write file entries in name file f_nam.write(str(self.get_name_file_entries())) # write the external files for u, f in zip(self.external_units, self.external_fnames): - f_nam.write("DATA {:5d} {}\n".format(u, f)) + f_nam.write(f"DATA {u:5d} {f}\n") # write the output files for u, f, b in zip( @@ -438,9 +429,9 @@ def write_name_file(self): if u == 0: continue if b: - f_nam.write("DATA(BINARY) {:5d} {} REPLACE\n".format(u, f)) + f_nam.write(f"DATA(BINARY) {u:5d} {f} REPLACE\n") else: - f_nam.write("DATA {:5d} {}\n".format(u, f)) + f_nam.write(f"DATA {u:5d} {f}\n") f_nam.close() return @@ -508,9 +499,7 @@ def load( if verbose: sys.stdout.write( - "\nCreating new model with name: {}\n{}\n\n".format( - modelname, 50 * "-" - ) + f"\nCreating new model with name: {modelname}\n{50 * '-'}\n\n" ) mt = cls( modelname=modelname, @@ -527,7 +516,7 @@ def load( # read name file namefile_path = os.path.join(mt.model_ws, f) if not os.path.isfile(namefile_path): - raise IOError("cannot find name file: " + str(namefile_path)) + raise IOError(f"cannot find name file: {namefile_path}") try: ext_unit_dict = mfreadnam.parsenamefile( namefile_path, mt.mfnam_packages, verbose=verbose @@ -537,7 +526,7 @@ def load( # print(str(e)) # return None raise Exception( - "error loading name file entries from file:\n" + str(e) + f"error loading name file entries from file:\n{e!s}" ) if mt.verbose: @@ -588,12 +577,10 @@ def load( btn.filename, mt, ext_unit_dict=ext_unit_dict ) except Exception as e: - raise Exception("error loading BTN: {0}".format(str(e))) + raise Exception(f"error loading BTN: {e!s}") files_successfully_loaded.append(btn.filename) if mt.verbose: - sys.stdout.write( - " {:4s} package load...success\n".format(pck.name[0]) - ) + sys.stdout.write(f" {pck.name[0]:4s} package load...success\n") ext_unit_dict.pop(btn_key).filehandle.close() ncomp = mt.btn.ncomp # reserved unit numbers for .ucn, s.ucn, .obs, .mas, .cnf @@ -643,9 +630,7 @@ def load( files_successfully_loaded.append(item.filename) if mt.verbose: sys.stdout.write( - " {:4s} package load...success\n".format( - pck.name[0] - ) + f" {pck.name[0]:4s} package 
load...success\n" ) except BaseException as o: if mt.verbose: @@ -662,25 +647,19 @@ def load( files_successfully_loaded.append(item.filename) if mt.verbose: sys.stdout.write( - " {:4s} package load...success\n".format( - pck.name[0] - ) + f" {pck.name[0]:4s} package load...success\n" ) else: if mt.verbose: sys.stdout.write( - " {:4s} package load...skipped\n".format( - item.filetype - ) + f" {item.filetype:4s} package load...skipped\n" ) files_not_loaded.append(item.filename) elif "data" not in item.filetype.lower(): files_not_loaded.append(item.filename) if mt.verbose: sys.stdout.write( - " {:4s} package load...skipped\n".format( - item.filetype - ) + f" {item.filetype:4s} package load...skipped\n" ) elif "data" in item.filetype.lower(): if mt.verbose: @@ -725,7 +704,7 @@ def load( "successfully loaded.".format(len(files_successfully_loaded)) ) for fname in files_successfully_loaded: - print(" " + os.path.basename(fname)) + print(f" {os.path.basename(fname)}") if len(files_not_loaded) > 0: print( " The following {0} packages were not loaded.".format( @@ -733,7 +712,7 @@ def load( ) ) for fname in files_not_loaded: - print(" " + os.path.basename(fname)) + print(f" {os.path.basename(fname)}") print("\n") # return model object @@ -755,7 +734,7 @@ def load_mas(fname): """ if not os.path.isfile(fname): - raise Exception("Could not find file: {}".format(fname)) + raise Exception(f"Could not find file: {fname}") dtype = [ ("time", float), ("total_in", float), @@ -792,7 +771,7 @@ def load_obs(fname): obs = [] if not os.path.isfile(fname): - raise Exception("Could not find file: {}".format(fname)) + raise Exception(f"Could not find file: {fname}") with open(fname, "r") as f: line = f.readline() if line.strip() != firstline: @@ -815,7 +794,7 @@ def load_obs(fname): k = int(ll.pop(0)) i = int(ll.pop(0)) j = int(ll.pop(0)) - obsnam = "({}, {}, {})".format(k, i, j) + obsnam = f"({k}, {i}, {j})" if obsnam in obs: obsnam += str(len(obs) + 1) # make obs name unique obs.append(obsnam) diff --git a/flopy/mt3d/mtadv.py b/flopy/mt3d/mtadv.py index 43f4d9158..fed44b9bb 100644 --- a/flopy/mt3d/mtadv.py +++ b/flopy/mt3d/mtadv.py @@ -336,10 +336,10 @@ def load(cls, f, model, ext_unit_dict=None): if len(line[30:40].strip()) > 0: nadvfd = int(line[30:40]) if model.verbose: - print(" MIXELM {}".format(mixelm)) - print(" PERCEL {}".format(nadvfd)) - print(" MXPART {}".format(mxpart)) - print(" NADVFD {}".format(nadvfd)) + print(f" MIXELM {mixelm}") + print(f" PERCEL {nadvfd}") + print(f" MXPART {mxpart}") + print(f" NADVFD {nadvfd}") # Item B2: ITRACK WD itrack = None @@ -351,8 +351,8 @@ def load(cls, f, model, ext_unit_dict=None): itrack = int(line[0:10]) wd = float(line[10:20]) if model.verbose: - print(" ITRACK {}".format(itrack)) - print(" WD {}".format(wd)) + print(f" ITRACK {itrack}") + print(f" WD {wd}") # Item B3: DCEPS, NPLANE, NPL, NPH, NPMIN, NPMAX dceps = None @@ -372,12 +372,12 @@ def load(cls, f, model, ext_unit_dict=None): npmin = int(line[40:50]) npmax = int(line[50:60]) if model.verbose: - print(" DCEPS {}".format(dceps)) - print(" NPLANE {}".format(nplane)) - print(" NPL {}".format(npl)) - print(" NPH {}".format(nph)) - print(" NPMIN {}".format(npmin)) - print(" NPMAX {}".format(npmax)) + print(f" DCEPS {dceps}") + print(f" NPLANE {nplane}") + print(f" NPL {npl}") + print(f" NPH {nph}") + print(f" NPMIN {npmin}") + print(f" NPMAX {npmax}") # Item B4: INTERP, NLSINK, NPSINK interp = None @@ -391,9 +391,9 @@ def load(cls, f, model, ext_unit_dict=None): nlsink = int(line[10:20]) npsink = 
int(line[20:30]) if model.verbose: - print(" INTERP {}".format(interp)) - print(" NLSINK {}".format(nlsink)) - print(" NPSINK {}".format(npsink)) + print(f" INTERP {interp}") + print(f" NLSINK {nlsink}") + print(f" NPSINK {npsink}") # Item B5: DCHMOC dchmoc = None @@ -403,7 +403,7 @@ def load(cls, f, model, ext_unit_dict=None): line = f.readline() dchmoc = float(line[0:10]) if model.verbose: - print(" DCHMOC {}".format(dchmoc)) + print(f" DCHMOC {dchmoc}") if openfile: f.close() diff --git a/flopy/mt3d/mtbtn.py b/flopy/mt3d/mtbtn.py index 97a642dfc..47b6a5c22 100644 --- a/flopy/mt3d/mtbtn.py +++ b/flopy/mt3d/mtbtn.py @@ -228,7 +228,7 @@ def __init__( extension="btn", unitnumber=None, filenames=None, - **kwargs + **kwargs, ): if unitnumber is None: @@ -370,14 +370,14 @@ def __init__( self.sconc.append(u3d) if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "sconc" + str(icomp) + name = f"sconc{icomp}" val = 0.0 if name in kwargs: val = kwargs.pop(name) else: print( - "BTN: setting sconc for component {} " - "to zero, kwarg name {}".format(icomp, name) + f"BTN: setting sconc for component {icomp} " + f"to zero, kwarg name {name}" ) u3d = Util3d( model, @@ -393,8 +393,7 @@ def __init__( # Check to make sure that all kwargs have been consumed if len(list(kwargs.keys())) > 0: raise Exception( - "BTN error: unrecognized kwargs: " - + " ".join(list(kwargs.keys())) + f"BTN error: unrecognized kwargs: {' '.join(list(kwargs.keys()))}" ) # Finally add self to parent's package list and return @@ -689,7 +688,7 @@ def write_file(self): f_btn = open(self.fn_path, "w") # A1,2 - f_btn.write("#{0:s}\n#{1:s}\n".format(self.heading1, self.heading2)) + f_btn.write(f"#{self.heading1}\n#{self.heading2}\n") # A3; Keywords # Build a string of the active keywords @@ -725,9 +724,7 @@ def write_file(self): ) # A4 - f_btn.write( - "{0:4s}{1:4s}{2:4s}\n".format(self.tunit, self.lunit, self.munit) - ) + f_btn.write(f"{self.tunit:4s}{self.lunit:4s}{self.munit:4s}\n") # A5 if self.parent.adv != None: @@ -780,25 +777,23 @@ def write_file(self): f_btn.write(self.sconc[s].get_file_entry()) # A14 - f_btn.write("{0:10.0E}{1:10.2E}\n".format(self.cinact, self.thkmin)) + f_btn.write(f"{self.cinact:10.0E}{self.thkmin:10.2E}\n") # A15 f_btn.write( - "{0:10d}{1:10d}{2:10d}{3:10d}".format( - self.ifmtcn, self.ifmtnp, self.ifmtrf, self.ifmtdp - ) + f"{self.ifmtcn:10d}{self.ifmtnp:10d}{self.ifmtrf:10d}{self.ifmtdp:10d}" ) if self.savucn == True: ss = "T" else: ss = "F" - f_btn.write("{0:>10s}\n".format(ss)) + f_btn.write(f"{ss:>10s}\n") # A16, A17 if self.timprs is None: - f_btn.write("{0:10d}\n".format(self.nprs)) + f_btn.write(f"{self.nprs:10d}\n") else: - f_btn.write("{0:10d}\n".format(len(self.timprs))) + f_btn.write(f"{len(self.timprs):10d}\n") timprs = Util2d( self.parent, (len(self.timprs),), @@ -812,10 +807,10 @@ def write_file(self): # A18, A19 if self.obs is None: - f_btn.write("{0:10d}{1:10d}\n".format(0, self.nprobs)) + f_btn.write(f"{0:10d}{self.nprobs:10d}\n") else: nobs = self.obs.shape[0] - f_btn.write("{0:10d}{1:10d}\n".format(nobs, self.nprobs)) + f_btn.write(f"{nobs:10d}{self.nprobs:10d}\n") for i in range(nobs): f_btn.write( "{0:10d}{1:10d}{2:10d}\n".format( @@ -830,13 +825,11 @@ def write_file(self): ss = "T" else: ss = "F" - f_btn.write("{0:>10s}{1:10d}\n".format(ss, self.nprmas)) + f_btn.write(f"{ss:>10s}{self.nprmas:10d}\n") # A21, 22, 23 PERLEN, NSTP, TSMULT for t in range(self.nper): - s = "{0:10G}{1:10d}{2:10G}".format( - self.perlen[t], self.nstp[t], self.tsmult[t] - ) + s = 
f"{self.perlen[t]:10G}{self.nstp[t]:10d}{self.tsmult[t]:10G}" if self.ssflag is not None: s += " " + self.ssflag[t] s += "\n" @@ -917,7 +910,7 @@ def load(cls, f, model, ext_unit_dict=None): m_arr[0].strip().isdigit() is not True ): # If m_arr[0] is not a digit, it is a keyword if model.verbose: - print(" loading optional keywords: {}".format(line.strip())) + print(f" loading optional keywords: {line.strip()}") for i in range(0, len(m_arr)): if m_arr[i].upper() == "MODFLOWSTYLEARRAYS": MFStyleArr = True @@ -956,12 +949,12 @@ def load(cls, f, model, ext_unit_dict=None): except: mcomp = 1 if model.verbose: - print(" NLAY {}".format(nlay)) - print(" NROW {}".format(nrow)) - print(" NCOL {}".format(ncol)) - print(" NPER {}".format(nper)) - print(" NCOMP {}".format(ncomp)) - print(" MCOMP {}".format(mcomp)) + print(f" NLAY {nlay}") + print(f" NROW {nrow}") + print(f" NCOL {ncol}") + print(f" NPER {nper}") + print(f" NCOMP {ncomp}") + print(f" MCOMP {mcomp}") if model.verbose: print(" loading TUNIT, LUNIT, MUNIT...") @@ -970,21 +963,21 @@ def load(cls, f, model, ext_unit_dict=None): lunit = line[4:8] munit = line[8:12] if model.verbose: - print(" TUNIT {}".format(tunit)) - print(" LUNIT {}".format(lunit)) - print(" MUNIT {}".format(munit)) + print(f" TUNIT {tunit}") + print(f" LUNIT {lunit}") + print(f" MUNIT {munit}") if model.verbose: print(" loading TRNOP...") trnop = f.readline()[:20].strip().split() if model.verbose: - print(" TRNOP {}".format(trnop)) + print(f" TRNOP {trnop}") if model.verbose: print(" loading LAYCON...") laycon = Util2d.load_txt((nlay,), f, np.int32, "(40I2)") if model.verbose: - print(" LAYCON {}".format(laycon)) + print(f" LAYCON {laycon}") if model.verbose: print(" loading DELR...") @@ -998,7 +991,7 @@ def load(cls, f, model, ext_unit_dict=None): array_format="mt3d", ) if model.verbose: - print(" DELR {}".format(delr)) + print(f" DELR {delr}") if model.verbose: print(" loading DELC...") @@ -1012,7 +1005,7 @@ def load(cls, f, model, ext_unit_dict=None): array_format="mt3d", ) if model.verbose: - print(" DELC {}".format(delc)) + print(f" DELC {delc}") if model.verbose: print(" loading HTOP...") @@ -1026,7 +1019,7 @@ def load(cls, f, model, ext_unit_dict=None): array_format="mt3d", ) if model.verbose: - print(" HTOP {}".format(htop)) + print(f" HTOP {htop}") if model.verbose: print(" loading DZ...") @@ -1040,7 +1033,7 @@ def load(cls, f, model, ext_unit_dict=None): array_format="mt3d", ) if model.verbose: - print(" DZ {}".format(dz)) + print(f" DZ {dz}") if model.verbose: print(" loading PRSITY...") @@ -1054,7 +1047,7 @@ def load(cls, f, model, ext_unit_dict=None): array_format="mt3d", ) if model.verbose: - print(" PRSITY {}".format(prsity)) + print(f" PRSITY {prsity}") if model.verbose: print(" loading ICBUND...") @@ -1068,7 +1061,7 @@ def load(cls, f, model, ext_unit_dict=None): array_format="mt3d", ) if model.verbose: - print(" ICBUND {}".format(icbund)) + print(f" ICBUND {icbund}") if model.verbose: print(" loading SCONC...") @@ -1084,9 +1077,9 @@ def load(cls, f, model, ext_unit_dict=None): ) if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "sconc" + str(icomp) + name = f"sconc{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") u3d = Util3d.load( f, model, @@ -1098,7 +1091,7 @@ def load(cls, f, model, ext_unit_dict=None): ) kwargs[name] = u3d if model.verbose: - print(" SCONC {}".format(sconc)) + print(f" SCONC {sconc}") if model.verbose: print(" loading CINACT, THCKMIN...") @@ -1109,8 +1102,8 @@ def load(cls, f, 
model, ext_unit_dict=None): except: thkmin = 0.01 if model.verbose: - print(" CINACT {}".format(cinact)) - print(" THKMIN {}".format(thkmin)) + print(f" CINACT {cinact}") + print(f" THKMIN {thkmin}") if model.verbose: print(" loading IFMTCN, IFMTNP, IFMTRF, IFMTDP, SAVUCN...") @@ -1123,18 +1116,18 @@ def load(cls, f, model, ext_unit_dict=None): if "t" in line[40:50].lower(): savucn = True if model.verbose: - print(" IFMTCN {}".format(ifmtcn)) - print(" IFMTNP {}".format(ifmtnp)) - print(" IFMTRF {}".format(ifmtrf)) - print(" IFMTDP {}".format(ifmtdp)) - print(" SAVUCN {}".format(savucn)) + print(f" IFMTCN {ifmtcn}") + print(f" IFMTNP {ifmtnp}") + print(f" IFMTRF {ifmtrf}") + print(f" IFMTDP {ifmtdp}") + print(f" SAVUCN {savucn}") if model.verbose: print(" loading NPRS...") line = f.readline() nprs = int(line[0:10]) if model.verbose: - print(" NPRS {}".format(nprs)) + print(f" NPRS {nprs}") timprs = None if nprs > 0: @@ -1142,7 +1135,7 @@ def load(cls, f, model, ext_unit_dict=None): print(" loading TIMPRS...") timprs = Util2d.load_txt((nprs,), f, np.float32, "(8F10.0)") if model.verbose: - print(" TIMPRS {}".format(timprs)) + print(f" TIMPRS {timprs}") if model.verbose: print(" loading NOBS, NPROBS...") @@ -1153,8 +1146,8 @@ def load(cls, f, model, ext_unit_dict=None): except: nprobs = 1 if model.verbose: - print(" NOBS {}".format(nobs)) - print(" NPROBS {}".format(nprobs)) + print(f" NOBS {nobs}") + print(f" NPROBS {nprobs}") obs = None if nobs > 0: @@ -1169,7 +1162,7 @@ def load(cls, f, model, ext_unit_dict=None): obs.append([k, i, j]) obs = np.array(obs) - 1 if model.verbose: - print(" OBS {}".format(obs)) + print(f" OBS {obs}") if model.verbose: print(" loading CHKMAS, NPRMAS...") @@ -1182,8 +1175,8 @@ def load(cls, f, model, ext_unit_dict=None): except: nprmas = 1 if model.verbose: - print(" CHKMAS {}".format(chkmas)) - print(" NPRMAS {}".format(nprmas)) + print(f" CHKMAS {chkmas}") + print(f" NPRMAS {nprmas}") if model.verbose: print( @@ -1219,15 +1212,15 @@ def load(cls, f, model, ext_unit_dict=None): ttsmax.append(float(line[30:40])) if model.verbose: - print(" PERLEN {}".format(perlen)) - print(" NSTP {}".format(nstp)) - print(" TSMULT {}".format(tsmult)) - print(" SSFLAG {}".format(ssflag)) - print(" TSLNGH {}".format(tslngh)) - print(" DT0 {}".format(dt0)) - print(" MXSTRN {}".format(mxstrn)) - print(" TTSMULT {}".format(ttsmult)) - print(" TTSMAX {}".format(ttsmax)) + print(f" PERLEN {perlen}") + print(f" NSTP {nstp}") + print(f" TSMULT {tsmult}") + print(f" SSFLAG {ssflag}") + print(f" TSLNGH {tslngh}") + print(f" DT0 {dt0}") + print(f" MXSTRN {mxstrn}") + print(f" TTSMULT {ttsmult}") + print(f" TTSMAX {ttsmax}") if openfile: f.close() @@ -1289,7 +1282,7 @@ def load(cls, f, model, ext_unit_dict=None): ttsmax=ttsmax, unitnumber=unitnumber, filenames=filenames, - **kwargs + **kwargs, ) @staticmethod diff --git a/flopy/mt3d/mtdsp.py b/flopy/mt3d/mtdsp.py index 1179ac261..7c287f2e2 100644 --- a/flopy/mt3d/mtdsp.py +++ b/flopy/mt3d/mtdsp.py @@ -113,7 +113,7 @@ def __init__( multiDiff=False, unitnumber=None, filenames=None, - **kwargs + **kwargs, ): if unitnumber is None: @@ -200,7 +200,7 @@ def __init__( ) self.dmcoef.append(u2or3) for icomp in range(2, nmcomp + 1): - name = "dmcoef" + str(icomp) + name = f"dmcoef{icomp}" val = 0.0 if name in list(kwargs.keys()): val = kwargs.pop(name) @@ -401,7 +401,7 @@ def load( ) if model.mcomp > 1: for icomp in range(2, model.mcomp + 1): - name = "dmcoef" + str(icomp) + name = f"dmcoef{icomp}" u3d = Util3d.load( f, model, @@ -450,7 +450,7 @@ 
def load( multiDiff=multiDiff, unitnumber=unitnumber, filenames=filenames, - **kwargs + **kwargs, ) @staticmethod diff --git a/flopy/mt3d/mtgcg.py b/flopy/mt3d/mtgcg.py index a6d883e54..1f13fcebc 100644 --- a/flopy/mt3d/mtgcg.py +++ b/flopy/mt3d/mtgcg.py @@ -144,12 +144,8 @@ def write_file(self): """ # Open file for writing f_gcg = open(self.fn_path, "w") - f_gcg.write( - "{} {} {} {}\n".format( - self.mxiter, self.iter1, self.isolve, self.ncrs - ) - ) - f_gcg.write("{} {} {}\n".format(self.accl, self.cclose, self.iprgcg)) + f_gcg.write(f"{self.mxiter} {self.iter1} {self.isolve} {self.ncrs}\n") + f_gcg.write(f"{self.accl} {self.cclose} {self.iprgcg}\n") f_gcg.close() return @@ -210,10 +206,10 @@ def load(cls, f, model, ext_unit_dict=None): isolve = int(t[2]) ncrs = int(t[3]) if model.verbose: - print(" MXITER {}".format(mxiter)) - print(" ITER1 {}".format(iter1)) - print(" ISOLVE {}".format(isolve)) - print(" NCRS {}".format(ncrs)) + print(f" MXITER {mxiter}") + print(f" ITER1 {iter1}") + print(f" ISOLVE {isolve}") + print(f" NCRS {ncrs}") # Item F2: ACCL, CCLOSE, IPRGCG if model.verbose: @@ -224,9 +220,9 @@ def load(cls, f, model, ext_unit_dict=None): cclose = float(t[1]) iprgcg = int(t[2]) if model.verbose: - print(" ACCL {}".format(accl)) - print(" CCLOSE {}".format(cclose)) - print(" IPRGCG {}".format(iprgcg)) + print(f" ACCL {accl}") + print(f" CCLOSE {cclose}") + print(f" IPRGCG {iprgcg}") if openfile: f.close() diff --git a/flopy/mt3d/mtlkt.py b/flopy/mt3d/mtlkt.py index 7728feb9e..962af7c87 100644 --- a/flopy/mt3d/mtlkt.py +++ b/flopy/mt3d/mtlkt.py @@ -117,7 +117,7 @@ def __init__( unitnumber=None, filenames=None, iprn=-1, - **kwargs + **kwargs, ): # set default unit number of one is not specified @@ -146,9 +146,9 @@ def __init__( ): # already has extension fname = "{}.{}".format(*filenames[1].split(".", 1)) else: - fname = "{}.{}".format(filenames[1], ext) + fname = f"{filenames[1]}.{ext}" else: - fname = "{}.{}".format(model.name, ext) + fname = f"{model.name}.{ext}" model.add_output_file( icbclk, fname=fname, @@ -208,14 +208,13 @@ def __init__( if ncomp > 1: for icomp in range(2, ncomp + 1): for base_name, attr in zip(["coldlak"], [self.coldlak]): - name = "{0}{1}".format(base_name, icomp) + name = f"{base_name}{icomp}" if name in kwargs: val = kwargs.pop(name) else: print( - "LKT: setting {0} for component {1} to zero, kwarg name {2}".format( - base_name, icomp, name - ) + f"LKT: setting {base_name} for component {icomp} " + f"to zero, kwarg name {name}" ) val = 0.0 u2d = Util2d( @@ -292,7 +291,7 @@ def write_file(self): f_lkt, single_per=kper ) else: - f_lkt.write("{}\n".format(0)) + f_lkt.write("0\n") f_lkt.close() return @@ -380,10 +379,10 @@ def load( ietlak = int(vals[3]) if model.verbose: - print(" NLKINIT {}".format(nlkinit)) - print(" MXLKBC {}".format(mxlkbc)) - print(" ICBCLK {}".format(icbclk)) - print(" IETLAK {}".format(ietlak)) + print(f" NLKINIT {nlkinit}") + print(f" MXLKBC {mxlkbc}") + print(f" ICBCLK {icbclk}") + print(f" IETLAK {ietlak}") if ietlak == 0: print( " Mass does not exit the model via simulated lake evaporation " @@ -420,9 +419,9 @@ def load( if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "coldlak" + str(icomp) + name = f"coldlak{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") u2d = Util2d.load( f, model, @@ -443,9 +442,7 @@ def load( for iper in range(nper): if model.verbose: print( - " loading lkt boundary condition data for kper {0:5d}".format( - iper + 1 - ) + f" loading lkt boundary 
condition data for kper {iper + 1:5d}" ) # Item 3: NTMP: An integer value corresponding to the number of @@ -457,9 +454,7 @@ def load( vals = line.strip().split() ntmp = int(vals[0]) if model.verbose: - print( - " {0:5d} lkt boundary conditions specified ".format(ntmp) - ) + print(f" {ntmp:5d} lkt boundary conditions specified ") if (iper == 0) and (ntmp < 0): print(" ntmp < 0 not allowed for first stress period ") if (iper > 0) and (ntmp < 0): @@ -521,7 +516,7 @@ def load( lk_stress_period_data=lk_stress_period_data, unitnumber=unitnumber, filenames=filenames, - **kwargs + **kwargs, ) @staticmethod @@ -537,7 +532,7 @@ def get_default_dtype(ncomp=1): ] if ncomp > 1: for icomp in range(2, ncomp + 1): - comp_name = "cbclk({0:02d})".format(icomp) + comp_name = f"cbclk({icomp:02d})" type_list.append((comp_name, np.float32)) dtype = np.dtype(type_list) return dtype diff --git a/flopy/mt3d/mtrct.py b/flopy/mt3d/mtrct.py index b2995964f..2750881eb 100644 --- a/flopy/mt3d/mtrct.py +++ b/flopy/mt3d/mtrct.py @@ -173,7 +173,7 @@ def __init__( extension="rct", unitnumber=None, filenames=None, - **kwargs + **kwargs, ): """ Package constructor. @@ -263,14 +263,14 @@ def __init__( self.srconc.append(u3d) if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "srconc" + str(icomp) + name = f"srconc{icomp}" val = 0.0 if name in kwargs: val = kwargs.pop(name) else: print( - "RCT: setting srconc for component {} to zero, " - "kwarg name {}".format(icomp, name) + f"RCT: setting srconc for component {icomp} to zero, " + f"kwarg name {name}" ) u3d = Util3d( model, @@ -299,7 +299,7 @@ def __init__( self.sp1.append(u3d) if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "sp1" + str(icomp) + name = f"sp1{icomp}" val = 0.0 if name in kwargs: val = kwargs.pop(name) @@ -335,7 +335,7 @@ def __init__( self.sp2.append(u3d) if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "sp2" + str(icomp) + name = f"sp2{icomp}" val = 0.0 if name in kwargs: val = kwargs.pop(name) @@ -371,7 +371,7 @@ def __init__( self.rc1.append(u3d) if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "rc1" + str(icomp) + name = f"rc1{icomp}" val = 0.0 if name in kwargs: val = kwargs.pop(name) @@ -407,7 +407,7 @@ def __init__( self.rc2.append(u3d) if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "rc2" + str(icomp) + name = f"rc2{icomp}" val = 0.0 if name in kwargs: val = kwargs.pop(name) @@ -568,10 +568,10 @@ def load( except: igetsc = 0 if model.verbose: - print(" ISOTHM {}".format(isothm)) - print(" IREACT {}".format(ireact)) - print(" IRCTOP {}".format(irctop)) - print(" IGETSC {}".format(igetsc)) + print(f" ISOTHM {isothm}") + print(f" IREACT {ireact}") + print(f" IRCTOP {irctop}") + print(f" IGETSC {igetsc}") # Item E2A: RHOB rhob = None @@ -588,7 +588,7 @@ def load( array_format="mt3d", ) if model.verbose: - print(" RHOB {}".format(rhob)) + print(f" RHOB {rhob}") # Item E2A: PRSITY2 prsity2 = None @@ -605,7 +605,7 @@ def load( array_format="mt3d", ) if model.verbose: - print(" PRSITY2 {}".format(prsity2)) + print(f" PRSITY2 {prsity2}") # Item E2C: SRCONC srconc = None @@ -622,12 +622,12 @@ def load( array_format="mt3d", ) if model.verbose: - print(" SRCONC {}".format(srconc)) + print(f" SRCONC {srconc}") if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "srconc" + str(icomp) + name = f"srconc{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") u3d = Util3d.load( f, model, @@ -639,7 +639,7 @@ def load( ) kwargs[name] = u3d if model.verbose: - print(" SRCONC{} 
{}".format(icomp, u3d)) + print(f" SRCONC{icomp} {u3d}") # Item E3: SP1 sp1 = None @@ -656,12 +656,12 @@ def load( array_format="mt3d", ) if model.verbose: - print(" SP1 {}".format(sp1)) + print(f" SP1 {sp1}") if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "sp1" + str(icomp) + name = f"sp1{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") u3d = Util3d.load( f, model, @@ -673,7 +673,7 @@ def load( ) kwargs[name] = u3d if model.verbose: - print(" SP1{} {}".format(icomp, u3d)) + print(f" SP1{icomp} {u3d}") # Item E4: SP2 sp2 = None @@ -690,12 +690,12 @@ def load( array_format="mt3d", ) if model.verbose: - print(" SP2 {}".format(sp2)) + print(f" SP2 {sp2}") if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "sp2" + str(icomp) + name = f"sp2{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") u3d = Util3d.load( f, model, @@ -707,7 +707,7 @@ def load( ) kwargs[name] = u3d if model.verbose: - print(" SP2{} {}".format(icomp, u3d)) + print(f" SP2{icomp} {u3d}") # Item E5: RC1 rc1 = None @@ -724,12 +724,12 @@ def load( array_format="mt3d", ) if model.verbose: - print(" RC1 {}".format(rc1)) + print(f" RC1 {rc1}") if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "rc1" + str(icomp) + name = f"rc1{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") u3d = Util3d.load( f, model, @@ -741,7 +741,7 @@ def load( ) kwargs[name] = u3d if model.verbose: - print(" RC1{} {}".format(icomp, u3d)) + print(f" RC1{icomp} {u3d}") # Item E6: RC2 rc2 = None @@ -758,12 +758,12 @@ def load( array_format="mt3d", ) if model.verbose: - print(" RC2 {}".format(rc2)) + print(f" RC2 {rc2}") if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "rc2" + str(icomp) + name = f"rc2{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") u3d = Util3d.load( f, model, @@ -775,7 +775,7 @@ def load( ) kwargs[name] = u3d if model.verbose: - print(" RC2{} {}".format(icomp, u3d)) + print(f" RC2{icomp} {u3d}") if openfile: f.close() @@ -803,7 +803,7 @@ def load( rc2=rc2, unitnumber=unitnumber, filenames=filenames, - **kwargs + **kwargs, ) @staticmethod diff --git a/flopy/mt3d/mtsft.py b/flopy/mt3d/mtsft.py index 85273d4ec..c0adb1efa 100644 --- a/flopy/mt3d/mtsft.py +++ b/flopy/mt3d/mtsft.py @@ -205,7 +205,7 @@ def __init__( filenames=None, dtype=None, extension="sft", - **kwargs + **kwargs, ): # set default unit number of one is not specified @@ -234,9 +234,9 @@ def __init__( ): # already has extension fname = "{}.{}".format(*filenames[1].split(".", 1)) else: - fname = "{}.{}".format(filenames[1], ext) + fname = f"{filenames[1]}.{ext}" else: - fname = "{}.{}".format(model.name, ext) + fname = f"{model.name}.{ext}" model.add_output_file( abs(ioutobs), fname=fname, @@ -319,7 +319,7 @@ def __init__( for base_name, attr in zip( ["coldsf", "dispsf"], [self.coldsf, self.dispsf] ): - name = "{0}{1}".format(base_name, icomp) + name = f"{base_name}{icomp}" if name in kwargs: val = kwargs.pop(name) else: @@ -373,7 +373,7 @@ def get_default_dtype(ncomp=1): ] if ncomp > 1: for icomp in range(1, ncomp): - comp_name = "cbcsf{0:d}".format(icomp) + comp_name = f"cbcsf{icomp}" type_list.append((comp_name, np.float32)) dtype = np.dtype(type_list) return dtype @@ -439,15 +439,14 @@ def write_file(self): f.write(dispsf.get_file_entry()) # Item 5 - f.write("{0:10d} # nobssf\n".format(self.nobssf)) + f.write(f"{self.nobssf:10d} # nobssf\n") # Item 6 if self.nobssf != 
0: for iobs in self.obs_sf: line = ( - "{0:10d}".format(iobs) - + 26 * " " - + "# location of obs as given by position in irch list\n" + f"{iobs:10d} " + "# location of obs as given by position in irch list\n" ) f.write(line) @@ -463,7 +462,7 @@ def write_file(self): if self.sf_stress_period_data is not None: self.sf_stress_period_data.write_transient(f, single_per=kper) else: - f.write("{0:10d} # ntmp - SP {1:5d}\n".format(0, kper)) + f.write(f"{0:10d} # ntmp - SP {kper:5d}\n") f.close() return @@ -575,11 +574,11 @@ def load( ietsfr = int(vals[4]) if model.verbose: - print(" NSFINIT {}".format(nsfinit)) - print(" MXSFBC {}".format(mxsfbc)) - print(" ICBCSF {}".format(icbcsf)) - print(" IOUTOBS {}".format(ioutobs)) - print(" IETSFR {}".format(ietsfr)) + print(f" NSFINIT {nsfinit}") + print(f" MXSFBC {mxsfbc}") + print(f" ICBCSF {icbcsf}") + print(f" IOUTOBS {ioutobs}") + print(f" IETSFR {ietsfr}") if ietsfr == 0: print( " Mass does not exit the model via simulated " @@ -617,13 +616,13 @@ def load( print(" In version 1.0 of MT3D-USGS, isfsov=1 is only option") if model.verbose: - print(" ISFSOLV {}".format(isfsolv)) - print(" WIMP {}".format(wimp)) - print(" WUPS {}".format(wups)) - print(" CCLOSESF {}".format(cclosesf)) - print(" MXITERSF {}".format(mxitersf)) - print(" CRNTSF {}".format(crntsf)) - print(" IPRTXMD {}".format(iprtxmd)) + print(f" ISFSOLV {isfsolv}") + print(f" WIMP {wimp}") + print(f" WUPS {wups}") + print(f" CCLOSESF {cclosesf}") + print(f" MXITERSF {mxitersf}") + print(f" CRNTSF {crntsf}") + print(f" IPRTXMD {iprtxmd}") # Item 3 (COLDSF(NRCH)) Initial concentration if model.verbose: @@ -653,9 +652,9 @@ def load( kwargs = {} if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "coldsf" + str(icomp) + name = f"coldsf{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") u2d = Util2d.load( f, model, @@ -691,9 +690,9 @@ def load( ) if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "dispsf" + str(icomp) + name = f"dispsf{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") u2d = Util2d.load( f, model, @@ -712,7 +711,7 @@ def load( m_arr = line.strip().split() nobssf = int(m_arr[0]) if model.verbose: - print(" NOBSSF {}".format(nobssf)) + print(f" NOBSSF {nobssf}") # If NOBSSF > 0, store observation segment & reach (Item 6) obs_sf = [] @@ -731,8 +730,8 @@ def load( print(" Surface water concentration observation locations:") text = "" for o in obs_sf: - text += "{} ".format(o) - print(" {}\n".format(text)) + text += f"{o} " + print(f" {text}\n") else: if model.verbose: print(" No observation points specified.") @@ -743,11 +742,7 @@ def load( # Item 7 NTMP (Transient data) if model.verbose: - print( - " loading NTMP...stress period {} of {}".format( - iper + 1, nper - ) - ) + print(f" loading NTMP...stress period {iper + 1} of {nper}") line = f.readline() m_arr = line.strip().split() ntmp = int(m_arr[0]) @@ -822,7 +817,7 @@ def load( sf_stress_period_data=sf_stress_period_data, unitnumber=unitnumber, filenames=filenames, - **kwargs + **kwargs, ) @staticmethod diff --git a/flopy/mt3d/mtssm.py b/flopy/mt3d/mtssm.py index 97421a3fc..f961efb36 100644 --- a/flopy/mt3d/mtssm.py +++ b/flopy/mt3d/mtssm.py @@ -165,7 +165,7 @@ def __init__( extension="ssm", unitnumber=None, filenames=None, - **kwargs + **kwargs, ): if unitnumber is None: @@ -303,7 +303,7 @@ def __init__( if ncomp > 1: for icomp in range(2, ncomp + 1): val = 0.0 - name = "crch" + str(icomp) + name = f"crch{icomp}" if name in 
list(kwargs.keys()): val = kwargs.pop(name) else: @@ -363,7 +363,7 @@ def __init__( if ncomp > 1: for icomp in range(2, ncomp + 1): val = 0.0 - name = "cevt" + str(icomp) + name = f"cevt{icomp}" if name in list(kwargs.keys()): val = kwargs[name] kwargs.pop(name) @@ -444,7 +444,7 @@ def get_default_dtype(ncomp=1): ] if ncomp > 1: for comp in range(1, ncomp + 1): - comp_name = "cssm({0:02d})".format(comp) + comp_name = f"cssm({comp:02d})" type_list.append((comp_name, np.float32)) dtype = np.dtype(type_list) return dtype @@ -466,7 +466,7 @@ def write_file(self): f_ssm.write(" F F F F F F F F F F\n") - f_ssm.write("{:10d}\n".format(self.mxss)) + f_ssm.write(f"{self.mxss:10d}\n") # Loop through each stress period and write ssm information nper = self.parent.nper @@ -484,7 +484,7 @@ def write_file(self): incrch = max(incrch, incrchicomp) if incrch == 1: break - f_ssm.write("{:10d}\n".format(incrch)) + f_ssm.write(f"{incrch:10d}\n") if incrch == 1: for t2d in self.crch: u2d = t2d[kper] @@ -500,7 +500,7 @@ def write_file(self): incevt = max(incevt, incevticomp) if incevt == 1: break - f_ssm.write("{:10d}\n".format(incevt)) + f_ssm.write(f"{incevt:10d}\n") if incevt == 1: for t2d in self.cevt: u2d = t2d[kper] @@ -618,16 +618,16 @@ def load( else: fnew4 = "F" if model.verbose: - print(" FWEL {}".format(fwel)) - print(" FDRN {}".format(fdrn)) - print(" FRCH {}".format(frch)) - print(" FEVT {}".format(fevt)) - print(" FRIV {}".format(friv)) - print(" FGHB {}".format(fghb)) - print(" FNEW1 {}".format(fnew1)) - print(" FNEW2 {}".format(fnew2)) - print(" FNEW3 {}".format(fnew3)) - print(" FNEW4 {}".format(fnew4)) + print(f" FWEL {fwel}") + print(f" FDRN {fdrn}") + print(f" FRCH {frch}") + print(f" FEVT {fevt}") + print(f" FRIV {friv}") + print(f" FGHB {fghb}") + print(f" FNEW1 {fnew1}") + print(f" FNEW2 {fnew2}") + print(f" FNEW3 {fnew3}") + print(f" FNEW4 {fnew4}") # Override the logical settings at top of ssm file using the # modflowmodel, if it is attached to parent @@ -650,8 +650,8 @@ def load( except: issgout = 0 if model.verbose: - print(" MXSS {}".format(mxss)) - print(" ISSGOUT {}".format(issgout)) + print(f" MXSS {mxss}") + print(f" ISSGOUT {issgout}") # kwargs needed to construct crch2, crch3, etc. 
for multispecies kwargs = {} @@ -662,7 +662,7 @@ def load( crch = {0: t2d} if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "crch" + str(icomp) + name = f"crch{icomp}" t2d = 0.0 kwargs[name] = {0: t2d} @@ -672,7 +672,7 @@ def load( cevt = {0: t2d} if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "cevt" + str(icomp) + name = f"cevt{icomp}" t2d = 0.0 kwargs[name] = {0: t2d} @@ -681,7 +681,7 @@ def load( for iper in range(nper): if model.verbose: - print(" loading ssm for kper {0:5d}".format(iper + 1)) + print(f" loading ssm for kper {iper + 1:5d}") # Item D3: INCRCH incrch = -1 @@ -708,9 +708,9 @@ def load( # Load each multispecies array if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "crch" + str(icomp) + name = f"crch{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") t = Util2d.load( f, model, @@ -748,9 +748,9 @@ def load( # Load each multispecies array if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "cevt" + str(icomp) + name = f"cevt{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") t = Util2d.load( f, model, @@ -769,7 +769,7 @@ def load( line = f.readline() nss = int(line[0:10]) if model.verbose: - print(" NSS {}".format(nss)) + print(f" NSS {nss}") # Item D8: KSS, ISS, JSS, CSS, ITYPE, (CSSMS(n),n=1,NCOMP) if model.verbose: @@ -821,7 +821,7 @@ def load( stress_period_data=stress_period_data, unitnumber=unitnumber, filenames=filenames, - **kwargs + **kwargs, ) @staticmethod diff --git a/flopy/mt3d/mtuzt.py b/flopy/mt3d/mtuzt.py index eb85e9ab2..acee737ad 100644 --- a/flopy/mt3d/mtuzt.py +++ b/flopy/mt3d/mtuzt.py @@ -144,7 +144,7 @@ def __init__( extension="uzt", unitnumber=None, filenames=None, - **kwargs + **kwargs, ): # set default unit number of one is not specified @@ -243,13 +243,13 @@ def __init__( if ncomp > 1: for icomp in range(2, ncomp + 1): val = 0.0 - name = "cuzinf" + str(icomp) + name = f"cuzinf{icomp}" if name in list(kwargs.keys()): val = kwargs.pop(name) else: print( - "UZT: setting cuzinf for component {} to zero. " - "kwarg name {}".format(icomp, name) + "UZT: setting cuzinf for component " + f"{icomp} to zero. kwarg name {name}" ) t2d = Transient2d( @@ -276,13 +276,13 @@ def __init__( if ncomp > 1: for icomp in range(2, ncomp + 1): val = 0.0 - name = "cuzet" + str(icomp) + name = f"cuzet{icomp}" if name in list(kwargs.keys()): val = kwargs.pop(name) else: print( - "UZT: setting cuzet for component {} to zero. " - "kwarg name {}".format(icomp, name) + "UZT: setting cuzet for component " + f"{icomp} to zero. kwarg name {name}" ) t2d = Transient2d( @@ -309,13 +309,13 @@ def __init__( if ncomp > 1: for icomp in range(2, ncomp + 1): val = 0.0 - name = "cgwet" + str(icomp) + name = f"cgwet{icomp}" if name in list(kwargs.keys()): val = kwargs.pop(name) else: print( - "UZT: setting cgwet for component {} to zero. " - "kwarg name {}".format(icomp, name) + "UZT: setting cgwet for component " + f"{icomp} to zero. 
kwarg name {name}" ) t2d = Transient2d( @@ -345,13 +345,11 @@ def write_file(self): f_uzt = open(self.fn_path, "w") # Write header - f_uzt.write("#{0:s}\n".format(self.heading1)) + f_uzt.write(f"#{self.heading1}\n") # Item 2 f_uzt.write( - "{0:10d}{1:10d} #ICBCUZ, IET\n".format( - self.icbcuz, self.iet - ) + f"{self.icbcuz:10d}{self.iet:10d} #ICBCUZ, IET\n" ) # Item 3 @@ -375,9 +373,7 @@ def write_file(self): if incuzinf == 1: break f_uzt.write( - "{0:10d} # INCUZINF - SP {1:5d}\n".format( - incuzinf, kper + 1 - ) + f"{incuzinf:10d} # INCUZINF - SP {kper + 1:5d}\n" ) if incuzinf == 1: for t2d in self.cuzinf: @@ -396,9 +392,7 @@ def write_file(self): if incuzet == 1: break f_uzt.write( - "{0:10d} # INCUZET - SP {1:5d}\n".format( - incuzet, kper + 1 - ) + f"{incuzet:10d} # INCUZET - SP {kper + 1:5d}\n" ) if incuzet == 1: for t2d in self.cuzet: @@ -416,9 +410,7 @@ def write_file(self): if incgwet == 1: break f_uzt.write( - "{0:10d} # INCGWET - SP {1:5d}\n".format( - incgwet, kper + 1 - ) + f"{incgwet:10d} # INCGWET - SP {kper + 1:5d}\n" ) if incgwet == 1: for t2d in self.cgwet: @@ -501,7 +493,7 @@ def load( while line[0:1] == "#": i = 1 if model.verbose: - print(" Comment Line " + str(i) + ": ".format(line.strip())) + print(f" Comment Line {i}: {line.strip()}") i += 1 line = f.readline() @@ -531,7 +523,7 @@ def load( cuzinf = {0: t2d} if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "cuzinf" + str(icomp) + name = f"cuzinf{icomp}" t2d = Transient2d( model, (nrow, ncol), np.float32, 0.0, name=name, locat=0 ) @@ -546,7 +538,7 @@ def load( cuzet = {0: t2d} if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "cuzet" + str(icomp) + name = f"cuzet{icomp}" t2d = Transient2d( model, (nrow, ncol), @@ -565,7 +557,7 @@ def load( cgwet = {0: t2d} if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "cgwet" + str(icomp) + name = f"cgwet{icomp}" t2d = Transient2d( model, (nrow, ncol), @@ -583,7 +575,7 @@ def load( for iper in range(nper): if model.verbose: - print(" loading UZT data for kper {0:5d}".format(iper + 1)) + print(f" loading UZT data for kper {iper + 1:5d}") # Item 4 (INCUZINF) line = f.readline() @@ -593,10 +585,7 @@ def load( # Item 5 (CUZINF) if incuzinf >= 0: if model.verbose: - print( - " Reading CUZINF array for kper " - "{0:5d}".format(iper + 1) - ) + print(f" Reading CUZINF array for kper {iper + 1:5d}") t = Util2d.load( f, model, (nrow, ncol), np.float32, "cuzinf", ext_unit_dict ) @@ -605,9 +594,9 @@ def load( # Load each multispecies array if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "cuzinf" + str(icomp) + name = f"cuzinf{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") t = Util2d.load( f, model, @@ -634,9 +623,8 @@ def load( elif incuzinf < 0 and iper > 0: if model.verbose: print( - " Reusing CUZINF array from kper " - "{0:5d}".format(iper) + " in kper " - "{0:5d}".format(iper + 1) + f" Reusing CUZINF array from kper {iper:5d}" + f" in kper {iper + 1:5d}" ) if iet != 0: @@ -648,10 +636,7 @@ def load( # Item 7 (CUZET) if incuzet >= 0: if model.verbose: - print( - " Reading CUZET array for kper " - "{0:5d}".format(iper + 1) - ) + print(f" Reading CUZET array for kper {iper + 1:5d}") t = Util2d.load( f, model, @@ -665,9 +650,9 @@ def load( # Load each multispecies array if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "cuzet" + str(icomp) + name = f"cuzet{icomp}" if model.verbose: - print(" loading {}".format(name)) + print(f" loading {name}") t = Util2d.load( f, model, @@ -693,9 +678,8 @@ def load( else: 
if model.verbose: print( - " Reusing CUZET array from kper " - "{0:5d}".format(iper) + " in kper " - "{0:5d}".format(iper + 1) + f" Reusing CUZET array from kper {iper:5d}" + f" in kper {iper + 1:5d}" ) # Item 8 (INCGWET) @@ -706,10 +690,7 @@ def load( # Item 9 (CGWET) if model.verbose: if incuzet >= 0: - print( - " Reading CGWET array for kper " - "{0:5d}".format(iper + 1) - ) + print(f" Reading CGWET array for kper {iper + 1:5d}") t = Util2d.load( f, model, @@ -723,9 +704,9 @@ def load( # Load each multispecies array if ncomp > 1: for icomp in range(2, ncomp + 1): - name = "cgwet" + str(icomp) + name = f"cgwet{icomp}" if model.verbose: - print(" loading {}...".format(name)) + print(f" loading {name}...") t = Util2d.load( f, model, @@ -752,9 +733,8 @@ def load( elif incgwet < 0 and iper > 0: if model.verbose: print( - " Reusing CGWET array from kper " - "{0:5d}".format(iper) + " in kper " - "{0:5d}".format(iper + 1) + f" Reusing CGWET array from kper {iper:5d}" + f" in kper {iper + 1:5d}" ) if openfile: @@ -783,7 +763,7 @@ def load( cgwet=cgwet, unitnumber=unitnumber, filenames=filenames, - **kwargs + **kwargs, ) @staticmethod diff --git a/flopy/pakbase.py b/flopy/pakbase.py index 958c675b7..f8019c059 100644 --- a/flopy/pakbase.py +++ b/flopy/pakbase.py @@ -85,13 +85,13 @@ def _check_thresholds(chk, array, active, thresholds, name): chk.values( array, active & (array < mn), - "{} values below checker threshold of {}".format(name, mn), + f"{name} values below checker threshold of {mn}", "Warning", ) chk.values( array, active & (array > mx), - "{} values above checker threshold of {}".format(name, mx), + f"{name} values above checker threshold of {mx}", "Warning", ) @@ -243,7 +243,7 @@ def _check_flowp(self, f=None, verbose=True, level=1, checktype=None): chk.values( self.__dict__[kp].array, active & (self.__dict__[kp].array <= 0), - "zero or negative {} values".format(name), + f"zero or negative {name} values", "Error", ) @@ -335,9 +335,7 @@ def check(self, f=None, verbose=True, level=1, checktype=None): storage_coeff = False self._check_storage(chk, storage_coeff) else: - txt = "check method not implemented for {} Package.".format( - self.name[0] - ) + txt = f"check method not implemented for {self.name[0]} Package." if f is not None: if isinstance(f, str): pth = os.path.join(self.parent.model_ws, f) @@ -459,7 +457,7 @@ def __init__( self.file_name = [] for idx, e in enumerate(extension): self.extension.append(e) - file_name = self.parent.name + "." 
+ e + file_name = f"{self.parent.name}.{e}" if filenames is not None: if idx < len(filenames): if filenames[idx] is not None: @@ -491,19 +489,13 @@ def __repr__(self): if not (attr in exclude_attributes): if isinstance(value, list): if len(value) == 1: - s += " {:s} = {:s}\n".format(attr, str(value[0])) + s += f" {attr} = {value[0]!s}\n" else: - s += " {:s} (list, items = {:d})\n".format( - attr, len(value) - ) + s += f" {attr} (list, items = {len(value)})\n" elif isinstance(value, np.ndarray): - s += " {:s} (array, shape = {:s})\n".format( - attr, str(value.shape)[1:-1] - ) + s += f" {attr} (array, shape = {str(value.shape)[1:-1]})\n" else: - s += " {:s} = {:s} ({:s})\n".format( - attr, str(value), str(type(value))[7:-2] - ) + s += f" {attr} = {value!s} ({str(type(value))[7:-2]})\n" return s def __getitem__(self, item): @@ -514,8 +506,7 @@ def __getitem__(self, item): if isinstance(item, MfList): if not isinstance(item, list) and not isinstance(item, tuple): msg = ( - "package.__getitem__() kper {} " - "not in data.keys()".format(item) + f"package.__getitem__() kper {item} not in data.keys()" ) assert item in list(spd.data.keys()), msg return spd[item] @@ -527,9 +518,7 @@ def __getitem__(self, item): ) msg = ( - "package.__getitem__() kper {} not in data.keys()".format( - item[0] - ) + f"package.__getitem__() kper {item[0]} not in data.keys()" ) assert item[0] in list(spd.data.keys()), msg @@ -745,23 +734,17 @@ def level1_arraylist(self, idx, v, name, txt): if k > kon: kon = k tag = name[k].lower().replace(" layer ", "") - txt += " {:>10s}{:>10s}{:>10s}{:>15s}\n".format( - "layer", "row", "column", tag - ) - txt += " {:10d}{:10d}{:10d}{:15.7g}\n".format( - k + 1, i + 1, j + 1, v[k, i, j] - ) + txt += f" {'layer':>10s}{'row':>10s}{'column':>10s}{tag:>15s}\n" + txt += f" {k + 1:10d}{i + 1:10d}{j + 1:10d}{v[k, i, j]:15.7g}\n" elif ndim == 2: tag = name[0].lower().replace(" layer ", "") - txt += " {:>10s}{:>10s}{:>15s}\n".format("row", "column", tag) + txt += f" {'row':>10s}{'column':>10s}{tag:>15s}\n" for [i, j] in idx: - txt += " {:10d}{:10d}{:15.7g}\n".format( - i + 1, j + 1, v[i, j] - ) + txt += f" {i + 1:10d}{j + 1:10d}{v[i, j]:15.7g}\n" elif ndim == 1: - txt += " {:>10s}{:>15s}\n".format("number", name[0]) + txt += f" {'number':>10s}{name[0]:>15s}\n" for i in idx: - txt += " {:10d}{:15.7g}\n".format(i + 1, v[i]) + txt += f" {i + 1:10d}{v[i]:15.7g}\n" return txt def plot(self, **kwargs): @@ -810,7 +793,7 @@ def plot(self, **kwargs): from flopy.plot import PlotUtilities if not self.plottable: - raise TypeError("Package {} is not plottable".format(self.name)) + raise TypeError(f"Package {self.name} is not plottable") axes = PlotUtilities._plot_package_helper(self, **kwargs) return axes @@ -849,20 +832,11 @@ def to_shapefile(self, filename, **kwargs): def webdoc(self): if self.parent.version == "mf2k": - wa = ( - "http://water.usgs.gov/nrp/gwsoftware/modflow2000/Guide/" - + self.url - ) + wa = f"http://water.usgs.gov/nrp/gwsoftware/modflow2000/Guide/{self.url}" elif self.parent.version == "mf2005": - wa = ( - "http://water.usgs.gov/ogw/modflow/MODFLOW-2005-Guide/" - + self.url - ) + wa = f"http://water.usgs.gov/ogw/modflow/MODFLOW-2005-Guide/{self.url}" elif self.parent.version == "ModflowNwt": - wa = ( - "http://water.usgs.gov/ogw/modflow-nwt/MODFLOW-NWT-Guide/" - + self.url - ) + wa = f"http://water.usgs.gov/ogw/modflow-nwt/MODFLOW-NWT-Guide/{self.url}" else: wa = None @@ -934,8 +908,7 @@ def load(f, model, pak_type, ext_unit_dict=None, **kwargs): mxl = int(t[2]) if model.verbose: print( 
- " Parameters detected. Number of " - "parameters = {}".format(nppak) + f" Parameters detected. Number of parameters = {nppak}" ) line = f.readline() @@ -947,21 +920,20 @@ def load(f, model, pak_type, ext_unit_dict=None, **kwargs): ipakcb = int(t[1]) except: if model.verbose: - print(" implicit ipakcb in {}".format(filename)) + print(f" implicit ipakcb in {filename}") if "modflowdrt" in pak_type_str: try: nppak = int(t[2]) imax += 1 except: if model.verbose: - print(" implicit nppak in {}".format(filename)) + print(f" implicit nppak in {filename}") if nppak > 0: mxl = int(t[3]) imax += 1 if model.verbose: print( - " Parameters detected. Number of " - "parameters = {}".format(nppak) + f" Parameters detected. Number of parameters = {nppak}" ) options = [] @@ -1041,7 +1013,7 @@ def load(f, model, pak_type, ext_unit_dict=None, **kwargs): current = None for iper in range(nper): if model.verbose: - msg = " loading {} for kper {:5d}".format(pak_type, iper + 1) + msg = f" loading {pak_type} for kper {iper + 1:5d}" print(msg) line = f.readline() if line == "": @@ -1053,7 +1025,7 @@ def load(f, model, pak_type, ext_unit_dict=None, **kwargs): itmpp = int(t[1]) except: if model.verbose: - print(" implicit itmpp in {}".format(filename)) + print(f" implicit itmpp in {filename}") if itmp == 0: bnd_output = None @@ -1096,8 +1068,7 @@ def load(f, model, pak_type, ext_unit_dict=None, **kwargs): except: if model.verbose: print( - " implicit static instance for " - "parameter {}".format(pname) + f" implicit static instance for parameter {pname}" ) par_dict, current_dict = pak_parms.get(pname) @@ -1177,7 +1148,7 @@ def load(f, model, pak_type, ext_unit_dict=None, **kwargs): ) if check: pak.check( - f="{}.chk".format(pak.name[0]), + f=f"{pak.name[0]}.chk", verbose=pak.parent.verbose, level=0, ) diff --git a/flopy/pest/params.py b/flopy/pest/params.py index 35e952470..7dc2535f9 100644 --- a/flopy/pest/params.py +++ b/flopy/pest/params.py @@ -75,7 +75,7 @@ def zonearray2params( for i, iz in enumerate(parzones): span = {} span["idx"] = np.where(zonearray == iz) - parname = partype + "_" + str(iz) + parname = f"{partype}_{iz}" startvalue = parvals[i] p = Params( mfpackage, diff --git a/flopy/pest/templatewriter.py b/flopy/pest/templatewriter.py index 62f306d20..fcbb80ed2 100644 --- a/flopy/pest/templatewriter.py +++ b/flopy/pest/templatewriter.py @@ -42,7 +42,7 @@ def write_template(self): try: pak = self.model.get_package(ftype) except: - raise Exception("Package type {} not found.".format(ftype)) + raise Exception(f"Package type {ftype} not found.") # Check to make sure pak has p.type as an attribute if not hasattr(pak, p.type.lower()): @@ -56,10 +56,7 @@ def write_template(self): ftypelist.append(ftype) # Print a list of packages that will be parameterized - print( - "The following packages will be parameterized: " - "{}\n".format(ftypelist) - ) + print(f"The following packages will be parameterized: {ftypelist}\n") # Go through each package, and then through each parameter and make # the substitution. Then write the template file. 
diff --git a/flopy/pest/tplarray.py b/flopy/pest/tplarray.py index 6ff34932e..c07156a81 100644 --- a/flopy/pest/tplarray.py +++ b/flopy/pest/tplarray.py @@ -30,18 +30,14 @@ def add_parameter(self, p): """ # Verify parameter span contents if "kpers" not in p.span: - raise Exception( - "Parameter {} span does not contain kper.".format(p.name) - ) + raise Exception(f"Parameter {p.name} span does not contain kper.") if "idx" not in p.span: - raise Exception( - "Parameter {} span does not contain idx.".format(p.name) - ) + raise Exception(f"Parameter {p.name} span does not contain idx.") if p.span["idx"] is None: # Multiplier parameter is when p.span['idx'] is None for kper in p.span["kpers"]: - self.multipliers[kper] = "~ {0:^13s} ~".format(p.name) + self.multipliers[kper] = f"~ {p.name:^13s} ~" else: # Index parameter otherwise for kper in p.span["kpers"]: @@ -75,7 +71,7 @@ def get_kper_entry(self, kper): if kper in self.params: for p in self.params[kper]: idx = p.span["idx"] - chararray[idx] = "~{0:^13s}~".format(p.name) + chararray[idx] = f"~{p.name:^13s}~" u2dtpl = Util2dTpl(chararray, u2d.name, multiplier, indexed_param) return (1, u2dtpl.get_file_entry()) else: @@ -132,11 +128,11 @@ def add_parameter(self, p): if "layers" in p.span: for l in p.span["layers"]: - self.multipliers[l] = "~ {0:^13s} ~".format(p.name) + self.multipliers[l] = f"~ {p.name:^13s} ~" if "idx" in p.span and p.span["idx"] is not None: idx = p.span["idx"] - self.chararray[idx] = "~{0:^13s}~".format(p.name) + self.chararray[idx] = f"~{p.name:^13s}~" self.indexed_params = True return @@ -177,18 +173,18 @@ def get_file_entry(self): ncol = self.chararray.shape[-1] au = np.unique(self.chararray) if au.shape[0] == 1 and self.multiplier is None: - file_entry = "CONSTANT {0} #{1}\n".format(au[0], self.name) + file_entry = f"CONSTANT {au[0]} #{self.name}\n" else: mult = 1.0 if self.multiplier is not None: mult = self.multiplier - cr = "INTERNAL {0} (FREE) -1 #{1}\n".format(mult, self.name) + cr = f"INTERNAL {mult} (FREE) -1 #{self.name}\n" astring = "" icount = 0 for i in range(self.chararray.shape[0]): for j in range(self.chararray.shape[1]): icount += 1 - astring += " {0:>15s}".format(self.chararray[i, j]) + astring += f" {self.chararray[i, j]:>15s}" if icount == 10 or j == ncol - 1: astring += "\n" icount = 0 diff --git a/flopy/plot/crosssection.py b/flopy/plot/crosssection.py index d85a3deeb..49b213eda 100644 --- a/flopy/plot/crosssection.py +++ b/flopy/plot/crosssection.py @@ -87,7 +87,7 @@ def __init__( "dictionary keys specified: " ) for k in line.keys(): - s += "{} ".format(k) + s += f"{k} " raise AssertionError(s) if ax is None: @@ -179,7 +179,7 @@ def __init__( if len(self.xypts) < 2: s = "cross-section cannot be created\n." s += " less than 2 points intersect the model grid\n" - s += " {} points intersect the grid.".format(len(self.xypts)) + s += f" {len(self.xypts)} points intersect the grid." raise Exception(s) if self.geographic_coords: @@ -430,7 +430,7 @@ def plot_fill_between( colors=("blue", "red"), masked_values=None, head=None, - **kwargs + **kwargs, ): """ Plot a three-dimensional array as lines. @@ -646,7 +646,7 @@ def plot_ibound( color_ch="blue", color_vpt="red", head=None, - **kwargs + **kwargs, ): """ Make a plot of ibound. 
If not specified, then pull ibound from the @@ -700,7 +700,7 @@ def plot_ibound( head=head, cmap=cmap, norm=norm, - **kwargs + **kwargs, ) return patches @@ -787,7 +787,7 @@ def plot_bc( mflist = pp.stress_period_data.array[kper] except Exception as e: raise Exception( - "Not a list-style boundary package: " + str(e) + f"Not a list-style boundary package: {e!s}" ) if mflist is None: return @@ -810,7 +810,7 @@ def plot_bc( mflist = p.stress_period_data[kper] except Exception as e: raise Exception( - "Not a list-style boundary package: " + str(e) + f"Not a list-style boundary package: {e!s}" ) if mflist is None: return @@ -846,7 +846,7 @@ def plot_bc( head=head, cmap=cmap, norm=norm, - **kwargs + **kwargs, ) return patches @@ -861,7 +861,7 @@ def plot_vector( hstep=1, normalize=False, masked_values=None, - **kwargs + **kwargs, ): """ Plot a vector. @@ -1150,7 +1150,7 @@ def plot_endpoint( selection_direction=None, method="cell", head=None, - **kwargs + **kwargs, ): """ diff --git a/flopy/plot/map.py b/flopy/plot/map.py index 35afec4a4..0ea444003 100644 --- a/flopy/plot/map.py +++ b/flopy/plot/map.py @@ -303,7 +303,7 @@ def plot_ibound( color_noflow="black", color_ch="blue", color_vpt="red", - **kwargs + **kwargs, ): """ Make a plot of ibound. If not specified, then pull ibound from the @@ -389,7 +389,7 @@ def plot_bc( kper=0, color=None, plotAll=False, - **kwargs + **kwargs, ): """ Plot boundary conditions locations for a specific boundary @@ -446,7 +446,7 @@ def plot_bc( mflist = pp.stress_period_data.array[kper] except Exception as e: raise Exception( - "Not a list-style boundary package: " + str(e) + f"Not a list-style boundary package: {e!s}" ) if mflist is None: return @@ -469,7 +469,7 @@ def plot_bc( mflist = p.stress_period_data[kper] except Exception as e: raise Exception( - "Not a list-style boundary package: " + str(e) + f"Not a list-style boundary package: {e!s}" ) if mflist is None: return @@ -627,7 +627,7 @@ def plot_vector( jstep=1, normalize=False, masked_values=None, - **kwargs + **kwargs, ): """ Plot a vector. @@ -862,7 +862,7 @@ def plot_endpoint( direction="ending", selection=None, selection_direction=None, - **kwargs + **kwargs, ): """ Plot the MODPATH endpoints. 
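Alongside the f-string conversions, the plot modules above pick up a second mechanical change: a trailing comma after every bare **kwargs that ends a multi-line signature or call, matching the style Black's magic trailing comma produces. The grammar has allowed a comma after **kwargs in calls since Python 3.5 and in definitions since Python 3.6, so the change is safe on the Python 3 versions flopy supports, and it keeps later parameter additions down to one-line diffs. A hypothetical signature in the same shape (plot_something and its parameters are invented for illustration):

    def plot_something(
        ax=None,
        kper=0,
        **kwargs,
    ):
        """Only the trailing comma after **kwargs matters here."""
        return ax, kper, kwargs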
diff --git a/flopy/plot/plotutil.py b/flopy/plot/plotutil.py index bf7dfa730..d5a27acc6 100644 --- a/flopy/plot/plotutil.py +++ b/flopy/plot/plotutil.py @@ -116,7 +116,7 @@ def _plot_simulation_helper(simulation, model_list, SelPackList, **kwargs): model_filename_base = None if filename_base is not None: - model_filename_base = filename_base + "_" + model_name + model_filename_base = f"{filename_base}_{model_name}" if model.verbose: print(" Plotting Model: ", model_name) @@ -131,7 +131,7 @@ def _plot_simulation_helper(simulation, model_list, SelPackList, **kwargs): key=defaults["key"], initial_fig=ifig, model_name=model_name, - **kwargs + **kwargs, ) if isinstance(caxs, list): @@ -407,7 +407,7 @@ def _plot_package_helper(package, **kwargs): fignum=fignum, colorbar=colorbar, modelgrid=defaults["modelgrid"], - **kwargs + **kwargs, ) if ax is not None: @@ -535,7 +535,7 @@ def _plot_mflist_helper( filename_base=None, file_extension=None, mflay=None, - **kwargs + **kwargs, ): """ Plot stress period boundary condition (MfList) data for a specified @@ -655,7 +655,7 @@ def _plot_mflist_helper( filenames=filenames, mflay=mflay, modelgrid=modelgrid, - **kwargs + **kwargs, ) else: arr_dict = mflist.to_array(kper, mask=True) @@ -664,9 +664,9 @@ def _plot_mflist_helper( arr = arr_dict[key] except: err_msg = "Cannot find key to plot\n" - err_msg += " Provided key={}\n Available keys=".format(key) + err_msg += f" Provided key={key}\n Available keys=" for name, arr in arr_dict.items(): - err_msg += "{}, ".format(name) + err_msg += f"{name}, " err_msg += "\n" raise PlotException(err_msg) @@ -677,7 +677,7 @@ def _plot_mflist_helper( filenames=filenames, mflay=mflay, modelgrid=modelgrid, - **kwargs + **kwargs, ) return axes @@ -688,7 +688,7 @@ def _plot_util2d_helper( filename_base=None, file_extension=None, fignum=None, - **kwargs + **kwargs, ): """ Plot 2-D model input data @@ -751,7 +751,7 @@ def _plot_util2d_helper( modelgrid = kwargs.pop("modelgrid") if title is None: - title = "{}{}".format(model_name, util2d.name) + title = f"{model_name}{util2d.name}" if file_extension is not None: fext = file_extension @@ -760,7 +760,7 @@ def _plot_util2d_helper( filename = None if filename_base is not None: - filename = "{}_{}.{}".format(filename_base, util2d.name, fext) + filename = f"{filename_base}_{util2d.name}.{fext}" axes = PlotUtilities._plot_array_helper( util2d.array, @@ -769,7 +769,7 @@ def _plot_util2d_helper( filenames=filename, fignum=fignum, modelgrid=modelgrid, - **kwargs + **kwargs, ) return axes @@ -780,7 +780,7 @@ def _plot_util3d_helper( file_extension=None, mflay=None, fignum=None, - **kwargs + **kwargs, ): """ Plot 3-D model input data @@ -859,7 +859,7 @@ def _plot_util3d_helper( name = [name] * nplottable_layers names = [ - "{}{} layer {}".format(model_name, name[k], k + 1) + f"{model_name}{name[k]} layer {k + 1}" for k in range(nplottable_layers) ] @@ -867,7 +867,7 @@ def _plot_util3d_helper( if filename_base is not None: # build filenames, use local "name" variable (flopy6 adaptation) filenames = [ - "{}_{}_Layer{}.{}".format(filename_base, name[k], k + 1, fext) + f"{filename_base}_{name[k]}_Layer{k + 1}.{fext}" for k in range(nplottable_layers) ] @@ -879,7 +879,7 @@ def _plot_util3d_helper( mflay=mflay, fignum=fignum, modelgrid=modelgrid, - **kwargs + **kwargs, ) return axes @@ -890,7 +890,7 @@ def _plot_transient2d_helper( file_extension=None, kper=0, fignum=None, - **kwargs + **kwargs, ): """ Plot transient 2-D model input data @@ -989,7 +989,7 @@ def _plot_transient2d_helper( ) if 
filename_base is not None: - filename = filename_base + "_{:05d}.{}".format(kper + 1, fext) + filename = f"{filename_base}_{kper + 1:05d}.{fext}" else: filename = None @@ -1001,7 +1001,7 @@ def _plot_transient2d_helper( filenames=filename, fignum=fignum[idx], modelgrid=modelgrid, - **kwargs + **kwargs, ) ) return axes @@ -1044,7 +1044,7 @@ def _plot_scalar_helper( title = scalar.name.replace("_", "").upper() if filename_base is not None: - filename = filename_base + ".{}".format(fext) + filename = f"{filename_base}.{fext}" else: filename = None @@ -1054,7 +1054,7 @@ def _plot_scalar_helper( names=title, filenames=filename, modelgrid=modelgrid, - **kwargs + **kwargs, ) return axes @@ -1068,7 +1068,7 @@ def _plot_array_helper( filenames=None, fignum=None, mflay=None, - **kwargs + **kwargs, ): """ Helper method to plot array objects @@ -1187,7 +1187,7 @@ def _plot_array_helper( plotarray, masked_values=defaults["masked_values"], ax=axes[idx], - **kwargs + **kwargs, ) if defaults["colorbar"]: @@ -1203,7 +1203,7 @@ def _plot_array_helper( ax=axes[idx], colors=defaults["colors"], levels=defaults["levels"], - **kwargs + **kwargs, ) if defaults["clabel"]: axes[idx].clabel(cl, fmt=defaults["fmt"], **kwargs) @@ -1222,9 +1222,7 @@ def _plot_array_helper( for idx, k in enumerate(range(i0, i1)): fig = plt.figure(num=fignum[idx]) fig.savefig(filenames[idx], dpi=defaults["dpi"]) - print( - " created...{}".format(os.path.basename(filenames[idx])) - ) + print(f" created...{os.path.basename(filenames[idx])}") # there will be nothing to return when done axes = None plt.close("all") @@ -1240,7 +1238,7 @@ def _plot_bc_helper( filenames=None, fignum=None, mflay=None, - **kwargs + **kwargs, ): """ Helper method to plot bc objects from flopy packages @@ -1341,9 +1339,7 @@ def _plot_bc_helper( fig = plt.figure(num=fignum[idx]) fig.savefig(filenames[idx], dpi=defaults["dpi"]) plt.close(fignum[idx]) - print( - " created...{}".format(os.path.basename(filenames[idx])) - ) + print(f" created...{os.path.basename(filenames[idx])}") # there will be nothing to return when done axes = None plt.close("all") @@ -1404,13 +1400,10 @@ def _set_names(names, maxlay): if names is not None: if not isinstance(names, list): if maxlay > 1: - names = [ - "{} layer {}".format(names, i + 1) - for i in range(maxlay) - ] + names = [f"{names} layer {i + 1}" for i in range(maxlay)] else: names = [names] - msg = "{} /= {}: {}".format(len(names), maxlay, names) + msg = f"{len(names)} /= {maxlay}: {names}" assert len(names) == maxlay, msg return names @@ -1440,7 +1433,7 @@ def _set_fignum(fignum, maxlay, i0, i1): if fignum is not None: if not isinstance(fignum, list): fignum = [fignum] - msg = "{} /= {}".format(len(fignum), maxlay) + msg = f"{len(fignum)} /= {maxlay}" assert len(fignum) == maxlay, msg # check for existing figures f0 = fignum[0] @@ -1504,7 +1497,7 @@ def _set_axes(axes, mflay, maxlay, i0, i1, defaults, names, fignum): klay = k if mflay is not None: klay = int(mflay) - title = "{} Layer {}".format("data", klay + 1) + title = f"data Layer {klay + 1}" ax.set_title(title) axes.append(ax) @@ -2239,7 +2232,7 @@ def plot_shapefile( a=None, masked_values=None, idx=None, - **kwargs + **kwargs, ): """ Generic function for plotting a shapefile. 
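Format specifications carry over unchanged, because everything after the colon inside the braces is still handled by the format protocol: the _plot_transient2d_helper hunk above writes f"{filename_base}_{kper + 1:05d}.{fext}" and gets exactly the zero-padded name that "_{:05d}.{}".format(kper + 1, fext) produced. Literal braces in the output are written doubled. A short, self-contained sketch with made-up values:

    kper, filename_base, fext = 3, "model", "png"

    # an expression and a format spec combine inside one set of braces:
    # kper + 1 is evaluated first, then zero-padded to five digits
    assert f"{filename_base}_{kper + 1:05d}.{fext}" == "model_00004.png"

    # doubled braces emit literal braces, so an f-string can build a
    # format template whose width is itself interpolated (the pattern
    # used by the write_fixed_var hunk in flopy/utils/flopy_io.py below)
    width, ctype = 10, "d"
    template = f"{{:>{width}{ctype}}}"
    assert template == "{:>10d}"
    assert template.format(42) == "        42"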
@@ -2370,7 +2363,7 @@ def plot_cvfd( facecolor="scaled", a=None, masked_values=None, - **kwargs + **kwargs, ): """ Generic function for plotting a control volume finite difference grid of @@ -2637,7 +2630,7 @@ def advanced_package_bc_helper(pkg, modelgrid, kper): idx = np.array(idx) else: raise NotImplementedError( - "Pkg {} not implemented for bc plotting".format(pkg.package_type) + f"Pkg {pkg.package_type} not implemented for bc plotting" ) return idx diff --git a/flopy/plot/styles.py b/flopy/plot/styles.py index 0c21a2e55..d402cf6e1 100644 --- a/flopy/plot/styles.py +++ b/flopy/plot/styles.py @@ -54,7 +54,7 @@ def set_font_type(cls, family, fontname): None """ mpl.rcParams["font.family"] = family - mpl.rcParams["font." + family] = fontname + mpl.rcParams[f"font.{family}"] = fontname return mpl.rcParams @classmethod @@ -112,7 +112,7 @@ def heading( letter = letter.rstrip() if not letter.endswith("."): letter += "." - text = letter + " " + heading + text = f"{letter} {heading}" else: text = heading @@ -267,7 +267,7 @@ def add_text( fontsize=9, ha="left", va="bottom", - **kwargs + **kwargs, ): """Add USGS-style text to a axis object @@ -324,7 +324,7 @@ def add_text( ha=ha, fontdict=font, transform=transform, - **kwargs + **kwargs, ) return text_obj @@ -340,7 +340,7 @@ def add_annotation( fontsize=9, ha="left", va="bottom", - **kwargs + **kwargs, ): """Add an annotation to a axis object @@ -436,7 +436,7 @@ def __set_fontspec(cls, bold=True, italic=True, fontsize=9, family=False): dict """ family = mpl.rcParams["font.family"][0] - font = mpl.rcParams["font." + family][0] + font = mpl.rcParams[f"font.{family}"][0] if bold: weight = "bold" diff --git a/flopy/seawat/swt.py b/flopy/seawat/swt.py index b80161d31..9ea83a24c 100644 --- a/flopy/seawat/swt.py +++ b/flopy/seawat/swt.py @@ -146,11 +146,7 @@ def __init__( # external_path = os.path.join(model_ws, external_path) if os.path.exists(external_path): - print( - "Note: external_path " - + str(external_path) - + " already exists" - ) + print(f"Note: external_path {external_path} already exists") # assert os.path.exists(external_path),'external_path does not exist' else: os.mkdir(external_path) @@ -328,7 +324,7 @@ def write_name_file(self): # open and write header fn_path = os.path.join(self.model_ws, self.namefile) f_nam = open(fn_path, "w") - f_nam.write("{}\n".format(self.heading)) + f_nam.write(f"{self.heading}\n") # Write global file entry if self.glo is not None: @@ -362,7 +358,7 @@ def write_name_file(self): tag = "DATA" if b: tag = "DATA(BINARY)" - f_nam.write("{0:14s} {1:5d} {2}\n".format(tag, u, f)) + f_nam.write(f"{tag:14s} {u:5d} {f}\n") # write the output files for u, f, b in zip( @@ -373,11 +369,9 @@ def write_name_file(self): if u == 0: continue if b: - f_nam.write( - "DATA(BINARY) {:5d} {} REPLACE\n".format(u, f) - ) + f_nam.write(f"DATA(BINARY) {u:5d} {f} REPLACE\n") else: - f_nam.write("DATA {:5d} {}\n".format(u, f)) + f_nam.write(f"DATA {u:5d} {f}\n") if self._mt is not None: # write the external files @@ -389,7 +383,7 @@ def write_name_file(self): tag = "DATA" if b: tag = "DATA(BINARY)" - f_nam.write("{0:14s} {1:5d} {2}\n".format(tag, u, f)) + f_nam.write(f"{tag:14s} {u:5d} {f}\n") # write the output files for u, f, b in zip( @@ -400,11 +394,9 @@ def write_name_file(self): if u == 0: continue if b: - f_nam.write( - "DATA(BINARY) {:5d} {} REPLACE\n".format(u, f) - ) + f_nam.write(f"DATA(BINARY) {u:5d} {f} REPLACE\n") else: - f_nam.write("DATA {:5d} {}\n".format(u, f)) + f_nam.write(f"DATA {u:5d} {f}\n") # write the external 
files for b, u, f in zip( @@ -413,7 +405,7 @@ def write_name_file(self): tag = "DATA" if b: tag = "DATA(BINARY)" - f_nam.write("{0:14s} {1:5d} {2}\n".format(tag, u, f)) + f_nam.write(f"{tag:14s} {u:5d} {f}\n") # write the output files for u, f, b in zip( @@ -422,9 +414,9 @@ def write_name_file(self): if u == 0: continue if b: - f_nam.write("DATA(BINARY) {:5d} {} REPLACE\n".format(u, f)) + f_nam.write(f"DATA(BINARY) {u:5d} {f} REPLACE\n") else: - f_nam.write("DATA {:5d} {}\n".format(u, f)) + f_nam.write(f"DATA {u:5d} {f}\n") f_nam.close() return diff --git a/flopy/seawat/swtvdf.py b/flopy/seawat/swtvdf.py index a2ab3d630..6f393b36b 100644 --- a/flopy/seawat/swtvdf.py +++ b/flopy/seawat/swtvdf.py @@ -205,7 +205,7 @@ def __init__( extension="vdf", unitnumber=None, filenames=None, - **kwargs + **kwargs, ): if unitnumber is None: @@ -409,10 +409,10 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): nswtcpl = int(t[2]) iwtable = int(t[3]) if model.verbose: - print(" MT3DRHOFLG {}".format(mt3drhoflg)) - print(" MFNADVFD {}".format(mfnadvfd)) - print(" NSWTCPL {}".format(nswtcpl)) - print(" IWTABLE {}".format(iwtable)) + print(f" MT3DRHOFLG {mt3drhoflg}") + print(f" MFNADVFD {mfnadvfd}") + print(f" NSWTCPL {nswtcpl}") + print(f" IWTABLE {iwtable}") # Item 2 -- DENSEMIN DENSEMAX if model.verbose: @@ -491,15 +491,14 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): if model.verbose: print( - " loading INDENSE " - "for stress period {}...".format(iper + 1) + f" loading INDENSE for stress period {iper + 1}..." ) line = f.readline() t = line.strip().split() indense = int(t[0]) if indense > 0: - name = "DENSE_StressPeriod_{}".format(iper) + name = f"DENSE_StressPeriod_{iper}" t = Util3d.load( f, model, diff --git a/flopy/seawat/swtvsc.py b/flopy/seawat/swtvsc.py index ad488112b..4256cb1c7 100644 --- a/flopy/seawat/swtvsc.py +++ b/flopy/seawat/swtvsc.py @@ -145,7 +145,7 @@ def __init__( extension="vsc", unitnumber=None, filenames=None, - **kwargs + **kwargs, ): if len(list(kwargs.keys())) > 0: @@ -231,21 +231,19 @@ def write_file(self): f_vsc = open(self.fn_path, "w") # item 1 - f_vsc.write("{}\n".format(self.mt3dmuflg)) + f_vsc.write(f"{self.mt3dmuflg}\n") # item 2 - f_vsc.write("{} {}\n".format(self.viscmin, self.viscmax)) + f_vsc.write(f"{self.viscmin} {self.viscmax}\n") # item 3 if self.mt3dmuflg >= 0: - f_vsc.write( - "{} {} {}\n".format(self.viscref, self.dmudc, self.cmuref) - ) + f_vsc.write(f"{self.viscref} {self.dmudc} {self.cmuref}\n") # item 3a-d if self.mt3dmuflg == -1: - f_vsc.write("{}\n".format(self.viscref)) - f_vsc.write("{} {}\n".format(self.nsmueos, self.mutempopt)) + f_vsc.write(f"{self.viscref}\n") + f_vsc.write(f"{self.nsmueos} {self.mutempopt}\n") # if self.nsmueos == 1: # f_vsc.write('{} {} {}\n'.format(self.mtmuspec, self.dmudc, # self.cmuref)) @@ -257,16 +255,12 @@ def write_file(self): if self.nsmueos > 0: for iwr in range(self.nsmueos): f_vsc.write( - "{} {} {}\n".format( - self.mtmuspec[iwr], - self.dmudc[iwr], - self.cmuref[iwr], - ) + f"{self.mtmuspec[iwr]} {self.dmudc[iwr]} {self.cmuref[iwr]}\n" ) # item 3d if self.mutempopt > 0: - s = "{} ".format(self.mtmutempspec) + s = f"{self.mtmutempspec} " for a in tuple(self.amucoeff): s += "{} ".format(a) f_vsc.write(s + "\n") @@ -281,11 +275,11 @@ def write_file(self): # item 4 (and possibly 5) if itmp > 0: - f_vsc.write("{}\n".format(self.invisc)) + f_vsc.write(f"{self.invisc}\n") f_vsc.write(file_entry_visc) else: - f_vsc.write("{}\n".format(itmp)) + f_vsc.write(f"{itmp}\n") f_vsc.close() return @@ -353,7 
+347,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): t = line.strip().split() mt3dmuflg = int(t[0]) if model.verbose: - print(" MT3DMUFLG {}".format(mt3dmuflg)) + print(f" MT3DMUFLG {mt3dmuflg}") # Item 2 -- VISCMIN VISCMAX if model.verbose: @@ -363,8 +357,8 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): viscmin = float(t[0]) viscmax = float(t[1]) if model.verbose: - print(" VISCMIN {}".format(viscmin)) - print(" VISCMAX {}".format(viscmax)) + print(f" VISCMIN {viscmin}") + print(f" VISCMAX {viscmax}") # Item 3 -- VISCREF NSMUEOS MUTEMPOPT MTMUSPEC DMUDC CMUREF nsmueos = None @@ -384,9 +378,9 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): cmuref = float(t[2]) nsmueos = 1 if model.verbose: - print(" VISCREF {}".format(viscref)) - print(" DMUDC {}".format(dmudc)) - print(" CMUREF {}".format(cmuref)) + print(f" VISCREF {viscref}") + print(f" DMUDC {dmudc}") + print(f" CMUREF {cmuref}") else: # Item 3a if model.verbose: @@ -395,7 +389,7 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): t = line.strip().split() viscref = float(t[0]) if model.verbose: - print(" VISCREF {}".format(viscref)) + print(f" VISCREF {viscref}") # Item 3b if model.verbose: @@ -413,8 +407,8 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): else: muncoeff = None if model.verbose: - print(" NSMUEOS {}".format(nsmueos)) - print(" MUTEMPOPT {}".format(mutempopt)) + print(f" NSMUEOS {nsmueos}") + print(f" MUTEMPOPT {mutempopt}") # Item 3c if model.verbose: @@ -429,9 +423,9 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): dmudc.append(float(t[1])) cmuref.append(float(t[2])) if model.verbose: - print(" MTMUSPEC {}".format(mtmuspec)) - print(" DMUDC {}".format(dmudc)) - print(" CMUREF {}".format(cmuref)) + print(f" MTMUSPEC {mtmuspec}") + print(f" DMUDC {dmudc}") + print(f" CMUREF {cmuref}") # Item 3d if mutempopt > 0: @@ -444,8 +438,8 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): for i in range(muncoeff): amucoeff.append(float(t[i + 1])) if model.verbose: - print(" MTMUTEMSPEC {}".format(mtmutempspec)) - print(" AMUCOEFF {}".format(amucoeff)) + print(f" MTMUTEMSPEC {mtmutempspec}") + print(f" AMUCOEFF {amucoeff}") # Items 4 and 5 -- INVISC VISC invisc = None @@ -458,16 +452,13 @@ def load(cls, f, model, nper=None, ext_unit_dict=None): for iper in range(nper): if model.verbose: - print( - " loading INVISC " - "for stress period {}...".format(iper + 1) - ) + print(f" loading INVISC for stress period {iper + 1}...") line = f.readline() t = line.strip().split() invisc = int(t[0]) if invisc > 0: - name = "VISC_StressPeriod_{}".format(iper) + name = f"VISC_StressPeriod_{iper}" t = Util3d.load( f, model, diff --git a/flopy/utils/binaryfile.py b/flopy/utils/binaryfile.py index 052078cce..a46419d9b 100755 --- a/flopy/utils/binaryfile.py +++ b/flopy/utils/binaryfile.py @@ -54,18 +54,19 @@ def set_values(self, **kwargs): try: self.header[0][k] = int(kwargs[k]) except: - msg = "{0} key not available in {1} header " - "dtype".format(k, self.header_type) - print(msg) + print( + f"{k} key not available in {self.header_type} " + "header dtype" + ) for k in fkey: if k in kwargs.keys(): try: self.header[0][k] = float(kwargs[k]) except: - msg = "{} key not available ".format( - k - ) + "in {} header dtype".format(self.header_type) - print(msg) + print( + f"{k} key not available " + f"in {self.header_type} header dtype" + ) for k in ckey: if k in kwargs.keys(): # Convert to upper case to be consistent case used by MODFLOW @@ -217,7 +218,7 @@ def 
get_headfile_precision(filename): f.seek(0, 0) # reset to beginning assert f.tell() == 0 if totalbytes == 0: - raise IOError("datafile error: file is empty: " + str(filename)) + raise IOError(f"datafile error: file is empty: {filename}") # first try single vartype = [ @@ -260,8 +261,7 @@ def get_headfile_precision(filename): except: f.close() raise IOError( - "Could not determine the precision of " - "the headfile {}".format(filename) + f"Could not determine the precision of the headfile {filename}" ) # close and return result @@ -482,9 +482,7 @@ def __init__( if precision == "auto": precision = get_headfile_precision(filename) if precision == "unknown": - s = "Error. Precision could not be determined for {}".format( - filename - ) + s = f"Error. Precision could not be determined for {filename}" print(s) raise Exception() self.header_dtype = BinaryHeader.set_dtype( @@ -546,15 +544,13 @@ def __init__( text="concentration", precision="auto", verbose=False, - **kwargs + **kwargs, ): self.text = text.encode() if precision == "auto": precision = get_headfile_precision(filename) if precision == "unknown": - s = "Error. Precision could not be determined for {}".format( - filename - ) + s = f"Error. Precision could not be determined for {filename}" print(s) raise Exception() self.header_dtype = BinaryHeader.set_dtype( @@ -614,7 +610,7 @@ def __init__(self, filename, precision="auto", verbose=False, **kwargs): self.file.seek(0, 0) # reset to beginning assert self.file.tell() == 0 if totalbytes == 0: - raise IOError("datafile error: file is empty: " + str(filename)) + raise IOError(f"datafile error: file is empty: {filename}") self.nrow = 0 self.ncol = 0 self.nlay = 0 @@ -662,7 +658,7 @@ def __init__(self, filename, precision="auto", verbose=False, **kwargs): self.modelgrid = kwargs.pop("modelgrid") if len(kwargs.keys()) > 0: args = ",".join(kwargs.keys()) - raise Exception("LayerFile error: unrecognized kwargs: " + args) + raise Exception(f"LayerFile error: unrecognized kwargs: {args}") if precision == "auto": success = self._set_precision("single") @@ -676,12 +672,11 @@ def __init__(self, filename, precision="auto", verbose=False, **kwargs): elif precision == "double": success = self._set_precision(precision) else: - raise Exception("Unknown precision specified: " + precision) + raise Exception(f"Unknown precision specified: {precision}") if not success: raise Exception( - "Budget file could not be read using " - "{} precision".format(precision) + f"Budget file could not be read using {precision} precision" ) return @@ -861,7 +856,7 @@ def _build_index(self): s = header[itxt] if isinstance(s, bytes): s = s.decode() - print(itxt + ": " + str(s)) + print(f"{itxt}: {s}") print("file position: ", ipos) if ( int(header["imeth"]) != 5 @@ -945,7 +940,7 @@ def _skip_record(self, header): + naux * self.realtype(1).nbytes ) else: - raise Exception("invalid method code " + str(imeth)) + raise Exception(f"invalid method code {imeth}") if nbytes != 0: self.file.seek(nbytes, 1) return @@ -1041,7 +1036,7 @@ def list_unique_records(self): for rec, imeth in zip(self.textlist, self.imethlist): if isinstance(rec, bytes): rec = rec.decode() - print("{:16} {:5d}".format(rec.strip(), imeth)) + print(f"{rec.strip():16} {imeth:5d}") return def list_unique_packages(self): @@ -1524,7 +1519,7 @@ def get_record(self, idx, full3D=False): t = header["text"][0] if isinstance(t, bytes): t = t.decode("utf-8") - s = "Returning " + str(t).strip() + " as " + s = f"Returning {t.strip()} as " nlay = abs(header["nlay"][0]) nrow = 
header["nrow"][0] @@ -1533,7 +1528,7 @@ def get_record(self, idx, full3D=False): # default method if imeth == 0: if self.verbose: - s += "an array of shape " + str((nlay, nrow, ncol)) + s += f"an array of shape {(nlay, nrow, ncol)}" print(s) return binaryread( self.file, self.realtype(1), shape=(nlay, nrow, ncol) @@ -1541,7 +1536,7 @@ def get_record(self, idx, full3D=False): # imeth 1 elif imeth == 1: if self.verbose: - s += "an array of shape " + str((nlay, nrow, ncol)) + s += f"an array of shape {(nlay, nrow, ncol)}" print(s) return binaryread( self.file, self.realtype(1), shape=(nlay, nrow, ncol) @@ -1553,11 +1548,9 @@ def get_record(self, idx, full3D=False): dtype = np.dtype([("node", np.int32), ("q", self.realtype)]) if self.verbose: if full3D: - s += "a numpy masked array of size ({}, {}, {})".format( - nlay, nrow, ncol - ) + s += f"a numpy masked array of size ({nlay}, {nrow}, {ncol})" else: - s += "a numpy recarray of size ({}, 2)".format(nlist) + s += f"a numpy recarray of size ({nlist}, 2)" print(s) data = binaryread(self.file, dtype, shape=(nlist,)) if full3D: @@ -1571,9 +1564,7 @@ def get_record(self, idx, full3D=False): data = binaryread(self.file, self.realtype(1), shape=(nrow, ncol)) if self.verbose: if full3D: - s += "a numpy masked array of size ({}, {}, {})".format( - nlay, nrow, ncol - ) + s += f"a numpy masked array of size ({nlay}, {nrow}, {ncol})" else: s += ( "a list of two 2D numpy arrays. " @@ -1596,7 +1587,7 @@ def get_record(self, idx, full3D=False): # imeth 4 elif imeth == 4: if self.verbose: - s += "a 2d numpy array of size ({}, {})".format(nrow, ncol) + s += f"a 2d numpy array of size ({nrow}, {ncol})" print(s) return binaryread(self.file, self.realtype(1), shape=(nrow, ncol)) @@ -1615,16 +1606,12 @@ def get_record(self, idx, full3D=False): data = binaryread(self.file, dtype, shape=(nlist,)) if full3D: if self.verbose: - s += "a list array of shape ({}, {}, {})".format( - nlay, nrow, ncol - ) + s += f"a list array of shape ({nlay}, {nrow}, {ncol})" print(s) return self.create3D(data, nlay, nrow, ncol) else: if self.verbose: - s += "a numpy recarray of size ({}, {})".format( - nlist, 2 + naux - ) + s += f"a numpy recarray of size ({nlist}, {2 + naux})" print(s) return data.view(np.recarray) @@ -1644,22 +1631,17 @@ def get_record(self, idx, full3D=False): data = binaryread(self.file, dtype, shape=(nlist,)) if self.verbose: if full3D: - s += ( - "full 3D arrays not supported for " - "imeth = {}".format(imeth) - ) + s += f"full 3D arrays not supported for imeth = {imeth}" else: - s += "a numpy recarray of size ({}, 2)".format(nlist) + s += f"a numpy recarray of size ({nlist}, 2)" print(s) if full3D: - s += "full 3D arrays not supported for imeth = {}".format( - imeth - ) + s += f"full 3D arrays not supported for imeth = {imeth}" raise ValueError(s) else: return data.view(np.recarray) else: - raise ValueError("invalid imeth value - {}".format(imeth)) + raise ValueError(f"invalid imeth value - {imeth}") # should not reach this point return @@ -1752,7 +1734,7 @@ def get_residual(self, totim, scaled=False): for i in select_indices: text = self.recordarray[i]["text"].decode() if self.verbose: - print("processing {}".format(text)) + print(f"processing {text}") flow = self.get_record(idx=i, full3D=True) if ncol > 1 and "RIGHT FACE" in text: residual -= flow[:, :, :] @@ -1871,9 +1853,7 @@ def __init__( if precision == "auto": precision = get_headfile_precision(filename) if precision == "unknown": - s = "Error. 
Precision could not be determined for {}".format( - filename - ) + s = f"Error. Precision could not be determined for {filename}" print(s) raise Exception() self.header_dtype = BinaryHeader.set_dtype( @@ -1892,7 +1872,7 @@ def _get_data_array(self, totim=0.0): if totim >= 0.0: keyindices = np.where((self.recordarray["totim"] == totim))[0] if len(keyindices) == 0: - msg = "totim value ({}) not found in file...".format(totim) + msg = f"totim value ({totim}) not found in file..." raise Exception(msg) else: raise Exception("Data not found...") @@ -1906,10 +1886,7 @@ def _get_data_array(self, totim=0.0): nend = self.recordarray["nrow"][idx] npl = nend - nstrt + 1 if self.verbose: - msg = "Byte position in file: {} for ".format( - ipos - ) + "layer {}".format(ilay) - print(msg) + print(f"Byte position in file: {ipos} for layer {ilay}") self.file.seek(ipos, 0) data[ilay - 1] = binaryread(self.file, self.realtype, shape=(npl,)) return data diff --git a/flopy/utils/check.py b/flopy/utils/check.py index 1ac673e85..3a62a819d 100644 --- a/flopy/utils/check.py +++ b/flopy/utils/check.py @@ -97,12 +97,10 @@ def __init__( # if isinstance(package, BaseModel): didn't work if hasattr(package, "parent"): self.model = package.parent - self.prefix = "{} PACKAGE DATA VALIDATION".format(package.name[0]) + self.prefix = f"{package.name[0]} PACKAGE DATA VALIDATION" else: self.model = package - self.prefix = "{} MODEL DATA VALIDATION SUMMARY".format( - self.model.name - ) + self.prefix = f"{self.model.name} MODEL DATA VALIDATION SUMMARY" self.package = package if "structured" in self.model.__dict__: self.structured = self.model.structured @@ -125,7 +123,7 @@ def __init__( self.f = open(self.summaryfile, "w") else: self.f = f - self.txt = "\n{}:\n".format(self.prefix) + self.txt = f"\n{self.prefix}:\n" def _add_to_summary( self, @@ -338,9 +336,9 @@ def _stress_period_data_inactivecells(self, stress_period_data): self.summary_array = np.append(self.summary_array, sa).view( np.recarray ) - self.remove_passed(msg + "s") + self.remove_passed(f"{msg}s") else: - self.append_passed(msg + "s") + self.append_passed(f"{msg}s") def _list_spd_check_violations( self, @@ -558,7 +556,7 @@ def summarize(self): packages = self.summary_array.package desc = self.summary_array.desc self.summary_array["desc"] = [ - "\r {} package: {}".format(packages[i], d.strip()) + f"\r {packages[i]} package: {d.strip()}" if packages[i] != "model" else d for i, d in enumerate(desc) @@ -569,15 +567,15 @@ def summarize(self): desc = a.desc t = "" if len(a) > 0: - t += " {} {}s:\n".format(len(a), etype) + t += f" {len(a)} {etype}s:\n" if len(a) == 1: t = t.replace("s", "") # grammar for e in np.unique(desc): n = np.sum(desc == e) if n > 1: - t += " {} instances of {}\n".format(n, e) + t += f" {n} instances of {e}\n" else: - t += " {} instance of {}\n".format(n, e) + t += f" {n} instance of {e}\n" txt += t if txt == "": txt += " No errors or warnings encountered.\n" @@ -587,20 +585,20 @@ def summarize(self): and self.verbose and self.summary_array.shape[0] > 0 ): - txt += " see {} for details.\n".format(self.summaryfile) + txt += f" see {self.summaryfile} for details.\n" # print checks that passed for higher levels if len(self.passed) > 0 and self.level > 0: txt += "\n Checks that passed:\n" for chkname in self.passed: - txt += " {}\n".format(chkname) + txt += f" {chkname}\n" self.txt += txt # for level 2, print the whole summary table at the bottom if self.level > 1: # kludge to improve screen printing self.summary_array["package"] = [ - "{} ".format(s) 
for s in self.summary_array["package"] + f"{s} " for s in self.summary_array["package"] ] self.txt += "\nDETAILED SUMMARY:\n{}".format( self.print_summary(float_format="{:.2e}", delimiter="\t") @@ -611,7 +609,7 @@ def summarize(self): elif self.summary_array.shape[0] > 0 and self.level > 0: print("Errors and/or Warnings encountered.") if self.f is not None: - print(" see {} for details.\n".format(self.summaryfile)) + print(f" see {self.summaryfile} for details.\n") # start of older model specific code def _has_cell_indices(self, stress_period_data): @@ -753,7 +751,7 @@ def _fmt_string_list(array, float_format="{}"): ) else: raise Exception( - "MfList.fmt_string error: unknown vtype in dtype:" + vtype + f"MfList.fmt_string error: unknown vtype in dtype:{vtype}" ) return fmt_string diff --git a/flopy/utils/cvfdutil.py b/flopy/utils/cvfdutil.py index 4f9014b86..e31a1d7bb 100644 --- a/flopy/utils/cvfdutil.py +++ b/flopy/utils/cvfdutil.py @@ -180,11 +180,9 @@ def to_cvfd( nvertstart = 0 if verbose: print("Converting vertdict to cvfd representation.") - print("Number of cells in vertdict is: {}".format(len(vertdict))) + print(f"Number of cells in vertdict is: {len(vertdict)}") print( - "Cell {} up to {} (but not including) will be processed.".format( - nodestart, nodestop - ) + f"Cell {nodestart} up to {nodestop} (but not including) will be processed." ) for icell in range(nodestart, nodestop): points = vertdict[icell] @@ -203,18 +201,16 @@ def to_cvfd( iv += 1 ivertlist.append(ivert) if ivertlist[0] != ivertlist[-1]: - raise Exception("Cell {} not closed".format(icell)) + raise Exception(f"Cell {icell} not closed") vertexlist.append(ivertlist) # next create vertex_cell_dict = {}; for each vertex, store list of cells # that use it nvert = len(vertexdict) if verbose: - print("Started with {} vertices.".format(nvertstart)) - print("Ended up with {} vertices.".format(nvert)) - print( - "Reduced total number of vertices by {}".format(nvertstart - nvert) - ) + print(f"Started with {nvertstart} vertices.") + print(f"Ended up with {nvert} vertices.") + print(f"Reduced total number of vertices by {nvertstart - nvert}") print("Creating dict of vertices with their associated cells") vertex_cell_dict = {} for icell in range(nodestart, nodestop): @@ -270,7 +266,7 @@ def to_cvfd( def shapefile_to_cvfd(shp, **kwargs): import shapefile - print("Translating shapefile ({}) into cvfd format".format(shp)) + print(f"Translating shapefile ({shp}) into cvfd format") sf = shapefile.Reader(shp) shapes = sf.shapes() vertdict = {} @@ -299,7 +295,7 @@ def shapefile_to_xcyc(shp): """ import shapefile - print("Translating shapefile ({}) into cell centroids".format(shp)) + print(f"Translating shapefile ({shp}) into cell centroids") sf = shapefile.Reader(shp) shapes = sf.shapes() ncells = len(shapes) diff --git a/flopy/utils/datafile.py b/flopy/utils/datafile.py index e5966ef39..b919b0ef3 100755 --- a/flopy/utils/datafile.py +++ b/flopy/utils/datafile.py @@ -120,7 +120,7 @@ def __init__(self, filetype=None, precision="single"): "Available types are:".format(self.header_type) ) for idx, t in enumerate(self.header_types): - print(" {0} {1}".format(idx + 1, t)) + print(f" {idx + 1} {t}") return def get_dtype(self): @@ -164,7 +164,7 @@ def __init__(self, filename, precision, verbose, kwargs): self.file.seek(0, 0) # reset to beginning assert self.file.tell() == 0 if totalbytes == 0: - raise IOError("datafile error: file is empty: " + str(filename)) + raise IOError(f"datafile error: file is empty: {filename}") self.nrow = 0 
self.ncol = 0 self.nlay = 0 @@ -178,7 +178,7 @@ def __init__(self, filename, precision, verbose, kwargs): elif precision == "double": self.realtype = np.float64 else: - raise Exception("Unknown precision specified: " + precision) + raise Exception(f"Unknown precision specified: {precision}") self.model = None self.dis = None @@ -194,7 +194,7 @@ def __init__(self, filename, precision, verbose, kwargs): self.mg = kwargs.pop("modelgrid") if len(kwargs.keys()) > 0: args = ",".join(kwargs.keys()) - raise Exception("LayerFile error: unrecognized kwargs: " + args) + raise Exception(f"LayerFile error: unrecognized kwargs: {args}") # read through the file and build the pointer index self._build_index() @@ -265,13 +265,11 @@ def to_shapefile( ).transpose() ).transpose() if mflay != None: - attrib_dict = { - "{}{}".format(attrib_name, mflay): plotarray[0, :, :] - } + attrib_dict = {f"{attrib_name}{mflay}": plotarray[0, :, :]} else: attrib_dict = {} for k in range(plotarray.shape[0]): - name = "{}{}".format(attrib_name, k) + name = f"{attrib_name}{k}" attrib_dict[name] = plotarray[k] from ..export.shapefile_utils import write_grid_shapefile @@ -285,7 +283,7 @@ def plot( totim=None, mflay=None, filename_base=None, - **kwargs + **kwargs, ): """ Plot 3-D model output data in a specific location @@ -374,8 +372,7 @@ def plot( i0 = 0 i1 = self.nlay filenames = [ - "{}_Layer{}.{}".format(filename_base, k + 1, fext) - for k in range(i0, i1) + f"{filename_base}_Layer{k + 1}.{fext}" for k in range(i0, i1) ] # make sure we have a (lay,row,col) shape plotarray @@ -394,7 +391,7 @@ def plot( filenames=filenames, mflay=mflay, modelgrid=self.mg, - **kwargs + **kwargs, ) def _build_index(self): @@ -427,7 +424,7 @@ def _get_data_array(self, totim=0): if totim >= 0.0: keyindices = np.where((self.recordarray["totim"] == totim))[0] if len(keyindices) == 0: - msg = "totim value ({}) not found in file...".format(totim) + msg = f"totim value ({totim}) not found in file..." 
raise Exception(msg) else: raise Exception("Data not found...") @@ -442,9 +439,7 @@ def _get_data_array(self, totim=0): ipos = self.iposarray[idx] ilay = self.recordarray["ilay"][idx] if self.verbose: - msg = "Byte position in file: {} for ".format( - ipos - ) + "layer {}".format(ilay) + msg = f"Byte position in file: {ipos} for layer {ilay}" print(msg) self.file.seek(ipos, 0) nrow = self.recordarray["nrow"][idx] @@ -524,7 +519,7 @@ def get_data(self, kstpkper=None, idx=None, totim=None, mflay=None): ) if idx[0].shape[0] == 0: raise Exception( - "get_data() error: kstpkper not found:{0}".format(kstpkper) + f"get_data() error: kstpkper not found:{kstpkper}" ) totim1 = self.recordarray[idx]["totim"][0] elif totim is not None: diff --git a/flopy/utils/datautil.py b/flopy/utils/datautil.py index c0c80abbc..594b111e4 100644 --- a/flopy/utils/datautil.py +++ b/flopy/utils/datautil.py @@ -344,14 +344,10 @@ def split_data_line(line, external_file=False, delimiter_conf_length=15): if index < len_cl: item = clean_line[index] if item[-1] in PyListUtil.quote_list: - arr_fixed_line[-1] = "{} {}".format( - arr_fixed_line[-1], item[:-1] - ) + arr_fixed_line[-1] += f" {item[:-1]}" break else: - arr_fixed_line[-1] = "{} {}".format( - arr_fixed_line[-1], item - ) + arr_fixed_line[-1] += f" {item}" else: # no quote, just append arr_fixed_line.append(item) @@ -392,13 +388,13 @@ def save_array_diff( def save_array(self, filename, multi_array): file_path = os.path.join(self.path, filename) with open(file_path, "w") as outfile: - outfile.write("{}\n".format(str(multi_array.shape))) + outfile.write(f"{multi_array.shape}\n") if len(multi_array.shape) == 4: for slice in multi_array: for second_slice in slice: for third_slice in second_slice: for item in third_slice: - outfile.write(" {:10.3e}".format(item)) + outfile.write(f" {item:10.3e}") outfile.write("\n") outfile.write("\n") outfile.write("\n") @@ -738,7 +734,7 @@ def __next__(self): if self.iter_num == 0 and self.first_not_numbered: return self.name else: - return "{}_{}".format(self.name, self.iter_num) + return f"{self.name}_{self.iter_num}" class PathIter: diff --git a/flopy/utils/flopy_io.py b/flopy/utils/flopy_io.py index 8fd8fdd79..325058931 100755 --- a/flopy/utils/flopy_io.py +++ b/flopy/utils/flopy_io.py @@ -33,7 +33,7 @@ def _fmt_string(array, float_format="{}"): if vtype == "i": fmt_string += "{:.0f} " elif vtype == "f": - fmt_string += "{} ".format(float_format) + fmt_string += f"{float_format} " elif vtype == "o": fmt_string += "{} " elif vtype == "s": @@ -44,7 +44,7 @@ def _fmt_string(array, float_format="{}"): ) else: raise Exception( - "MfList.fmt_string error: unknown vtype in dtype:" + vtype + f"MfList.fmt_string error: unknown vtype in dtype:{vtype}" ) return fmt_string @@ -192,21 +192,18 @@ def write_fixed_var(v, length=10, ipos=None, free=False, comment=None): if abs(v[n]) < vmin or abs(v[n]) > vmax: ctype = "g" # default precision is 6 if not specified else: - ctype = ".{}f".format(decimal) + ctype = f".{decimal}f" # evaluate if the fixed format value will exceed width - if ( - len("{{:>{}{}}}".format(width, ctype).format(v[n])) - > width - ): - ctype = ".{}g".format(decimal) # preserve precision + if len(f"{{:>{width}{ctype}}}".format(v[n])) > width: + ctype = f".{decimal}g" # preserve precision elif isinstance(v[n], (int, np.int32, np.int64)): ctype = "d" else: ctype = "" - write_fmt = "{{:>{}{}}}".format(width, ctype) + write_fmt = f"{{:>{width}{ctype}}}" out += write_fmt.format(v[n]) if comment is not None: - out += " # 
{}".format(comment) + out += f" # {comment}" out += "\n" return out @@ -430,7 +427,7 @@ def ulstrd(f, nlist, ra, model, sfac_columns, ext_unit_dict): # check for external if line.strip().lower().startswith("external"): inunit = int(line_list[1]) - errmsg = "Could not find a file for unit {}".format(inunit) + errmsg = f"Could not find a file for unit {inunit}" if ext_unit_dict is not None: if inunit in ext_unit_dict: namdata = ext_unit_dict[inunit] @@ -456,11 +453,7 @@ def ulstrd(f, nlist, ra, model, sfac_columns, ext_unit_dict): raw = [fname] fname = os.path.join(*raw) oc_filename = os.path.join(model.model_ws, fname) - msg = ( - "Package.load() error: open/close filename " - + oc_filename - + " not found" - ) + msg = f"Package.load() error: open/close filename {oc_filename} not found" assert os.path.exists(oc_filename), msg if "(binary)" in line.lower(): binary = True diff --git a/flopy/utils/formattedfile.py b/flopy/utils/formattedfile.py index 2efb79e00..df8a9668a 100644 --- a/flopy/utils/formattedfile.py +++ b/flopy/utils/formattedfile.py @@ -83,7 +83,7 @@ def read_header(self, text_file): or not is_int(arrheader[7]) ): raise Exception( - "Unexpected format for FHDTextHeader: " + header_text + f"Unexpected format for FHDTextHeader: {header_text}" ) headerinfo = np.empty([8], dtype=self.dtype) @@ -369,7 +369,7 @@ def __init__( text="head", precision="single", verbose=False, - **kwargs + **kwargs, ): self.text = text super().__init__(filename, precision, verbose, kwargs) diff --git a/flopy/utils/geometry.py b/flopy/utils/geometry.py index 5fab41d8e..7f3eca170 100644 --- a/flopy/utils/geometry.py +++ b/flopy/utils/geometry.py @@ -173,7 +173,7 @@ def __init__(self, geometries=()): super().__init__(geometries) def __repr__(self): - return "Shapes: {}".format(list(self)) + return f"Shapes: {list(self)}" @property def __geo_interface__(self): @@ -238,7 +238,7 @@ def __init__(self, polygons=()): super().__init__(polygons) def __repr__(self): - return "MultiPolygon: {}".format(list(self)) + return f"MultiPolygon: {list(self)}" @property def __geo_interface__(self): @@ -266,7 +266,7 @@ def __init__(self, linestrings=()): super().__init__(linestrings) def __repr__(self): - return "LineString: {}".format(list(self)) + return f"LineString: {list(self)}" @property def __geo_interface__(self): @@ -294,7 +294,7 @@ def __init__(self, points=()): super().__init__(points) def __repr__(self): - return "MultiPoint: {}".format(list(self)) + return f"MultiPoint: {list(self)}" @property def __geo_interface__(self): diff --git a/flopy/utils/gridgen.py b/flopy/utils/gridgen.py index 5c360b448..d20f198ff 100644 --- a/flopy/utils/gridgen.py +++ b/flopy/utils/gridgen.py @@ -72,7 +72,7 @@ def features_to_shapefile(features, featuretype, filename): "linestring", "polygon", ]: - raise Exception("Unrecognized feature type: {}".format(featuretype)) + raise Exception(f"Unrecognized feature type: {featuretype}") if featuretype.lower() in ("line", "linestring"): wr = shapefile.Writer(filename, shapeType=shapefile.POLYLINE) @@ -106,12 +106,12 @@ def ndarray_to_asciigrid(fname, a, extent, nodata=1.0e30): dx = (xmax - xmin) / ncol assert dx == (ymax - ymin) / nrow # header - header = "ncols {}\n".format(ncol) - header += "nrows {}\n".format(nrow) - header += "xllcorner {}\n".format(xmin) - header += "yllcorner {}\n".format(ymin) - header += "cellsize {}\n".format(dx) - header += "NODATA_value {}\n".format(float(nodata)) + header = f"ncols {ncol}\n" + header += f"nrows {nrow}\n" + header += f"xllcorner {xmin}\n" + header += 
f"yllcorner {ymin}\n" + header += f"cellsize {dx}\n" + header += f"NODATA_value {float(nodata)}\n" # replace nan with nodata idx = np.isnan(a) a[idx] = float(nodata) @@ -307,7 +307,7 @@ def set_surface_interpolation( "ymin, ymax: {}".format(elev_extent) ) - nm = "_gridgen.lay{}.asc".format(isurf) + nm = f"_gridgen.lay{isurf}.asc" fname = os.path.join(self.model_ws, nm) ndarray_to_asciigrid(fname, elev, elev_extent) self._asciigrid_dict[isurf] = nm @@ -354,7 +354,7 @@ def add_active_domain(self, feature, layers): self.nja = 0 # Create shapefile or set shapefile to feature - adname = "ad{}".format(len(self._addict)) + adname = f"ad{len(self._addict)}" if isinstance(feature, list): # Create a shapefile adname_w_path = os.path.join(self.model_ws, adname) @@ -364,8 +364,8 @@ def add_active_domain(self, feature, layers): shapefile = feature self._addict[adname] = shapefile - sn = os.path.join(self.model_ws, shapefile + ".shp") - assert os.path.isfile(sn), "Shapefile does not exist: {}".format(sn) + sn = os.path.join(self.model_ws, f"{shapefile}.shp") + assert os.path.isfile(sn), f"Shapefile does not exist: {sn}" for k in layers: self._active_domain[k] = adname @@ -404,7 +404,7 @@ def add_refinement_features(self, features, featuretype, level, layers): self.nja = 0 # Create shapefile or set shapefile to feature - rfname = "rf{}".format(len(self._rfdict)) + rfname = f"rf{len(self._rfdict)}" if isinstance(features, list): rfname_w_path = os.path.join(self.model_ws, rfname) features_to_shapefile(features, featuretype, rfname_w_path) @@ -413,8 +413,8 @@ def add_refinement_features(self, features, featuretype, level, layers): shapefile = features self._rfdict[rfname] = [shapefile, featuretype, level] - sn = os.path.join(self.model_ws, shapefile + ".shp") - assert os.path.isfile(sn), "Shapefile does not exist: {}".format(sn) + sn = os.path.join(self.model_ws, f"{shapefile}.shp") + assert os.path.isfile(sn), f"Shapefile does not exist: {sn}" for k in layers: self._refinement_features[k].append(rfname) @@ -549,7 +549,7 @@ def export(self, verbose=False): f.close() assert os.path.isfile( fname - ), "Could not create export dfn file: {}".format(fname) + ), f"Could not create export dfn file: {fname}" # Export shapefiles cmds = [ @@ -645,7 +645,7 @@ def plot( cmap="Dark2", a=None, masked_values=None, - **kwargs + **kwargs, ): """ Plot the grid. 
This method will plot the grid using the shapefile @@ -704,7 +704,7 @@ def plot( a=a, masked_values=masked_values, idx=idx, - **kwargs + **kwargs, ) plt.xlim(xmin, xmax) plt.ylim(ymin, ymax) @@ -807,9 +807,7 @@ def get_disu( # top top = [0] * nlay for k in range(nlay): - fname = os.path.join( - self.model_ws, "quadtreegrid.top{}.dat".format(k + 1) - ) + fname = os.path.join(self.model_ws, f"quadtreegrid.top{k + 1}.dat") f = open(fname, "r") tpk = np.empty((nodelay[k]), dtype=np.float32) tpk = read1d(f, tpk) @@ -822,16 +820,14 @@ def get_disu( (nodelay[k],), np.float32, np.reshape(tpk, (nodelay[k],)), - name="top {}".format(k + 1), + name=f"top {k + 1}", ) top[k] = tpk # bot bot = [0] * nlay for k in range(nlay): - fname = os.path.join( - self.model_ws, "quadtreegrid.bot{}.dat".format(k + 1) - ) + fname = os.path.join(self.model_ws, f"quadtreegrid.bot{k + 1}.dat") f = open(fname, "r") btk = np.empty((nodelay[k]), dtype=np.float32) btk = read1d(f, btk) @@ -844,7 +840,7 @@ def get_disu( (nodelay[k],), np.float32, np.reshape(btk, (nodelay[k],)), - name="bot {}".format(k + 1), + name=f"bot {k + 1}", ) bot[k] = btk @@ -867,7 +863,7 @@ def get_disu( (nodelay[k],), np.float32, np.reshape(ark, (nodelay[k],)), - name="area layer {}".format(k + 1), + name=f"area layer {k + 1}", ) area[k] = ark istart = istop @@ -1011,9 +1007,7 @@ def get_top(self): istart = 0 for k in range(nlay): istop = istart + nodelay[k] - fname = os.path.join( - self.model_ws, "quadtreegrid.top{}.dat".format(k + 1) - ) + fname = os.path.join(self.model_ws, f"quadtreegrid.top{k + 1}.dat") f = open(fname, "r") tpk = np.empty((nodelay[k]), dtype=np.float32) tpk = read1d(f, tpk) @@ -1039,9 +1033,7 @@ def get_bot(self): istart = 0 for k in range(nlay): istop = istart + nodelay[k] - fname = os.path.join( - self.model_ws, "quadtreegrid.bot{}.dat".format(k + 1) - ) + fname = os.path.join(self.model_ws, f"quadtreegrid.bot{k + 1}.dat") f = open(fname, "r") btk = np.empty((nodelay[k]), dtype=np.float32) btk = read1d(f, btk) @@ -1694,15 +1686,15 @@ def intersect(self, features, featuretype, layer): ifname = "intersect_feature" if isinstance(features, list): ifname_w_path = os.path.join(self.model_ws, ifname) - if os.path.exists(ifname_w_path + ".shp"): - os.remove(ifname_w_path + ".shp") + if os.path.exists(f"{ifname_w_path}.shp"): + os.remove(f"{ifname_w_path}.shp") features_to_shapefile(features, featuretype, ifname_w_path) shapefile = ifname else: shapefile = features - sn = os.path.join(self.model_ws, shapefile + ".shp") - assert os.path.isfile(sn), "Shapefile does not exist: {}".format(sn) + sn = os.path.join(self.model_ws, f"{shapefile}.shp") + assert os.path.isfile(sn), f"Shapefile does not exist: {sn}" fname = os.path.join(self.model_ws, "_intersect.dfn") if os.path.isfile(fname): @@ -1754,10 +1746,10 @@ def _intersection_block(self, shapefile, featuretype, layer): s = "" s += "BEGIN GRID_INTERSECTION intersect\n" s += " GRID = quadtreegrid\n" - s += " LAYER = {}\n".format(layer + 1) - s += " SHAPEFILE = {}\n".format(shapefile) - s += " FEATURE_TYPE = {}\n".format(featuretype) - s += " OUTPUT_FILE = {}\n".format("intersection.ifo") + s += f" LAYER = {layer + 1}\n" + s += f" SHAPEFILE = {shapefile}\n" + s += f" FEATURE_TYPE = {featuretype}\n" + s += " OUTPUT_FILE = intersection.ifo\n" s += "END GRID_INTERSECTION intersect\n" return s @@ -1771,17 +1763,17 @@ def _mfgrid_block(self): s = "" s += "BEGIN MODFLOW_GRID basegrid\n" - s += " ROTATION_ANGLE = {}\n".format(angrot) - s += " X_OFFSET = {}\n".format(xoff) - s += " Y_OFFSET = 
{}\n".format(yoff) - s += " NLAY = {}\n".format(self.nlay) - s += " NROW = {}\n".format(self.nrow) - s += " NCOL = {}\n".format(self.ncol) + s += f" ROTATION_ANGLE = {angrot}\n" + s += f" X_OFFSET = {xoff}\n" + s += f" Y_OFFSET = {yoff}\n" + s += f" NLAY = {self.nlay}\n" + s += f" NROW = {self.nrow}\n" + s += f" NCOL = {self.ncol}\n" # delr delr = self.dis.delr.array if delr.min() == delr.max(): - s += " DELR = CONSTANT {}\n".format(delr.min()) + s += f" DELR = CONSTANT {delr.min()}\n" else: s += " DELR = OPEN/CLOSE delr.dat\n" fname = os.path.join(self.model_ws, "delr.dat") @@ -1790,7 +1782,7 @@ def _mfgrid_block(self): # delc delc = self.dis.delc.array if delc.min() == delc.max(): - s += " DELC = CONSTANT {}\n".format(delc.min()) + s += f" DELC = CONSTANT {delc.min()}\n" else: s += " DELC = OPEN/CLOSE delc.dat\n" fname = os.path.join(self.model_ws, "delc.dat") @@ -1799,7 +1791,7 @@ def _mfgrid_block(self): # top top = self.dis.top.array if top.min() == top.max(): - s += " TOP = CONSTANT {}\n".format(top.min()) + s += f" TOP = CONSTANT {top.min()}\n" else: s += " TOP = OPEN/CLOSE top.dat\n" fname = os.path.join(self.model_ws, "top.dat") @@ -1813,14 +1805,12 @@ def _mfgrid_block(self): else: bot = botm[k] if bot.min() == bot.max(): - s += " BOTTOM LAYER {} = CONSTANT {}\n".format( - k + 1, bot.min() - ) + s += f" BOTTOM LAYER {k + 1} = CONSTANT {bot.min()}\n" else: s += " BOTTOM LAYER {0} = OPEN/CLOSE bot{0}.dat\n".format( k + 1 ) - fname = os.path.join(self.model_ws, "bot{}.dat".format(k + 1)) + fname = os.path.join(self.model_ws, f"bot{k + 1}.dat") np.savetxt(fname, bot) s += "END MODFLOW_GRID\n" @@ -1830,10 +1820,10 @@ def _rf_blocks(self): s = "" for rfname, rf in self._rfdict.items(): shapefile, featuretype, level = rf - s += "BEGIN REFINEMENT_FEATURES {}\n".format(rfname) - s += " SHAPEFILE = {}\n".format(shapefile) - s += " FEATURE_TYPE = {}\n".format(featuretype) - s += " REFINEMENT_LEVEL = {}\n".format(level) + s += f"BEGIN REFINEMENT_FEATURES {rfname}\n" + s += f" SHAPEFILE = {shapefile}\n" + s += f" FEATURE_TYPE = {featuretype}\n" + s += f" REFINEMENT_LEVEL = {level}\n" s += "END REFINEMENT_FEATURES\n" s += 2 * "\n" return s @@ -1841,10 +1831,10 @@ def _rf_blocks(self): def _ad_blocks(self): s = "" for adname, shapefile in self._addict.items(): - s += "BEGIN ACTIVE_DOMAIN {}\n".format(adname) - s += " SHAPEFILE = {}\n".format(shapefile) - s += " FEATURE_TYPE = {}\n".format("polygon") - s += " INCLUDE_BOUNDARY = {}\n".format("True") + s += f"BEGIN ACTIVE_DOMAIN {adname}\n" + s += f" SHAPEFILE = {shapefile}\n" + s += " FEATURE_TYPE = polygon\n" + s += " INCLUDE_BOUNDARY = True\n" s += "END ACTIVE_DOMAIN\n" s += 2 * "\n" return s @@ -1857,15 +1847,15 @@ def _builder_block(self): for k, adk in enumerate(self._active_domain): if adk is None: continue - s += " ACTIVE_DOMAIN LAYER {} = {}\n".format(k + 1, adk) + s += f" ACTIVE_DOMAIN LAYER {k + 1} = {adk}\n" # Write refinement feature information for k, rfkl in enumerate(self._refinement_features): if len(rfkl) == 0: continue - s += " REFINEMENT_FEATURES LAYER {} = ".format(k + 1) + s += f" REFINEMENT_FEATURES LAYER {k + 1} = " for rf in rfkl: - s += rf + " " + s += f"{rf} " s += "\n" s += " SMOOTHING = full\n" @@ -1875,18 +1865,14 @@ def _builder_block(self): grd = self._asciigrid_dict[k] else: grd = "basename" - s += " TOP LAYER {} = {} {}\n".format( - k + 1, self.surface_interpolation[k], grd - ) + s += f" TOP LAYER {k + 1} = {self.surface_interpolation[k]} {grd}\n" for k in range(self.nlay): if self.surface_interpolation[k + 1] == 
"ASCIIGRID": grd = self._asciigrid_dict[k + 1] else: grd = "basename" - s += " BOTTOM LAYER {} = {} {}\n".format( - k + 1, self.surface_interpolation[k + 1], grd - ) + s += f" BOTTOM LAYER {k + 1} = {self.surface_interpolation[k + 1]} {grd}\n" s += " GRID_DEFINITION_FILE = quadtreegrid.dfn\n" s += "END QUADTREE_BUILDER\n" @@ -1908,9 +1894,7 @@ def _grid_export_blocks(self): s += "BEGIN GRID_TO_USGDATA grid_to_usgdata\n" s += " GRID = quadtreegrid\n" s += " USG_DATA_PREFIX = qtg\n" - s += " VERTICAL_PASS_THROUGH = {0}\n".format( - self.vertical_pass_through - ) + s += f" VERTICAL_PASS_THROUGH = {self.vertical_pass_through}\n" s += "END GRID_TO_USGDATA\n" s += "\n" s += "BEGIN GRID_TO_VTKFILE grid_to_vtk\n" @@ -1941,7 +1925,7 @@ def _mkvertdict(self): fname = os.path.join(self.model_ws, "qtg.nod") if not os.path.isfile(fname): raise Exception( - "File {} should have been created by gridgen.".format(fname) + f"File {fname} should have been created by gridgen." ) f = open(fname, "r") line = f.readline() diff --git a/flopy/utils/gridintersect.py b/flopy/utils/gridintersect.py index 09137b9cf..af6651b34 100644 --- a/flopy/utils/gridintersect.py +++ b/flopy/utils/gridintersect.py @@ -263,7 +263,7 @@ def intersect(self, shp, **kwargs): else: rec = self._intersect_polygon_shapely(shp, sort_by_cellid) else: - err = "Shapetype {} is not supported".format(gu.shapetype) + err = f"Shapetype {gu.shapetype} is not supported" raise TypeError(err) return rec @@ -320,7 +320,7 @@ def _vtx_grid_to_shape_generator(self): for icell in self.mfgrid._cell2d.icell2d: points = [] icverts = [ - "icvert_{}".format(i) + f"icvert_{i}" for i in range(self.mfgrid._cell2d["ncvert"][icell]) ] for iv in self.mfgrid._cell2d[icverts][icell]: @@ -1503,7 +1503,7 @@ def plot_polygon(rec, ax=None, **kwargs): if "facecolor" in kwargs: fc = kwargs.pop("facecolor") else: - fc = "C{}".format(i % 10) + fc = f"C{i % 10}" ppi = PolygonPatch(ishp, facecolor=fc, **kwargs) ax.add_patch(ppi) @@ -1555,7 +1555,7 @@ def plot_linestring(rec, ax=None, cmap=None, **kwargs): for i, ishp in enumerate(rec.ixshapes): if not specified_color: if cmap is None: - c = "C{}".format(i % 10) + c = f"C{i % 10}" else: c = colors[i] if ishp.type == "MultiLineString": diff --git a/flopy/utils/mflistfile.py b/flopy/utils/mflistfile.py index 35523d052..3af271a48 100644 --- a/flopy/utils/mflistfile.py +++ b/flopy/utils/mflistfile.py @@ -44,9 +44,7 @@ class ListBudget: def __init__(self, file_name, budgetkey=None, timeunit="days"): # Set up file reading - assert os.path.exists(file_name), "file_name {0} not found".format( - file_name - ) + assert os.path.exists(file_name), f"file_name {file_name} not found" self.file_name = file_name self.f = open(file_name, "r", encoding="ascii", errors="replace") @@ -412,16 +410,14 @@ def get_data(self, kstpkper=None, idx=None, totim=None, incremental=False): ipos = self.get_kstpkper().index(kstpkper) except: print( - " could not retrieve kstpkper " - "{} from the lst file".format(kstpkper) + f" could not retrieve kstpkper {kstpkper} from the lst file" ) elif totim is not None: try: ipos = self.get_times().index(totim) except: print( - " could not retrieve totime " - "{} from the lst file".format(totim) + f" could not retrieve totime {totim} from the lst file" ) elif idx is not None: ipos = idx @@ -430,8 +426,8 @@ def get_data(self, kstpkper=None, idx=None, totim=None, incremental=False): if ipos is None: print("Could not find specified condition.") - print(" kstpkper = {}".format(kstpkper)) - print(" totim = {}".format(totim)) + 
print(f" kstpkper = {kstpkper}") + print(f" totim = {totim}") # TODO: return zero-length array, or update docstring return type return None @@ -481,7 +477,7 @@ def get_dataframes(self, start_datetime="1-1-1970", diff=False): try: import pandas as pd except Exception as e: - msg = "ListBudget.get_dataframe(): requires pandas: " + str(e) + msg = f"ListBudget.get_dataframe(): requires pandas: {e!s}" raise ImportError(msg) if not self._isvalid: @@ -505,8 +501,8 @@ def get_dataframes(self, start_datetime="1-1-1970", diff=False): base_names = [name.replace("_IN", "") for name in in_names] for name in base_names: - in_name = name + "_IN" - out_name = name + "_OUT" + in_name = f"{name}_IN" + out_name = f"{name}_OUT" df_flux.loc[:, name.lower()] = ( df_flux.loc[:, in_name] - df_flux.loc[:, out_name] ) @@ -674,7 +670,7 @@ def _set_entries(self): if len(self.idx_map) < 1: return None, None if len(self.entries) > 0: - raise Exception("entries already set:" + str(self.entries)) + raise Exception(f"entries already set:{self.entries}") if not self.idx_map: raise Exception("must call build_index before call set_entries") try: @@ -813,10 +809,10 @@ def _get_sp(self, ts, sp, seekpoint): if entry in entrydict: entrydict[entry] += 1 inum = entrydict[entry] - entry = "{}{}".format(entry, inum + 1) + entry = f"{entry}{inum + 1}" else: entrydict[entry] = 0 - key = "{}_{}".format(entry, tag) + key = f"{entry}_{tag}" incdict[key] = flux cumdict[key] = cumu else: diff --git a/flopy/utils/mfreadnam.py b/flopy/utils/mfreadnam.py index ffd0c167b..906c4340c 100644 --- a/flopy/utils/mfreadnam.py +++ b/flopy/utils/mfreadnam.py @@ -65,9 +65,7 @@ def __init__(self, pkgtype, name, handle, packages): self.package = packages[self.filetype.lower()] def __repr__(self): - return "filename:{0}, filetype:{1}".format( - self.filename, self.filetype - ) + return f"filename:{self.filename}, filetype:{self.filetype}" def getfiletypeunit(nf, filetype): @@ -87,7 +85,7 @@ def getfiletypeunit(nf, filetype): for cunit, cvals in nf.items(): if cvals.filetype.lower() == filetype.lower(): return cunit - print('Name file does not contain file of type "{0}"'.format(filetype)) + print(f'Name file does not contain file of type "{filetype}"') return None @@ -123,14 +121,14 @@ def parsenamefile(namfilename, packages, verbose=True): ext_unit_dict = {} if verbose: - print("Parsing the namefile --> {0:s}".format(namfilename)) + print(f"Parsing the namefile --> {namfilename}") if not os.path.isfile(namfilename): # help diagnose the namfile and directory - e = "Could not find {} ".format( - namfilename - ) + "in directory {}".format(os.path.dirname(namfilename)) - raise IOError(e) + raise IOError( + f"Could not find {namfilename} " + f"in directory {os.path.dirname(namfilename)}" + ) with open(namfilename, "r") as fp: lines = fp.readlines() @@ -142,7 +140,7 @@ def parsenamefile(namfilename, packages, verbose=True): items = line.split() # ensure we have at least three items if len(items) < 3: - e = "line number {} has fewer than 3 items: {}".format(ln, line) + e = f"line number {ln} has fewer than 3 items: {line}" raise ValueError(e) ftype, key, fpath = items[0:3] ftype = ftype.upper() @@ -183,7 +181,7 @@ def parsenamefile(namfilename, packages, verbose=True): filehandle = open(fname, openmode, **kwargs) except IOError: if verbose: - print("could not set filehandle to {0:s}".format(fpath)) + print(f"could not set filehandle to {fpath}") filehandle = None # be sure the second value is an integer try: @@ -232,31 +230,31 @@ def 
attribs_from_namfile_header(namefile): xll = float(item.split(":")[1]) defaults["xll"] = xll except: - print(" could not parse xll in {}".format(namefile)) + print(f" could not parse xll in {namefile}") elif "yll" in item.lower(): try: yll = float(item.split(":")[1]) defaults["yll"] = yll except: - print(" could not parse yll in {}".format(namefile)) + print(f" could not parse yll in {namefile}") elif "xul" in item.lower(): try: xul = float(item.split(":")[1]) defaults["xul"] = xul except: - print(" could not parse xul in {}".format(namefile)) + print(f" could not parse xul in {namefile}") elif "yul" in item.lower(): try: yul = float(item.split(":")[1]) defaults["yul"] = yul except: - print(" could not parse yul in {}".format(namefile)) + print(f" could not parse yul in {namefile}") elif "rotation" in item.lower(): try: angrot = float(item.split(":")[1]) defaults["rotation"] = angrot except: - print(" could not parse rotation in {}".format(namefile)) + print(f" could not parse rotation in {namefile}") elif "proj4_str" in item.lower(): try: proj4 = ":".join(item.split(":")[1:]).strip() @@ -264,11 +262,11 @@ def attribs_from_namfile_header(namefile): proj4 = None defaults["proj4_str"] = proj4 except: - print(" could not parse proj4_str in {}".format(namefile)) + print(f" could not parse proj4_str in {namefile}") elif "start" in item.lower(): try: start_datetime = item.split(":")[1].strip() defaults["start_datetime"] = start_datetime except: - print(" could not parse start in {}".format(namefile)) + print(f" could not parse start in {namefile}") return defaults diff --git a/flopy/utils/modpathfile.py b/flopy/utils/modpathfile.py index 44f84d0fd..326a30592 100644 --- a/flopy/utils/modpathfile.py +++ b/flopy/utils/modpathfile.py @@ -56,13 +56,10 @@ def _build_index(self): if isinstance(line, bytes): line = line.decode() if self.skiprows < 1: - if ( - "MODPATH_{}_FILE 6".format(self.output_type) - in line.upper() - ): + if f"MODPATH_{self.output_type}_FILE 6" in line.upper(): self.version = 6 elif ( - "MODPATH_{}_FILE 7".format(self.output_type) + f"MODPATH_{self.output_type}_FILE 7" in line.upper() ): self.version = 7 @@ -278,7 +275,7 @@ def write_shapefile( shpname="endpoints.shp", mg=None, epsg=None, - **kwargs + **kwargs, ): """ Write pathlines or timeseries to a shapefile @@ -748,7 +745,7 @@ def write_shapefile( shpname="pathlines.shp", mg=None, epsg=None, - **kwargs + **kwargs, ): """ Write pathlines to a shapefile @@ -786,7 +783,7 @@ def write_shapefile( shpname=shpname, mg=mg, epsg=epsg, - **kwargs + **kwargs, ) @@ -878,9 +875,7 @@ def _build_index(self): else: self.version = None if self.version is None: - errmsg = "{} is not a valid endpoint file".format( - self.fname - ) + errmsg = f"{self.fname} is not a valid endpoint file" raise Exception(errmsg) self.skiprows += 1 if self.version == 6 or self.version == 7: @@ -897,7 +892,7 @@ def _build_index(self): self.file.seek(0) if self.verbose: - print("MODPATH version {} endpoint file".format(self.version)) + print(f"MODPATH version {self.version} endpoint file") def _get_dtypes(self): """ @@ -1171,10 +1166,7 @@ def get_destination_endpoint_data(self, dest_cells, source=False): try: raslice = ra_slice(ra, keys) except (KeyError, ValueError): - msg = ( - "could not extract '{}' ".format(keys[0]) - + "key from endpoint data" - ) + msg = f"could not extract '{keys[0]}' key from endpoint data" raise KeyError(msg) if isinstance(dest_cells, (list, tuple)): allint = all(isinstance(el, int) for el in dest_cells) @@ -1201,7 +1193,7 @@ def 
write_shapefile( direction="ending", mg=None, epsg=None, - **kwargs + **kwargs, ): """ Write particle starting / ending locations to shapefile. @@ -1353,7 +1345,7 @@ def _build_index(self): self.version = None if self.version is None: raise Exception( - "{} is not a valid timeseries file".format(self.fname) + f"{self.fname} is not a valid timeseries file" ) self.skiprows += 1 if self.version == 6 or self.version == 7: @@ -1590,7 +1582,7 @@ def write_shapefile( shpname="pathlines.shp", mg=None, epsg=None, - **kwargs + **kwargs, ): """ Write pathlines to a shapefile @@ -1628,5 +1620,5 @@ def write_shapefile( shpname=shpname, mg=mg, epsg=epsg, - **kwargs + **kwargs, ) diff --git a/flopy/utils/mtlistfile.py b/flopy/utils/mtlistfile.py index ab570b7c3..31cb63ad9 100644 --- a/flopy/utils/mtlistfile.py +++ b/flopy/utils/mtlistfile.py @@ -265,9 +265,7 @@ def _parse_gw(self, f, line): totim = float(line.split()[-2]) except Exception as e: raise Exception( - "error parsing totim on line {0}: {1}".format( - self.lcount, str(e) - ) + f"error parsing totim on line {self.lcount}: {e!s}" ) for _ in range(3): @@ -284,14 +282,12 @@ def _parse_gw(self, f, line): tkstp = int(tkstp_str) except Exception as e: raise Exception( - "error parsing time step info on line {0}: {1}".format( - self.lcount, str(e) - ) + f"error parsing time step info on line {self.lcount}: {e!s}" ) for lab, val in zip( ["totim", "kper", "kstp", "tkstp"], [totim, kper, kstp, tkstp] ): - lab += "_{0}".format(comp) + lab += f"_{comp}" if lab not in self.gw_data.keys(): self.gw_data[lab] = [] self.gw_data[lab].append(val) @@ -315,9 +311,7 @@ def _parse_gw(self, f, line): item, ival, oval = self._parse_gw_line(line) except Exception as e: raise Exception( - "error parsing GW items on line {0}: {1}".format( - self.lcount, str(e) - ) + f"error parsing GW items on line {self.lcount}: {e!s}" ) self._add_to_gw_data(item, ival, oval, comp) if break_next: @@ -344,8 +338,7 @@ def _parse_gw(self, f, line): item, ival, oval = self._parse_gw_line(line) except Exception as e: raise Exception( - "error parsing GW items " - "on line {0}: {1}".format(self.lcount, str(e)) + f"error parsing GW items on line {self.lcount}: {e!s}" ) self._add_to_gw_data(item, ival, oval, comp) if "discrepancy" in item: @@ -358,7 +351,7 @@ def _parse_gw_line(self, line): idx_ival = 0 idx_oval = 1 if self.imm: - item = "imm_" + item + item = f"imm_{item}" if "TOTAL" in item.upper(): idx_oval += 1 # to deal with the units in the total string # net (in-out) and discrepancy will only have 1 entry @@ -371,7 +364,7 @@ def _parse_gw_line(self, line): return item, ival, oval def _add_to_gw_data(self, item, ival, oval, comp): - item += "_{0}".format(comp) + item += f"_{comp}" if oval is None: lab_val = zip([""], [ival], [""]) else: @@ -394,12 +387,10 @@ def _parse_sw(self, f, line): tkstp = int(tkstp_str) except Exception as e: raise Exception( - "error parsing time step info on line {0}: {1}".format( - self.lcount, str(e) - ) + f"error parsing time step info on line {self.lcount}: {e!s}" ) for lab, val in zip(["kper", "kstp", "tkstp"], [kper, kstp, tkstp]): - lab += "_{0}".format(comp) + lab += f"_{comp}" if lab not in self.sw_data.keys(): self.sw_data[lab] = [] self.sw_data[lab].append(val) @@ -420,8 +411,7 @@ def _parse_sw(self, f, line): item, cval, fval = self._parse_sw_line(line) except Exception as e: raise Exception( - "error parsing 'in' SW items on line {}: " - "{}".format(self.lcount, str(e)) + f"error parsing 'in' SW items on line {self.lcount}: {e!s}" ) 
self._add_to_sw_data("in", item, cval, fval, comp) if break_next: @@ -442,9 +432,7 @@ def _parse_sw(self, f, line): item, cval, fval = self._parse_sw_line(line) except Exception as e: raise Exception( - "error parsing 'out' SW items on line {0}: {1}".format( - self.lcount, str(e) - ) + f"error parsing 'out' SW items on line {self.lcount}: {e!s}" ) self._add_to_sw_data("out", item, cval, fval, comp) if break_next: @@ -467,9 +455,7 @@ def _parse_sw(self, f, line): item, cval, fval = self._parse_sw_line(line) except Exception as e: raise Exception( - "error parsing 'out' SW items on line {0}: {1}".format( - self.lcount, str(e) - ) + f"error parsing 'out' SW items on line {self.lcount}: {e!s}" ) self._add_to_sw_data("net", item, cval, fval, comp) # out_tots = self._parse_sw_line(line) @@ -489,9 +475,9 @@ def _parse_sw_line(self, line): return citem, cval, fval def _add_to_sw_data(self, inout, item, cval, fval, comp): - item += "_{0}".format(comp) + item += f"_{comp}" if inout.lower() in set(["in", "out"]): - item += "_{0}".format(inout) + item += f"_{inout}" if fval is None: lab_val = zip([""], [cval]) else: diff --git a/flopy/utils/observationfile.py b/flopy/utils/observationfile.py index a463e0a9e..7984f65da 100644 --- a/flopy/utils/observationfile.py +++ b/flopy/utils/observationfile.py @@ -178,7 +178,7 @@ def get_dataframe( import pandas as pd from ..utils.utils_def import totim_to_datetime except Exception as e: - msg = "ObsFiles.get_dataframe() error import pandas: " + str(e) + msg = f"ObsFiles.get_dataframe() error import pandas: {e!s}" raise ImportError(msg) i0 = 0 @@ -574,9 +574,9 @@ def get_selection(data, names): for name in names: if name not in data.dtype.names: ierr += 1 - print("Error: {} is not a valid column name".format(name)) + print(f"Error: {name} is not a valid column name") if ierr > 0: - raise Exception("Error: {} names did not match".format(ierr)) + raise Exception(f"Error: {ierr} names did not match") # Valid list of names so make a selection dtype2 = np.dtype({name: data.dtype.fields[name] for name in names}) diff --git a/flopy/utils/optionblock.py b/flopy/utils/optionblock.py index eca623347..db377436d 100644 --- a/flopy/utils/optionblock.py +++ b/flopy/utils/optionblock.py @@ -273,7 +273,7 @@ def _set_attributes(self): ix += 1 else: - err_msg = "Option: {} not a valid option".format(t[ix]) + err_msg = f"Option: {t[ix]} not a valid option" raise KeyError(err_msg) else: @@ -352,8 +352,7 @@ def load_options(cls, options, package): options = open(options, "r") except IOError: err_msg = ( - "Unrecognized type for options" - " variable: {}".format(type(options)) + f"Unrecognized type for options variable: {type(options)}" ) raise TypeError(err_msg) @@ -389,10 +388,7 @@ def load_options(cls, options, package): valid = True if not valid: - err_msg = ( - "Invalid type set to variable " - "{} in option block".format(k) - ) + err_msg = f"Invalid type set to variable {k} in option block" raise TypeError(err_msg) option_line += t[ix] + " " @@ -479,10 +475,7 @@ def isvalid(dtype, val): pass if not valid: - err_msg = ( - "Invalid type set to variable " - "{} in option block".format(val) - ) + err_msg = f"Invalid type set to variable {val} in option block" raise TypeError(err_msg) return valid diff --git a/flopy/utils/postprocessing.py b/flopy/utils/postprocessing.py index cf115815f..ed02ead9b 100644 --- a/flopy/utils/postprocessing.py +++ b/flopy/utils/postprocessing.py @@ -377,8 +377,7 @@ def get_extended_budget( matched_name = [s for s in rec_names if budget_term in s] if not 
matched_name: raise RuntimeError( - "Budget term " + budget_term + " not found" - ' in "' + cbcfile + '" file.' + f'Budget term {budget_term} not found in "{cbcfile}" file.' ) if len(matched_name) > 1: raise RuntimeError( @@ -696,9 +695,7 @@ def get_specific_discharge( qx = tqx / cross_area_x qy = tqy / cross_area_y qz = tqz / cross_area_z else: - raise ValueError( - '"' + position + '" is not a valid value for ' "position" - ) + raise ValueError(f'"{position}" is not a valid value for position') if position == "vertices": qx = modelgrid.array_at_verts(qx) qy = modelgrid.array_at_verts(qy) diff --git a/flopy/utils/rasters.py b/flopy/utils/rasters.py index 21d4928cd..53f4a1785 100644 --- a/flopy/utils/rasters.py +++ b/flopy/utils/rasters.py @@ -489,7 +489,7 @@ def resample_to_grid( data[node] = val else: - raise TypeError("{} method not supported".format(method)) + raise TypeError(f"{method} method not supported") if extrapolate_edges and method != "nearest": xc = modelgrid.xcellcenters @@ -941,7 +941,7 @@ def plot(self, ax=None, contour=False, **kwargs): ax=ax, contour=contour, transform=self._meta["transform"], - **kwargs + **kwargs, ) return ax diff --git a/flopy/utils/sfroutputfile.py b/flopy/utils/sfroutputfile.py index e2467d24f..1cb50b2f3 100644 --- a/flopy/utils/sfroutputfile.py +++ b/flopy/utils/sfroutputfile.py @@ -80,9 +80,7 @@ def __init__(self, filename, geometries=None, verbose=False): break if not evaluated_format: raise ValueError( - "could not evaluate format of {!r} for SfrFile".format( - self.filename - ) + f"could not evaluate format of {self.filename!r} for SfrFile" ) # all outputs start with the same 15 columns self.names = [ @@ -259,5 +257,5 @@ def get_results(self, segment, reach): if len(srresults) > 0: results = results.append(srresults) else: - print("No results for segment {}, reach {}!".format(s, r)) + print(f"No results for segment {s}, reach {r}!") return results diff --git a/flopy/utils/swroutputfile.py b/flopy/utils/swroutputfile.py index 9bcfc8a2e..af6c920a7 100644 --- a/flopy/utils/swroutputfile.py +++ b/flopy/utils/swroutputfile.py @@ -66,12 +66,9 @@ def __init__( if swrtype.lower() in self.types: self.type = swrtype.lower() else: - err = ( - "SWR type ({}) is not defined. ".format(type) - + "Available types are:\n" - ) + err = f"SWR type ({swrtype}) is not defined. 
Available types are:\n" for t in self.types: - err = "{} {}\n".format(err, t) + err += f" {t}\n" raise Exception(err) # set data dtypes diff --git a/flopy/utils/triangle.py b/flopy/utils/triangle.py index 82fa0b4da..181d5533a 100644 --- a/flopy/utils/triangle.py +++ b/flopy/utils/triangle.py @@ -151,21 +151,21 @@ def build(self, verbose=False): self.clean() # write the active domain to a file - fname = os.path.join(self.model_ws, self.file_prefix + ".0.node") + fname = os.path.join(self.model_ws, f"{self.file_prefix}.0.node") self._write_nodefile(fname) # poly file - fname = os.path.join(self.model_ws, self.file_prefix + ".0.poly") + fname = os.path.join(self.model_ws, f"{self.file_prefix}.0.poly") self._write_polyfile(fname) # Construct the triangle command cmds = [self.exe_name] if self.maximum_area is not None: - cmds.append("-a{}".format(self.maximum_area)) + cmds.append(f"-a{self.maximum_area}") else: cmds.append("-a") if self.angle is not None: - cmds.append("-q{}".format(self.angle)) + cmds.append(f"-q{self.angle}") if self.additional_args is not None: cmds += self.additional_args cmds.append("-A") # assign attributes @@ -174,7 +174,7 @@ def build(self, verbose=False): cmds.append("-D") # delaunay triangles for finite volume cmds.append("-e") # edge file cmds.append("-n") # neighbor file - cmds.append(self.file_prefix + ".0") # output file name + cmds.append(f"{self.file_prefix}.0") # output file name # run Triangle buff = subprocess.check_output(cmds, cwd=self.model_ws) @@ -205,7 +205,7 @@ def plot( cmap="Dark2", a=None, masked_values=None, - **kwargs + **kwargs, ): """ Plot the grid. This method will plot the grid using the shapefile @@ -262,7 +262,7 @@ def plot( masked_values=masked_values, cmap=cmap, edgecolor=edgecolor, - **kwargs + **kwargs, ) return pc @@ -617,18 +617,18 @@ def clean(self): """ # remove input files for ext in ["poly", "node"]: - fname = os.path.join(self.model_ws, self.file_prefix + "0." + ext) + fname = os.path.join(self.model_ws, f"{self.file_prefix}0.{ext}") if os.path.isfile(fname): os.remove(fname) if os.path.isfile(fname): - print("Could not remove: {}".format(fname)) + print(f"Could not remove: {fname}") # remove output files for ext in ["poly", "ele", "node", "neigh", "edge"]: - fname = os.path.join(self.model_ws, self.file_prefix + "1." + ext) + fname = os.path.join(self.model_ws, f"{self.file_prefix}1.{ext}") if os.path.isfile(fname): os.remove(fname) if os.path.isfile(fname): - print("Could not remove: {}".format(fname)) + print(f"Could not remove: {fname}") return def _initialize_vars(self): @@ -649,7 +649,7 @@ def _load_results(self): # node file ext = "node" dt = [("ivert", int), ("x", float), ("y", float)] - fname = os.path.join(self.model_ws, self.file_prefix + ".1." + ext) + fname = os.path.join(self.model_ws, f"{self.file_prefix}.1.{ext}") setattr(self, ext, None) if os.path.isfile(fname): f = open(fname, "r") @@ -672,7 +672,7 @@ def _load_results(self): # ele file ext = "ele" dt = [("icell", int), ("iv1", int), ("iv2", int), ("iv3", int)] - fname = os.path.join(self.model_ws, self.file_prefix + ".1." + ext) + fname = os.path.join(self.model_ws, f"{self.file_prefix}.1.{ext}") setattr(self, ext, None) if os.path.isfile(fname): f = open(fname, "r") @@ -692,7 +692,7 @@ def _load_results(self): # edge file ext = "edge" dt = [("iedge", int), ("endpoint1", int), ("endpoint2", int)] - fname = os.path.join(self.model_ws, self.file_prefix + ".1." 
+ ext) + fname = os.path.join(self.model_ws, f"{self.file_prefix}.1.{ext}") setattr(self, ext, None) if os.path.isfile(fname): f = open(fname, "r") @@ -715,7 +715,7 @@ def _load_results(self): ("neighbor2", int), ("neighbor3", int), ] - fname = os.path.join(self.model_ws, self.file_prefix + ".1." + ext) + fname = os.path.join(self.model_ws, f"{self.file_prefix}.1.{ext}") setattr(self, ext, None) if os.path.isfile(fname): f = open(fname, "r") @@ -738,19 +738,17 @@ def _write_nodefile(self, fname): nvert += len(p) if self._nodes is not None: nvert += self._nodes.shape[0] - s = "{} {} {} {}\n".format(nvert, 2, 0, 0) + s = f"{nvert} 2 0 0\n" f.write(s) ip = 0 for p in self._polygons: for vertex in p: - s = "{} {} {}\n".format(ip, vertex[0], vertex[1]) + s = f"{ip} {vertex[0]} {vertex[1]}\n" f.write(s) ip += 1 if self._nodes is not None: for i in range(self._nodes.shape[0]): - s = "{} {} {}\n".format( - ip, self._nodes[i, 0], self._nodes[i, 1] - ) + s = f"{ip} {self._nodes[i, 0]} {self._nodes[i, 1]}\n" f.write(s) ip += 1 f.close() @@ -767,7 +765,7 @@ def _write_polyfile(self, fname): for p in self._polygons: nseg += len(p) bm = 1 - s = "{} {}\n".format(nseg, bm) + s = f"{nseg} {bm}\n" f.write(s) iseg = 0 @@ -781,22 +779,22 @@ def _write_polyfile(self, fname): ep2 = 0 ep1 += ipstart ep2 += ipstart - s = "{} {} {} {}\n".format(iseg, ep1, ep2, iseg + 1) + s = f"{iseg} {ep1} {ep2} {iseg + 1}\n" f.write(s) iseg += 1 ipstart += len(p) # holes nholes = len(self._holes) - s = "{}\n".format(nholes) + s = f"{nholes}\n" f.write(s) for i, hole in enumerate(self._holes): - s = "{} {} {}\n".format(i, hole[0], hole[1]) + s = f"{i} {hole[0]} {hole[1]}\n" f.write(s) # regions nregions = len(self._regions) - s = "{}\n".format(nregions) + s = f"{nregions}\n" f.write(s) for i, region in enumerate(self._regions): pt = region[0] @@ -804,7 +802,7 @@ def _write_polyfile(self, fname): maxarea = region[2] if maxarea is None: maxarea = -1.0 - s = "{} {} {} {} {}\n".format(i, pt[0], pt[1], attribute, maxarea) + s = f"{i} {pt[0]} {pt[1]} {attribute} {maxarea}\n" f.write(s) f.close() diff --git a/flopy/utils/util_array.py b/flopy/utils/util_array.py index 4043bfdec..d9c22a9c5 100644 --- a/flopy/utils/util_array.py +++ b/flopy/utils/util_array.py @@ -76,7 +76,7 @@ def __init__(self, u2d, python=None, fortran=None, array_free_format=None): assert isinstance( u2d, Util2d - ), "ArrayFormat only supports Util2d, not {0}".format(type(u2d)) + ), f"ArrayFormat only supports Util2d, not {type(u2d)}" if len(u2d.shape) == 1: self._npl_full = u2d.shape[0] else: @@ -144,7 +144,7 @@ def __str__(self): s = "ArrayFormat: npl:{0},format:{1},width:{2},decimal{3}".format( self.npl, self.format, self.width, self.decimal ) - s += ",isfree:{0},isbinary:{1}".format(self._isfree, self._isbinary) + s += f",isfree:{self._isfree},isbinary:{self._isbinary}" return s @staticmethod @@ -316,9 +316,9 @@ def _get_fortran_format(self): if self._isbinary: return "(BINARY)" - fd = "({0:d}{1:s}{2:d}".format(self.npl, self.format, self.width) + fd = f"({self.npl}{self.format}{self.width}" if self.decimal is not None: - fd += ".{0:d})".format(self.decimal) + fd += f".{self.decimal})" else: fd += ")" return fd @@ -359,7 +359,7 @@ def numpy(self): return self._get_numpy_format() def _get_numpy_format(self): - return "%{0}{1}.{2}".format(self.width, self.format, self.decimal) + return f"%{self.width}{self.format}.{self.decimal}" @staticmethod def decode_fortran_descriptor(fd): @@ -409,9 +409,7 @@ def decode_fortran_descriptor(fd): elif fmt == "EN": fmt = "E" return 
npl, fmt, width, decimal - raise Exception( - "Unrecognized format type: {} looking for: {}".format(fd, fmts) - ) + raise Exception(f"Unrecognized format type: {fd} looking for: {fmts}") def read1d(f, a): @@ -422,7 +420,7 @@ def read1d(f, a): """ if len(a.shape) != 1: raise ValueError( - "read1d: expected 1 dimension, found shape {0}".format(a.shape) + f"read1d: expected 1 dimension, found shape {a.shape}" ) values = [] while len(values) < a.shape[0]: @@ -555,7 +553,7 @@ def __init__( return if len(shape) != 3: raise ValueError( - "Util3d: expected 3 dimensions, found shape {0}".format(shape) + f"Util3d: expected 3 dimensions, found shape {shape}" ) self._model = model self.shape = shape @@ -578,7 +576,7 @@ def __init__( self.name_base.append(self.name[k]) else: if "Layer" not in self.name[k]: - self.name_base.append(self.name[k] + " Layer ") + self.name_base.append(f"{self.name[k]} Layer ") else: self.name_base.append(self.name[k]) self.fmtin = fmtin @@ -611,7 +609,7 @@ def __setitem__(self, k, value): self.util_2ds[k] = new_u2d(self.util_2ds[k], value) else: raise NotImplementedError( - "Util3d doesn't support setitem indices" + str(k) + f"Util3d doesn't support setitem indices: {k}" ) def __setattr__(self, key, value): @@ -665,7 +663,7 @@ def plot( file_extension=None, mflay=None, fignum=None, - **kwargs + **kwargs, ): """ Plot 3-D model input data @@ -736,7 +734,7 @@ def plot( file_extension=file_extension, mflay=mflay, fignum=fignum, - **kwargs + **kwargs, ) return axes @@ -748,7 +746,7 @@ def __getitem__(self, k): elif len(k) == 3: return self.array[k[0], k[1], k[2]] else: - raise Exception("Util3d error: unsupported indices:" + str(k)) + raise Exception(f"Util3d error: unsupported indices: {k}") def get_file_entry(self): s = "" @@ -810,9 +808,9 @@ def build_2d_instances(self): if isinstance(item, Util2d): # we need to reset the external name because most of the # load() methods don't use layer-specific names - item._ext_filename = self.ext_filename_base[ - i - ] + "{0}.ref".format(i + 1) + item._ext_filename = ( + f"{self.ext_filename_base[i]}{i + 1}.ref" + ) # reset the model instance in cases these Util2d's # came from another model instance item.model = self._model @@ -822,7 +820,7 @@ def build_2d_instances(self): ext_filename = None if self._model.external_path is not None: ext_filename = ( - self.ext_filename_base[i] + str(i + 1) + ".ref" + f"{self.ext_filename_base[i]}{i + 1}.ref" ) shape = self.shape[1:] if shape[0] is None: @@ -857,9 +855,7 @@ def build_2d_instances(self): ext_filename = None name = self.name_base[i] + str(i + 1) if self._model.external_path is not None: - ext_filename = ( - self.ext_filename_base[i] + str(i + 1) + ".ref" - ) + ext_filename = f"{self.ext_filename_base[i]}{i + 1}.ref" u2d = Util2d( self._model, self.shape[1:], @@ -893,12 +889,12 @@ def load( ): if len(shape) != 3: raise ValueError( - "Util3d: expected 3 dimensions, found shape {0}".format(shape) + f"Util3d: expected 3 dimensions, found shape {shape}" ) nlay, nrow, ncol = shape u2ds = [] for k in range(nlay): - u2d_name = name + "_Layer_{0}".format(k) + u2d_name = f"{name}_Layer_{k}" if nrow is None: nr = 1 nc = ncol[k] @@ -1084,7 +1080,7 @@ def plottable(self): return False def get_zero_3d(self, kper): - name = self.name_base + str(kper + 1) + "(filled zero)" + name = f"{self.name_base}{kper + 1}(filled zero)" return Util3d( self._model, self.shape, @@ -1105,7 +1101,7 @@ def __getitem__(self, kper): return self.transient_3ds[i] raise Exception( "Transient2d.__getitem__(): error: " - "could 
not find an entry before kper {0:d}".format(kper) + f"could not find an entry before kper {kper}" ) def __setitem__(self, key, value): @@ -1169,13 +1165,11 @@ def build_transient_sequence(self): key = int(key) except: raise Exception( - "Transient3d error: can't cast key: " - "{} to kper integer".format(key) + f"Transient3d error: can't cast key: {key} to kper integer" ) if key < 0: raise Exception( - "Transient3d error: key can't be negative: " - "{}".format(key) + f"Transient3d error: key can't be negative: {key}" ) try: u3d = self.__get_3d_instance(key, val) @@ -1208,15 +1202,14 @@ def build_transient_sequence(self): ) else: raise Exception( - "Transient3d error: value type not recognized: " - "{}".format(type(self.__value)) + f"Transient3d error: value type not recognized: {type(self.__value)}" ) def __get_3d_instance(self, kper, arg): """ parse an argument into a Util3d instance """ - name = "{}_period{}".format(self.name_base, kper + 1) + name = f"{self.name_base}_period{kper + 1}" u3d = Util3d( self._model, self.shape, @@ -1338,8 +1331,7 @@ def __init__( self._model = model if len(shape) != 2: raise ValueError( - "Transient2d: expected 2 dimensions (nrow, ncol), found " - "shape {0}".format(shape) + f"Transient2d: expected 2 dimensions (nrow, ncol), found shape {shape}" ) if shape[0] is None: # allow for unstructured so that ncol changes by layer @@ -1452,7 +1444,7 @@ def __setattr__(self, key, value): super().__setattr__(key, value) def get_zero_2d(self, kper): - name = self.name_base + str(kper + 1) + "(filled zero)" + name = f"{self.name_base}{kper + 1}(filled zero)" return Util2d( self._model, self.shape, @@ -1468,7 +1460,7 @@ def plot( file_extension=None, kper=0, fignum=None, - **kwargs + **kwargs, ): """ Plot transient 2-D model input data @@ -1546,7 +1538,7 @@ def plot( file_extension=file_extension, kper=kper, fignum=fignum, - **kwargs + **kwargs, ) return axes @@ -1562,7 +1554,7 @@ def __getitem__(self, kper): return self.transient_2ds[i] raise Exception( "Transient2d.__getitem__(): error: " - "could not find an entry before kper {0:d}".format(kper) + f"could not find an entry before kper {kper}" ) def __setitem__(self, key, value): @@ -1623,13 +1615,11 @@ def build_transient_sequence(self): key = int(key) except: raise Exception( - "Transient2d error: can't cast key: " - "{} to kper integer".format(key) + f"Transient2d error: can't cast key: {key} to kper integer" ) if key < 0: raise Exception( - "Transient2d error: key can't be negative: " - "{}".format(key) + f"Transient2d error: key can't be negative: {key}" ) try: u2d = self.__get_2d_instance(key, val) @@ -1662,8 +1652,7 @@ def build_transient_sequence(self): ) else: raise Exception( - "Transient2d error: value type not recognized: " - "{}".format(type(self.__value)) + f"Transient2d error: value type not recognized: {type(self.__value)}" ) def __get_2d_instance(self, kper, arg): @@ -1672,7 +1661,8 @@ def __get_2d_instance(self, kper, arg): """ ext_filename = None name = self.name_base + str(kper + 1) - ext_filename = self.ext_filename_base + str(kper) + ".ref" + # TODO: should ext_filename have "kper + 1" too? 
+ ext_filename = f"{self.ext_filename_base}{kper}.ref" u2d = Util2d( self._model, self.shape, @@ -1840,13 +1830,12 @@ def __init__( if np.dtype(int).itemsize != 4: # show warning for platforms where int is not 4-bytes warn( - "Util2d: setting integer dtype from {} to int32 for array {}".format( - dtype, name - ) + f"Util2d: setting integer dtype from {dtype} to int32 " + f"for array {name}" ) dtype = np.int32 if dtype not in [np.int32, np.float32, bool]: - raise TypeError("Util2d:unsupported dtype: " + str(dtype)) + raise TypeError(f"Util2d:unsupported dtype: {dtype!s}") if name is not None: name = name.lower() @@ -1932,7 +1921,7 @@ def plot( filename_base=None, file_extension=None, fignum=None, - **kwargs + **kwargs, ): """ Plot 2-D model input data @@ -2003,7 +1992,7 @@ def plot( filename_base=filename_base, file_extension=file_extension, fignum=fignum, - **kwargs + **kwargs, ) return axes @@ -2192,11 +2181,13 @@ def model_file_path(self): def get_constant_cr(self, value): if self.format.array_free_format: - lay_space = "{0:>27s}".format("") + lay_space = " " * 27 if self.vtype in [int, np.int32]: - lay_space = "{0:>32s}".format("") - cr = "CONSTANT " + self.format.py[1].format(value) - cr = "{0:s}{1:s}#{2:<30s}\n".format(cr, lay_space, self._name) + lay_space = " " * 32 + cr = ( + f"CONSTANT {self.format.py[1].format(value)}" + f"{lay_space}#{self._name:<30s}\n" + ) else: cr = self._get_fixed_cr(0, value=value) return cr @@ -2246,7 +2237,7 @@ def cnstnt_str(self): if isinstance(self.cnstnt, str): return self.cnstnt else: - return "{0:15.6G}".format(self.cnstnt) + return f"{self.cnstnt:15.6G}" def get_openclose_cr(self): cr = "OPEN/CLOSE {0:>30s} {1:15} {2:>10s} {3:2.0f} {4:<30s}\n".format( @@ -2285,11 +2276,7 @@ def get_file_entry(self, how=None): how = self._how if not self.format.array_free_format and self.format.free: - print( - "Util2d {0}: can't be free format...resetting".format( - self._name - ) - ) + print(f"Util2d {self._name}: can't be free format...resetting") self.format._isfree = False if ( @@ -2308,7 +2295,7 @@ def get_file_entry(self, how=None): "constant", "internal", ]: - print("Util2d:{0}: resetting 'how' to external".format(self._name)) + print(f"Util2d:{self._name}: resetting 'how' to external") if self.format.array_free_format: how = "openclose" else: @@ -2387,8 +2374,7 @@ def get_file_entry(self, how=None): else: raise Exception( - "Util2d.get_file_entry() error: " - "unrecognized 'how':{0}".format(how) + f"Util2d.get_file_entry() error: unrecognized 'how':{how}" ) @property @@ -2425,7 +2411,7 @@ def array(self): """ if isinstance(self.cnstnt, str): - print("WARNING: cnstnt is str for {0}".format(self.name)) + print(f"WARNING: cnstnt is str for {self.name}") return self._array.astype(self.dtype) if isinstance(self.cnstnt, (int, np.int32)): cnstnt = self.cnstnt @@ -2490,9 +2476,7 @@ def load_block(shape, file_in, dtype): """ if len(shape) != 2: raise ValueError( - "Util2d.load_block(): expected 2 dimensions, found shape {0}".format( - shape - ) + f"Util2d.load_block(): expected 2 dimensions, found shape {shape}" ) nrow, ncol = shape data = np.ma.zeros(shape, dtype=dtype) @@ -2507,8 +2491,7 @@ def load_block(shape, file_in, dtype): raw = line.split() if len(raw) < 5: raise ValueError( - "Util2d.load_block(): expected 5 items, " - "found {0}: {1}".format(len(raw), line) + f"Util2d.load_block(): expected 5 items, found {len(raw)}: {line}" ) i1, i2 = int(raw[0]) - 1, int(raw[1]) j1, j2 = int(raw[2]) - 1, int(raw[3]) @@ -2549,9 +2532,7 @@ def load_txt(shape, file_in, 
dtype, fmtin): num_items = nrow * ncol else: raise ValueError( - "Util2d.load_txt(): expected 1 or 2 dimensions, found shape {0}".format( - shape - ) + f"Util2d.load_txt(): expected 1 or 2 dimensions, found shape {shape}" ) openfile = not hasattr(file_in, "read") if openfile: @@ -2642,11 +2623,9 @@ def array2string(shape, data, fortran_format="(FREE)", python_format=None): decimal, ) = ArrayFormat.decode_fortran_descriptor(fortran_format) if decimal is None: - output_fmt = "{0}0:{1}{2}{3}".format("{", width, "d", "}") + output_fmt = f"{{0:{width}d}}" else: - output_fmt = "{0}0:{1}.{2}{3}{4}".format( - "{", width, decimal, fmt, "}" - ) + output_fmt = f"{{0:{width}.{decimal}{fmt}}}" else: try: column_length, output_fmt = ( @@ -2708,11 +2687,7 @@ def load_bin(shape, file_in, dtype, bintype=None): dtype = np.dtype(dtype) if dtype.itemsize != 4: # show warning for platforms where int is not 4-bytes - warn( - "Util2d: setting integer dtype from {0} to int32".format( - dtype - ) - ) + warn(f"Util2d: setting integer dtype from {dtype} to int32") dtype = np.int32 openfile = not hasattr(file_in, "read") if openfile: @@ -2761,8 +2736,7 @@ def parse_value(self, value): except: raise Exception( - "Util2d:could not cast " - 'boolean value to type "bool": {}'.format(value) + f'Util2d:could not cast boolean value to type "bool": {value}' ) else: raise Exception( @@ -2796,16 +2770,14 @@ def parse_value(self, value): self.__value = np.int32(value) except: raise Exception( - "Util2d:could not cast scalar " - 'value to type "int": {}'.format(value) + f'Util2d:could not cast scalar value to type "int": {value}' ) elif self._dtype == np.float32: try: self.__value = np.float32(value) except: raise Exception( - "Util2d:could not cast " - 'scalar value to type "float": {}'.format(value) + f'Util2d:could not cast scalar value to type "float": {value}' ) elif isinstance(value, np.ndarray): @@ -2824,7 +2796,7 @@ def parse_value(self, value): else: raise Exception( - "Util2d:unsupported type in util_array: " + str(type(value)) + f"Util2d:unsupported type in util_array: {type(value)}" ) @classmethod @@ -2897,9 +2869,7 @@ def load( # load_txt(shape, file_in, dtype, fmtin): assert os.path.exists( fname - ), "Util2d.load() error: open/close file {} not found".format( - fname - ) + ), f"Util2d.load() error: open/close file {fname} not found" if str("binary") not in str(cr_dict["fmtin"].lower()): f = open(fname, "r") data = Util2d.load_txt( @@ -3033,8 +3003,7 @@ def parse_control_record( fname = ext_unit_dict[int(raw[1])].filename.strip() except: print( - " could not determine filename " - "for unit {}".format(raw[1]) + f" could not determine filename for unit {raw[1]}" ) nunit = int(raw[1]) diff --git a/flopy/utils/util_list.py b/flopy/utils/util_list.py index e559e1bd6..209afc9f7 100644 --- a/flopy/utils/util_list.py +++ b/flopy/utils/util_list.py @@ -144,10 +144,7 @@ def append(self, other): model=self._model, list_free_format=self.list_free_format, ) - msg = ( - "MfList.append(): other arg must be " - "MfList or dict, not {0}".format(type(other)) - ) + msg = f"MfList.append(): other arg must be MfList or dict, not {type(other)}" assert isinstance(other, MfList), msg other_kpers = list(other.data.keys()) @@ -289,8 +286,7 @@ def fmt_string(self): raise TypeError(msg) else: raise TypeError( - "MfList.fmt_string error: unknown vtype in " - "field: {}".format(field) + f"MfList.fmt_string error: unknown vtype in field: {field}" ) if use_free: fmt_string = " " + " ".join(fmts) @@ -309,7 +305,7 @@ def __cast_data(self, data): 
data = np.array(data) except Exception as e: raise Exception( - "MfList error: casting list to ndarray: " + str(e) + f"MfList error: casting list to ndarray: {e!s}" ) # If data is a dict, the we have to assume it is keyed on kper @@ -321,8 +317,8 @@ kper = int(kper) except Exception as e: raise Exception( - "MfList error: data dict key {:s} not integer: " - "{}\n{}".format(kper, type(kper), e) + f"MfList error: data dict key {kper} not integer: " + f"{type(kper)}\n{e!s}" ) # Same as before, just try... if isinstance(d, list): @@ -332,8 +328,7 @@ d = np.array(d) except Exception as e: raise Exception( - "MfList error: casting list " - "to ndarray: {}".format(e) + f"MfList error: casting list to ndarray: {e}" ) # super hack - sick of recarrays already @@ -354,7 +349,7 @@ else: raise Exception( "MfList error: unsupported data type: " - "{} at kper {:d}".format(type(d), kper) + f"{type(d)} at kper {kper}" ) # A single recarray - same MfList for all stress periods @@ -368,14 +363,14 @@ self.__cast_str(0, data) else: raise Exception( - "MfList error: unsupported data type: " + str(type(data)) + f"MfList error: unsupported data type: {type(data)}" ) def __cast_str(self, kper, d): # If d is a string, assume it is a filename and check that it exists assert os.path.exists(d), ( - "MfList error: dict filename (string) '{}' value for " - "kper {:d} not found".format(d, kper) + f"MfList error: dict filename (string) '{d}' value for " + f"kper {kper} not found" ) self.__data[kper] = d self.__vtype[kper] = str @@ -418,7 +413,7 @@ def __cast_ndarray(self, kper, d): ) except Exception as e: raise Exception( - "MfList error: casting ndarray to recarray: " + str(e) + f"MfList error: casting ndarray to recarray: {e!s}" ) self.__vtype[kper] = np.recarray @@ -546,8 +541,7 @@ def add_record(self, kper, index, values): self.__data[kper][-1] = tuple(rec) except Exception as e: raise Exception( - "MfList.add_record() error: adding record to " - "recarray: {}".format(e) + f"MfList.add_record() error: adding record to recarray: {e}" ) def __getitem__(self, kper): @@ -561,8 +555,7 @@ def __getitem__(self, kper): kper = int(kper) except Exception as e: raise Exception( - "MfList error: _getitem__() passed invalid kper index: " - + str(kper) + f"MfList error: __getitem__() passed invalid kper index: {kper}" ) if kper not in list(self.data.keys()): if kper == 0: @@ -582,7 +575,7 @@ def __getitem__(self, kper): def __setitem__(self, kper, data): if kper in list(self.__data.keys()): if self._model.verbose: - print("removing existing data for kper={}".format(kper)) + print(f"removing existing data for kper={kper}") self.data.pop(kper) # If data is a list, then all we can do is try to cast it to # an ndarray, then cast again to a recarray @@ -592,7 +585,7 @@ data = np.array(data) except Exception as e: raise Exception( - "MfList error: casting list to ndarray: " + str(e) + f"MfList error: casting list to ndarray: {e!s}" ) # cast data if isinstance(data, int): @@ -607,7 +600,7 @@ self.__cast_str(kper, data) else: raise Exception( - "MfList error: unsupported data type: " + str(type(data)) + f"MfList error: unsupported data type: {type(data)}" ) # raise NotImplementedError("MfList.__setitem__() not implemented") @@ -618,8 +611,7 @@ def __fromfile(self, f): d = np.genfromtxt(f, dtype=self.dtype) except Exception as e: raise Exception( - 
"MfList.__fromfile() error reading recarray from file " - + str(e) + f"MfList.__fromfile() error reading recarray from file {e!s}" ) return d @@ -643,7 +635,7 @@ def get_filenames(self): # py_filepath = '' # py_filepath = os.path.join(py_filepath, # self._model.external_path) - filename = self.package.name[0] + "_{0:04d}.dat".format(kper) + filename = f"{self.package.name[0]}_{kper:04d}.dat" filenames.append(filename) return filenames @@ -651,7 +643,7 @@ def get_filename(self, kper): ext = "dat" if self.binary: ext = "bin" - return self.package.name[0] + "_{0:04d}.{1}".format(kper, ext) + return f"{self.package.name[0]}_{kper:04d}.{ext}" @property def binary(self): @@ -697,11 +689,7 @@ def write_transient(self, f, single_per=None, forceInternal=False): itmp = -1 kper_vtype = int - f.write( - " {0:9d} {1:9d} # stress period {2:d}\n".format( - itmp, 0, kper + 1 - ) - ) + f.write(f" {itmp:9d} {0:9d} # stress period {kper + 1}\n") isExternal = False if ( @@ -744,7 +732,7 @@ def write_transient(self, f, single_per=None, forceInternal=False): else: self.__tofile(f, kper_data) elif kper_vtype == str: - f.write(" open/close " + kper_data) + f.write(f" open/close {kper_data}") if self.__binary: f.write(" (BINARY)") f.write("\n") @@ -884,7 +872,7 @@ def plot( filename_base=None, file_extension=None, mflay=None, - **kwargs + **kwargs, ): """ Plot stress period boundary condition (MfList) data for a specified @@ -965,7 +953,7 @@ def plot( filename_base=filename_base, file_extension=file_extension, mflay=mflay, - **kwargs + **kwargs, ) return axes @@ -1212,8 +1200,7 @@ def masked4D_arrays_to_stress_period_data(dtype, m4ds): a2 = np.isnan(m4ds[key2]) if not np.array_equal(a1, a2): raise Exception( - "Transient2d error: masking not equal " - "for {0} and {1}".format(key1, key2) + f"Transient2d error: masking not equal for {key1} and {key2}" ) sp_data = {} diff --git a/flopy/utils/voronoi.py b/flopy/utils/voronoi.py index a7fa12604..2b89fa308 100644 --- a/flopy/utils/voronoi.py +++ b/flopy/utils/voronoi.py @@ -287,7 +287,5 @@ def plot(self, ax=None, plot_title=True, **kwargs): ax = plt.subplot(1, 1, 1, aspect="equal") pc = self.get_patch_collection(ax, **kwargs) if plot_title: - ax.set_title( - "ncells: {}; nverts: {}".format(self.ncpl, self.nverts) - ) + ax.set_title(f"ncells: {self.ncpl}; nverts: {self.nverts}") return ax diff --git a/flopy/utils/zonbud.py b/flopy/utils/zonbud.py index 778982107..16d185382 100644 --- a/flopy/utils/zonbud.py +++ b/flopy/utils/zonbud.py @@ -50,7 +50,7 @@ def __init__( totim=None, aliases=None, verbose=False, - **kwargs + **kwargs, ): from .binaryfile import CellBudgetFile @@ -59,9 +59,7 @@ def __init__( elif isinstance(cbc_file, str) and os.path.isfile(cbc_file): self.cbc = CellBudgetFile(cbc_file) else: - raise Exception( - "Cannot load cell budget file: {}.".format(cbc_file) - ) + raise Exception(f"Cannot load cell budget file: {cbc_file}.") if isinstance(z, np.ndarray): assert np.issubdtype( @@ -88,7 +86,7 @@ def __init__( self.dis = kwargs.pop("dis") if len(kwargs.keys()) > 0: args = ",".join(kwargs.keys()) - raise Exception("LayerFile error: unrecognized kwargs: " + args) + raise Exception(f"LayerFile error: unrecognized kwargs: {args}") # Check the shape of the cbc budget file arrays self.cbc_shape = self.cbc.get_data(idx=0, full3D=True)[0].shape @@ -102,10 +100,7 @@ def __init__( if isinstance(kstpkper, tuple): kstpkper = [kstpkper] for kk in kstpkper: - s = ( - "The specified time step/stress period " - "does not exist {}".format(kk) - ) + s = f"The specified time 
step/stress period does not exist {kk}" assert kk in self.cbc.get_kstpkper(), s self.kstpkper = kstpkper elif totim is not None: @@ -114,10 +109,7 @@ def __init__( elif isinstance(totim, int): totim = [float(totim)] for t in totim: - s = ( - "The specified simulation time " - "does not exist {}".format(t) - ) + s = f"The specified simulation time does not exist {t}" assert t in self.cbc.get_times(), s self.totim = totim else: @@ -146,12 +138,12 @@ def __init__( izone = np.zeros(self.cbc_shape, self.int_type) izone[:] = z[0, :, :] else: - e = "Shape of the zone array is not recognized: {}".format(z.shape) + e = f"Shape of the zone array is not recognized: {z.shape}" raise Exception(e) self.izone = izone self.allzones = np.unique(izone) - self._zonenamedict = {z: "ZONE_{}".format(z) for z in self.allzones} + self._zonenamedict = {z: f"ZONE_{z}" for z in self.allzones} if aliases is not None: s = ( @@ -237,7 +229,7 @@ def __init__( elif self.totim is not None: for t in self.totim: if verbose: - s = "Computing the budget for time {}".format(t) + s = f"Computing the budget for time {t}" print(s) self._compute_budget(totim=t) @@ -1061,7 +1053,7 @@ def _accumulate_flow_ssst(self, recname, kstpkper, totim): else: # Should not happen raise Exception( - 'Unrecognized "imeth" for {} record: {}'.format(recname, imeth) + f'Unrecognized "imeth" for {recname} record: {imeth}' ) # Inflows @@ -1518,9 +1510,7 @@ def read_zone_file(cls, fname): # READ EXTERNAL FILE fname = rowitems[0] if not os.path.isfile(fname): - errmsg = 'Could not find external file "{}"'.format( - fname - ) + errmsg = f'Could not find external file "{fname}"' raise Exception(errmsg) with open(fname, "r") as ext_f: ext_flines = ext_f.readlines() @@ -1537,7 +1527,7 @@ def read_zone_file(cls, fname): raise Exception(errmsg) else: # Should not get here - raise Exception("Locat not recognized: {}".format(locat)) + raise Exception(f"Locat not recognized: {locat}") # IGNORE COMPOSITE ZONES @@ -1602,9 +1592,7 @@ def write_zone_file(cls, fname, array, fmtin=None, iprn=None): array = b.copy() elif len(array.shape) < 2 or len(array.shape) > 3: raise Exception( - "Shape of the input array is not recognized: {}".format( - array.shape - ) + f"Shape of the input array is not recognized: {array.shape}" ) if np.ma.is_masked(array): array = np.ma.filled(array, 0) @@ -1623,18 +1611,14 @@ def write_zone_file(cls, fname, array, fmtin=None, iprn=None): if iprn is None or iprn <= iprnmin: iprn = iprnmin + 1 - formatter_str = "{{:>{iprn}}}".format(iprn=iprn) + formatter_str = f"{{:>{iprn}}}" formatter = formatter_str.format with open(fname, "w") as f: - header = "{nlay} {nrow} {ncol}\n".format( - nlay=nlay, nrow=nrow, ncol=ncol - ) + header = f"{nlay} {nrow} {ncol}\n" f.write(header) for lay in range(nlay): - record_2 = "INTERNAL\t({fmtin}I{iprn})\n".format( - fmtin=fmtin, iprn=iprn - ) + record_2 = f"INTERNAL\t({fmtin}I{iprn})\n" f.write(record_2) if fmtin < ncol: for row in range(nrow): @@ -1861,7 +1845,7 @@ def __setattr__(self, key, value): def __getattr__(self, item): if item in ("zon", "bud", "grb", "name", "model_ws"): - item = "_{}".format(item) + item = f"_{item}" return super().__getattribute__(item) def add_package(self, pkg_name, pkg): @@ -1882,7 +1866,7 @@ def add_package(self, pkg_name, pkg): pkg_name = "bud" else: raise KeyError( - "{} package is not valid for zonebudget".format(pkg_name) + f"{pkg_name} package is not valid for zonebudget" ) if isinstance(pkg, str): @@ -1898,7 +1882,7 @@ def add_package(self, pkg_name, pkg): else: pass - 
pkg_name = "_{}".format(pkg_name) + pkg_name = f"_{pkg_name}" self.__setattr__(pkg_name, pkg) if pkg is not None: self.package_dict[pkg_name[1:]] = pkg @@ -1929,9 +1913,7 @@ def change_model_name(self, name): """ self._name = name if self._zon is not None: - self._zon.filename = "{}.{}".format( - name, self._zon.filename.split(".")[-1] - ) + self._zon.filename = f"{name}.{self._zon.filename.split('.')[-1]}" def get_dataframes( self, @@ -2021,7 +2003,7 @@ def get_budget( aliases = self._zon.aliases if f is None and self._recarray is None: - f = os.path.join(self._model_ws, self._name + ".csv") + f = os.path.join(self._model_ws, f"{self._name}.csv") self._recarray = _read_zb_csv2( f, add_prefix=False, aliases=aliases ) @@ -2094,7 +2076,7 @@ def write_input(self, line_length=20): else: path = pkg.filename pkg.write_input(line_length=line_length) - nam.append(" {} {}\n".format(pkg_nam.upper(), path)) + nam.append(f" {pkg_nam.upper()} {path}\n") path = os.path.join(self._model_ws, self._name + self._extension) with open(path, "w") as foo: @@ -2197,7 +2179,7 @@ def __init__(self, model, izone, extension=".zon", aliases=None): self.filename = self._parent.name + extension self.aliases = aliases self.allzones = [int(z) for z in np.unique(izone) if z != 0] - self._zonenamedict = {z: "ZONE_{}".format(z) for z in self.allzones} + self._zonenamedict = {z: f"ZONE_{z}" for z in self.allzones} if aliases is not None: if not isinstance(aliases, dict): @@ -2210,7 +2192,7 @@ def __init__(self, model, izone, extension=".zon", aliases=None): self.aliases[zn] = "_".join(alias.split()) else: pop_list.append(zn) - print("warning: zone number {} not found".format(zn)) + print(f"warning: zone number {zn} not found") for p in pop_list: aliases.pop(p) @@ -2240,8 +2222,8 @@ def write_input(self, f=None, line_length=20): with open(f, "w") as foo: bfmt = [" {:d}"] foo.write( - "BEGIN DIMENSIONS\n NCELLS {:d}\n" - "END DIMENSIONS\n\n".format(self.ncells) + f"BEGIN DIMENSIONS\n NCELLS {self.ncells}\n" + "END DIMENSIONS\n\n" ) foo.write("BEGIN GRIDDATA\n IZONE\n") @@ -2389,11 +2371,11 @@ def _recarray_to_dataframe( try: import pandas as pd except Exception as e: - msg = "ZoneBudget.get_dataframes() error import pandas: " + str(e) + msg = f"ZoneBudget.get_dataframes() error import pandas: {e!s}" raise ImportError(msg) valid_index_keys = ["totim", "kstpkper"] - s = 'index_key "{}" is not valid.'.format(index_key) + s = f'index_key "{index_key}" is not valid.' assert index_key in valid_index_keys, s valid_timeunit = ["S", "M", "H", "D", "Y"] @@ -2410,8 +2392,7 @@ def _recarray_to_dataframe( timeunit = "Y" errmsg = ( - "Specified time units ({}) not recognized. " - "Please use one of ".format(timeunit) + f"Specified time units ({timeunit}) not recognized. Please use one of " ) assert timeunit in valid_timeunit, errmsg + ", ".join(valid_timeunit) + "." 
@@ -2640,8 +2621,8 @@ def _read_zb_zblst(fname): line = foo.readline().strip() zones = [int(i) for i in line.split()] for zone in zones: - data["TO_ZONE_{}".format(zone)] = [] - data["FROM_ZONE_{}".format(zone)] = [] + data[f"TO_ZONE_{zone}"] = [] + data[f"FROM_ZONE_{zone}"] = [] if "FLOW BUDGET FOR ZONE" in line: flow_budget = True @@ -2675,9 +2656,9 @@ def _read_zb_zblst(fname): if "ZONE" in line: if prefix == "FROM_": zlist.append(int(label.split()[1])) - label = "FROM_ZONE_{}".format(label.split()[1]) + label = f"FROM_ZONE_{label.split()[1]}" else: - label = "TO_ZONE_{}".format(label.split()[-1]) + label = f"TO_ZONE_{label.split()[-1]}" elif "TOTAL" in line or "PERCENT DISCREPANCY" in line: label = "_".join(label.split()) @@ -2698,8 +2679,8 @@ def _read_zb_zblst(fname): for zone in zones: if zone in zlist: continue - data["FROM_ZONE_{}".format(zone)].append(0) - data["TO_ZONE_{}".format(zone)].append(0) + data[f"FROM_ZONE_{zone}"].append(0) + data[f"TO_ZONE_{zone}"].append(0) elif "OUT:" in line: prefix = "TO_" @@ -2896,9 +2877,9 @@ def _zb_dict_to_recarray(data, aliases=None): if zn in aliases: zone_dtypes.append((aliases[zn], float)) else: - zone_dtypes.append(("ZONE_{}".format(int(zn)), float)) + zone_dtypes.append((f"ZONE_{int(zn)}", float)) else: - zone_dtypes.append(("ZONE_{}".format(int(zn)), float)) + zone_dtypes.append((f"ZONE_{int(zn)}", float)) dtype = [ ("totim", float), diff --git a/flopy/version.py b/flopy/version.py index 3afa903fd..b402fdb88 100644 --- a/flopy/version.py +++ b/flopy/version.py @@ -4,7 +4,7 @@ major = 3 minor = 3 micro = 5 -__version__ = "{:d}.{:d}.{:d}".format(major, minor, micro) +__version__ = f"{major}.{minor}.{micro}" __pakname__ = "flopy" diff --git a/release/make-release.py b/release/make-release.py index 3d128c361..da781c004 100644 --- a/release/make-release.py +++ b/release/make-release.py @@ -27,9 +27,9 @@ authors = [] for key in author_dict.keys(): t = key.split() - author = "{}".format(t[-1]) + author = f"{t[-1]}" for str in t[0:-1]: - author += " {}".format(str) + author += f" {str}" authors.append(author) approved = """Disclaimer @@ -107,13 +107,13 @@ def get_branch(): def get_version_str(v0, v1, v2): - version_type = ("{}".format(v0), "{}".format(v1), "{}".format(v2)) + version_type = (f"{v0}", f"{v1}", f"{v2}") version = ".".join(version_type) return version def get_tag(v0, v1, v2): - tag_type = ("{}".format(v0), "{}".format(v1), "{}".format(v2)) + tag_type = (f"{v0}", f"{v1}", f"{v2}") tag = ".".join(tag_type) return tag @@ -131,26 +131,24 @@ def get_software_citation(version, is_approved): if ipos == len(authors) - 1: line += "and " sv = author.split() - tauthor = "{}".format(sv[0]) + tauthor = f"{sv[0]}" if len(sv) < 3: gname = sv[1] if len(gname) > 1: - tauthor += ", {}".format(gname) + tauthor += f", {gname}" else: - tauthor += ", {}.".format(gname[0]) + tauthor += f", {gname[0]}." else: - tauthor += ", {}. {}.".format(sv[1][0], sv[2][0]) + tauthor += f", {sv[1][0]}. {sv[2][0]}." # add formatted author name to line line += tauthor # add the rest of the citation line += ( - ", {}, ".format(now.year) - + "FloPy v{}{}: ".format(version, sb) - + "U.S. Geological Survey Software Release, " - + "{}, ".format(now.strftime("%d %B %Y")) - + "http://dx.doi.org/10.5066/F7BK19FH]" - + "(http://dx.doi.org/10.5066/F7BK19FH)" + f", {now.year}, FloPy v{version}{sb}: " + f"U.S. 
Geological Survey Software Release, {now:%d %B %Y}, " + "http://dx.doi.org/10.5066/F7BK19FH]" + "(http://dx.doi.org/10.5066/F7BK19FH)" ) return line @@ -186,26 +184,23 @@ def update_version(): f = open(fpth, "w") f.write( ( - "# {} version file automatically created " - "using...{}\n".format(pak, os.path.basename(__file__)) + f"# {pak} version file automatically created " + f"using...{os.path.basename(__file__)}\n" ) ) f.write( - "# created on..." - + "{0}\n".format( - datetime.datetime.now().strftime("%B %d, %Y %H:%M:%S") - ) + f"# created on...{datetime.datetime.now():%B %d, %Y %H:%M:%S}\n" ) f.write("\n") - f.write("major = {}\n".format(vmajor)) - f.write("minor = {}\n".format(vminor)) - f.write("micro = {}\n".format(vmicro)) - f.write('__version__ = "{:d}.{:d}.{:d}".format(major, minor, micro)\n') + f.write(f"major = {vmajor}\n") + f.write(f"minor = {vminor}\n") + f.write(f"micro = {vmicro}\n") + f.write('__version__ = f"{major}.{minor}.{micro}"\n') # write the remainder of the version file if name_pos is not None: for line in lines[name_pos:]: - f.write("{}\n".format(line)) + f.write(f"{line}\n") f.close() print("Successfully updated version.py") except: @@ -283,7 +278,7 @@ def update_readme_markdown(vmajor, vminor, vmicro): f = open(fpth, "w") for line in lines: if "### Version " in line: - line = "### Version {}".format(version) + line = f"### Version {version}" if not is_approved: line += " — release candidate" elif "[flopy continuous integration]" in line: @@ -319,7 +314,7 @@ def update_readme_markdown(vmajor, vminor, vmicro): elif "Disclaimer" in line: line = disclaimer terminate = True - f.write("{}\n".format(line)) + f.write(f"{line}\n") if terminate: break @@ -362,7 +357,7 @@ def update_notebook_examples_markdown(): "(https://mybinder.org/v2/gh/modflowpy/flopy.git/" "{})".format(branch) ) - f.write("{}\n".format(line)) + f.write(f"{line}\n") f.close() @@ -388,7 +383,7 @@ def update_PyPi_release(vmajor, vminor, vmicro): elif "Disclaimer" in line: line = disclaimer terminate = True - f.write("{}\n".format(line)) + f.write(f"{line}\n") if terminate: break diff --git a/release/run_notebooks.py b/release/run_notebooks.py index 1ae2aff3b..c3132dfd9 100644 --- a/release/run_notebooks.py +++ b/release/run_notebooks.py @@ -49,4 +49,4 @@ # write out failed runs for idx, src in enumerate(failed_runs): - print("{:2d}...{} FAILED".format(idx + 1, src)) + print(f"{idx + 1:2d}...{src} FAILED") diff --git a/release/update-version_changes.py b/release/update-version_changes.py index 0b3b96290..d007dfcea 100644 --- a/release/update-version_changes.py +++ b/release/update-version_changes.py @@ -63,7 +63,7 @@ def get_version(): f.close() - return "{:d}.{:d}.{:d}".format(major, minor, micro) + return f"{major}.{minor}.{micro}" def get_branch(): @@ -111,7 +111,7 @@ def get_hash_dict(branch): # get hash and fmt = '--pretty="%H"' - since = '--since="{}"'.format(tag_date) + since = f'--since="{tag_date}"' hash_dict = {"fix": {}, "feat": {}} cmdlist = ("git", "log", branch, fmt, since) stdout = process_Popen(cmdlist) @@ -124,7 +124,7 @@ def get_hash_dict(branch): # parse stdout for line in stdout.splitlines(): hash = line.split()[0].replace('"', "") - url = "https://github.com/modflowpy/flopy/commit/{}".format(hash) + url = f"https://github.com/modflowpy/flopy/commit/{hash}" fmt = '--pretty="%s."' cmdlist = ("git", "log", fmt, "-n1", hash) subject = process_Popen(cmdlist).strip().replace('"', "") @@ -135,7 +135,7 @@ def get_hash_dict(branch): key = None if ipos > -1: type = subject[0:ipos] - subject = 
subject.replace(type + ":", "").strip().capitalize() + subject = subject.replace(f"{type}:", "").strip().capitalize() for tag in fix_tags: if type.lower().startswith(tag): key = "fix" @@ -162,8 +162,8 @@ def get_hash_dict(branch): break if key is not None: - message = "[{}]({}): ".format(type, url) - message += subject + " " + cdate + message = f"[{type}]({url}): " + message += f"{subject} {cdate}" if key == "fix": fix_dict[hash] = message elif key == "feat": @@ -180,7 +180,7 @@ def create_md(hash_dict): # get current version information version = get_version() tag = "### Version" - version_text = "{} {}".format(tag, version) + version_text = f"{tag} {version}" # # read the lines in the existing version_changes.md @@ -202,11 +202,11 @@ def create_md(hash_dict): # write the changes for the latest comment if write_update: write_update = False - f.write("{}\n\n".format(version_text)) + f.write(f"{version_text}\n\n") write_version_changes(f, hash_dict) if write_line: - f.write("{}\n".format(line)) + f.write(f"{line}\n") f.close()