Fix unclosed file warnings #53

Open · wants to merge 2 commits into main

2 changes: 1 addition & 1 deletion .github/workflows/run_tests.yml
@@ -41,7 +41,7 @@ jobs:
- uses: "actions/setup-python@v2"
with:
python-version: "${{ matrix.python }}"
- uses: "actions/cache@v2"
- uses: "actions/cache@v3"
id: "cache"
with:
path: "${{ env.pythonLocation }}"
8 changes: 7 additions & 1 deletion README.md
@@ -23,8 +23,14 @@ conda install -c conda-forge xarray_selafin

```python
import xarray as xr
ds = xr.open_dataset("tests/data/r3d_tidal_flats.slf", engine="selafin")

with xr.open_dataset("tests/data/r3d_tidal_flats.slf", engine="selafin") as ds:
print(ds) # do something with `ds`...
# `ds.close()` not necessary

ds = xr.open_dataset("tests/data/r3d_tidal_flats.slf", lang="fr", engine="selafin") # if variables are in French
print(ds) # do something with `ds`...
ds.close() # avoid a ResourceWarning (unclosed file)
```

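The `ResourceWarning` this PR eliminates is silent by default: CPython only emits it when an unclosed file object is finalized, and the category is ignored unless warnings are enabled. A minimal sketch of how to surface the warning, assuming only the test file shipped in the repository:

```python
# Run with: python -W always::ResourceWarning demo.py
import gc

import xarray as xr

ds = xr.open_dataset("tests/data/r3d_tidal_flats.slf", engine="selafin")
print(ds)
del ds        # drop the last reference without calling ds.close()
gc.collect()  # finalizing the unclosed handle emits the ResourceWarning
```

The test-suite equivalent is `pytest -W error::ResourceWarning`, which turns any leaked handle into a hard failure.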
138 changes: 69 additions & 69 deletions tests/io_test.py
@@ -34,54 +34,61 @@ def write_netcdf(ds, nc_out):
ds.to_netcdf(nc_out)


@TIDAL_FLATS
def test_open_dataset(slf_in):
ds = xr.open_dataset(slf_in, engine="selafin")
assert isinstance(ds, xr.Dataset)
repr(ds)

# Dimensions
assert ds.sizes["time"] == 17
assert ds.sizes["node"] == 648
if "r3d" in slf_in:
assert ds.sizes["plan"] == 21
def equals_dataset_vs_netcdf_export(ds, nc_out):
write_netcdf(ds, nc_out)
ds_nc = xr.open_dataset(nc_out)
return ds_nc.equals(ds)

# Coordinates
assert "x" in ds.coords
assert "y" in ds.coords
assert "time" in ds.coords

# Attributes
assert ds.attrs["endian"] == ">"
assert ds.attrs["date_start"] == (1900, 1, 1, 0, 0, 0)
assert "ipobo" in ds.attrs
assert "ikle2" in ds.attrs
if "r3d_" in slf_in:
assert "ikle3" in ds.attrs
else:
assert "ikle3" not in ds.attrs
@TIDAL_FLATS
def test_open_dataset(slf_in):
with xr.open_dataset(slf_in, engine="selafin") as ds:
assert isinstance(ds, xr.Dataset)
repr(ds)

# Dimensions
assert ds.sizes["time"] == 17
assert ds.sizes["node"] == 648
if "r3d" in slf_in:
assert ds.sizes["plan"] == 21

# Coordinates
assert "x" in ds.coords
assert "y" in ds.coords
assert "time" in ds.coords

# Attributes
assert ds.attrs["endian"] == ">"
assert ds.attrs["date_start"] == (1900, 1, 1, 0, 0, 0)
assert "ipobo" in ds.attrs
assert "ikle2" in ds.attrs
if "r3d_" in slf_in:
assert "ikle3" in ds.attrs
else:
assert "ikle3" not in ds.attrs


@TIDAL_FLATS
def test_to_netcdf(tmp_path, slf_in):
ds_slf = xr.open_dataset(slf_in, engine="selafin")
nc_out = tmp_path / "test.nc"
write_netcdf(ds_slf, nc_out)
ds_nc = xr.open_dataset(nc_out)
assert ds_nc.equals(ds_slf)
with xr.open_dataset(slf_in, engine="selafin") as ds_slf:
nc_out = tmp_path / "test.nc"
write_netcdf(ds_slf, nc_out)
ds_nc = xr.open_dataset(nc_out)
assert ds_nc.equals(ds_slf)


@TIDAL_FLATS
def test_to_selafin(tmp_path, slf_in):
ds_slf = xr.open_dataset(slf_in, engine="selafin")
with xr.open_dataset(slf_in, engine="selafin") as ds_slf:

# Remove some data which is rebuilt
del ds_slf.attrs["date_start"]
# Remove some data which is rebuilt
del ds_slf.attrs["date_start"]

slf_out = tmp_path / "test.slf"
ds_slf.selafin.write(slf_out)
ds_slf2 = xr.open_dataset(slf_out, engine="selafin")
assert ds_slf2.equals(ds_slf)
slf_out = tmp_path / "test.slf"
ds_slf.selafin.write(slf_out)

with xr.open_dataset(slf_out, engine="selafin") as ds_slf2:
assert ds_slf2.equals(ds_slf)

# Compare binary files
with open(slf_in, "rb") as in_slf1, open(slf_out, "rb") as in_slf2:
@@ -90,15 +97,16 @@ def test_to_selafin(tmp_path, slf_in):

@TIDAL_FLATS
def test_to_selafin_eager_mode(tmp_path, slf_in):
ds_slf = xr.open_dataset(slf_in, lazy_loading=False, engine="selafin")
with xr.open_dataset(slf_in, lazy_loading=False, engine="selafin") as ds_slf:

# Remove some data which is rebuilt
del ds_slf.attrs["date_start"]
# Remove some data which is rebuilt
del ds_slf.attrs["date_start"]

slf_out = tmp_path / "test.slf"
ds_slf.selafin.write(slf_out)
ds_slf2 = xr.open_dataset(slf_out, engine="selafin")
assert ds_slf2.equals(ds_slf)
slf_out = tmp_path / "test.slf"
ds_slf.selafin.write(slf_out)

with xr.open_dataset(slf_out, engine="selafin") as ds_slf2:
assert ds_slf2.equals(ds_slf)

# Compare binary files
with open(slf_in, "rb") as in_slf1, open(slf_out, "rb") as in_slf2:
@@ -108,34 +116,26 @@ def test_to_selafin_eager_mode(tmp_path, slf_in):
@TIDAL_FLATS
def test_slice(tmp_path, slf_in):
# simple selection
ds_slf = xr.open_dataset(slf_in, engine="selafin")
nc_out = tmp_path / "test1.nc"
ds_slice = ds_slf.isel(time=0)
write_netcdf(ds_slice, nc_out)
ds_nc = xr.open_dataset(nc_out)
assert ds_nc.equals(ds_slice)
with xr.open_dataset(slf_in, engine="selafin") as ds_slf:
nc_out = tmp_path / "test1.nc"
ds_slice = ds_slf.isel(time=0)
assert equals_dataset_vs_netcdf_export(ds_slice, nc_out)
# simple range
ds_slf = xr.open_dataset(slf_in, engine="selafin")
nc_out = tmp_path / "test2.nc"
ds_slice = ds_slf.isel(time=slice(0, 10))
write_netcdf(ds_slice, nc_out)
ds_nc = xr.open_dataset(nc_out)
assert ds_nc.equals(ds_slice)
with xr.open_dataset(slf_in, engine="selafin") as ds_slf:
nc_out = tmp_path / "test2.nc"
ds_slice = ds_slf.isel(time=slice(0, 10))
assert equals_dataset_vs_netcdf_export(ds_slice, nc_out)
if "r3d" in slf_in:
# multiple slices
ds_slf = xr.open_dataset(slf_in, engine="selafin")
nc_out = tmp_path / "test3.nc"
ds_slice = ds_slf.isel(time=slice(0, 10), plan=0)
write_netcdf(ds_slice, nc_out)
ds_nc = xr.open_dataset(nc_out)
assert ds_nc.equals(ds_slice)
with xr.open_dataset(slf_in, engine="selafin") as ds_slf:
nc_out = tmp_path / "test3.nc"
ds_slice = ds_slf.isel(time=slice(0, 10), plan=0)
assert equals_dataset_vs_netcdf_export(ds_slice, nc_out)
# multiple range slices
ds_slf = xr.open_dataset(slf_in, engine="selafin")
nc_out = tmp_path / "test4.nc"
ds_slice = ds_slf.isel(time=slice(0, 10), plan=slice(5, 10))
write_netcdf(ds_slice, nc_out)
ds_nc = xr.open_dataset(nc_out)
assert ds_nc.equals(ds_slice)
with xr.open_dataset(slf_in, engine="selafin") as ds_slf:
nc_out = tmp_path / "test4.nc"
ds_slice = ds_slf.isel(time=slice(0, 10), plan=slice(5, 10))
assert equals_dataset_vs_netcdf_export(ds_slice, nc_out)


def test_from_scratch(tmp_path):
@@ -165,5 +165,5 @@ def test_from_scratch(tmp_path):

@DIMS
def test_dim(slf_in):
ds = xr.open_dataset(slf_in, engine="selafin")
repr(ds)
with xr.open_dataset(slf_in, engine="selafin") as ds:
repr(ds)
2 changes: 1 addition & 1 deletion xarray_selafin/Serafin.py
@@ -987,7 +987,7 @@ def __enter__(self):
self.file = open(self.filename, self.mode)
return self

def __exit__(self, exc_type, exc_val, exc_tb):
def __exit__(self, *args, **kwargs):
self.file.close()
return False

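The relaxed signature matters because `xarray_backend.py` below wraps a bare `slf.__exit__()` call in the `close()` callback it registers on the dataset; with the original three-parameter form, that zero-argument call would raise a `TypeError`. A minimal sketch of the two call paths, using a toy stand-in rather than the real `Serafin.Read`/`Serafin.Write`:

```python
class Resource:
    """Toy context manager mirroring the Serafin file classes."""

    def __enter__(self):
        self.file = open(__file__, "rb")  # any file that exists will do
        return self

    def __exit__(self, *args, **kwargs):
        # Accepts (exc_type, exc_val, exc_tb) from a `with` statement,
        # but also tolerates a bare __exit__() used as a close callback.
        self.file.close()
        return False  # never swallow exceptions

with Resource() as r:  # call path 1: `with` passes three arguments
    pass

r = Resource()
r.__enter__()
r.__exit__()           # call path 2: direct call with no arguments
```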
52 changes: 28 additions & 24 deletions xarray_selafin/xarray_backend.py
@@ -146,32 +146,31 @@ def write_serafin(fout, ds):
except KeyError:
slf_header.build_params()

resout = Serafin.Write(fout, slf_header.language, overwrite=True)
resout.__enter__()
resout.write_header(slf_header)
with Serafin.Write(fout, slf_header.language, overwrite=True) as resout:
resout.write_header(slf_header)

t0 = np.datetime64(datetime(*slf_header.date))
t0 = np.datetime64(datetime(*slf_header.date))

try:
time_serie = compute_duration_between_datetime(t0, ds.time.values)
except AttributeError:
return # no time (header only is written)
if isinstance(time_serie, float):
time_serie = [time_serie]
for it, t_ in enumerate(time_serie):
temp = np.empty(shape, dtype=slf_header.np_float_type)
for iv, var in enumerate(slf_header.var_IDs):
if slf_header.nb_frames == 1:
temp[iv] = ds[var]
else:
temp[iv] = ds.isel(time=it)[var]
if slf_header.nb_planes > 1:
temp[iv] = np.reshape(np.ravel(temp[iv]), (slf_header.nb_planes, slf_header.nb_nodes_2d))
resout.write_entire_frame(
slf_header,
t_,
np.reshape(temp, (slf_header.nb_var, slf_header.nb_nodes)),
)
try:
time_serie = compute_duration_between_datetime(t0, ds.time.values)
except AttributeError:
return # no time (header only is written)
if isinstance(time_serie, float):
time_serie = [time_serie]
for it, t_ in enumerate(time_serie):
temp = np.empty(shape, dtype=slf_header.np_float_type)
for iv, var in enumerate(slf_header.var_IDs):
if slf_header.nb_frames == 1:
temp[iv] = ds[var]
else:
temp[iv] = ds.isel(time=it)[var]
if slf_header.nb_planes > 1:
temp[iv] = np.reshape(np.ravel(temp[iv]), (slf_header.nb_planes, slf_header.nb_nodes_2d))
resout.write_entire_frame(
slf_header,
t_,
np.reshape(temp, (slf_header.nb_var, slf_header.nb_nodes)),
)


class SelafinLazyArray(BackendArray):
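Besides replacing the bare `resout.__enter__()` (which opened the output file but never arranged for it to be closed), the `with` block above also covers the early `return` taken when the dataset has no `time` coordinate: the file is closed on that path too, and on exceptions. A minimal sketch of that guarantee, independent of the Serafin classes:

```python
def write(path, frames):
    with open(path, "w") as f:
        f.write("header\n")
        if not frames:
            return        # header-only output; the file is still closed here
        for frame in frames:
            f.write(f"{frame}\n")

write("out.txt", [])      # early-return path: no handle is leaked
```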
@@ -319,6 +318,11 @@ def open_dataset(

ds = xr.Dataset(data_vars=data_vars, coords=coords)

# Avoid a ResourceWarning (unclosed file)
def close():
slf.__exit__()
ds.set_close(close)

ds.attrs["title"] = slf.header.title.decode(Serafin.SLF_EIT).strip()
ds.attrs["language"] = slf.header.language
ds.attrs["float_size"] = slf.header.float_size