From 9fd00d2411a51c3b7daebdbda9802ae362ca6473 Mon Sep 17 00:00:00 2001 From: Caesar Tuguinay <87830138+ctuguinay@users.noreply.github.com> Date: Wed, 7 Aug 2024 19:34:31 -0700 Subject: [PATCH] Allow external arrays in add depth; Add universal ping time alignment function (#1369) * move depth tests outside of consolidate_integration * add space * add align_to_ping_time function, and replace the other interp functions * modify test to no longer check for ffill condition * add test for align_ping_time_values * allow external depth_offset and tilt arrays to add_depth * add test for multidim depth offset and tilt value errors * fix test_add_location * remove redundant aligning to ping time logic * add case where external_da time is None; be more lenient on dropping NaNs in harmonize env param times * add tests for irregular expected_da inputs * simplify glider test * check that the correct extrapolation was done * fix glider azfp test * separate out interp from nmea selection * fix loc name * fix loc names * fix estimate background noise upsampling * Update echopype/utils/align.py Co-authored-by: Wu-Jung Lee * Update echopype/utils/align.py Co-authored-by: Wu-Jung Lee * Update echopype/consolidate/api.py Co-authored-by: Wu-Jung Lee * Update echopype/consolidate/api.py Co-authored-by: Wu-Jung Lee * set 3 senarios to 4 scenarios --------- Co-authored-by: Wu-Jung Lee --- echopype/calibrate/env_params.py | 11 +- echopype/clean/api.py | 7 +- echopype/consolidate/api.py | 72 +- echopype/consolidate/ek_depth_utils.py | 30 +- echopype/consolidate/loc_utils.py | 24 +- echopype/tests/calibrate/test_calibrate.py | 41 -- echopype/tests/calibrate/test_env_params.py | 13 +- echopype/tests/clean/test_noise.py | 49 +- echopype/tests/consolidate/test_add_depth.py | 658 ++++++++++++++++++ .../tests/consolidate/test_add_location.py | 16 +- .../test_consolidate_integration.py | 543 --------------- echopype/tests/utils/test_align.py | 281 ++++++++ echopype/utils/align.py | 61 ++ 13 files changed, 1122 insertions(+), 684 deletions(-) create mode 100644 echopype/tests/consolidate/test_add_depth.py create mode 100644 echopype/tests/utils/test_align.py create mode 100644 echopype/utils/align.py diff --git a/echopype/calibrate/env_params.py b/echopype/calibrate/env_params.py index 811b3476f..e1364dd99 100644 --- a/echopype/calibrate/env_params.py +++ b/echopype/calibrate/env_params.py @@ -6,6 +6,7 @@ from ..echodata import EchoData from ..utils import uwa +from ..utils.align import align_to_ping_time from .cal_params import param2da ENV_PARAMS = ( @@ -63,13 +64,9 @@ def harmonize_env_param_time( f"ping_time needs to be provided for comparison or interpolating {p.name}" ) - # Direct assignment if all timestamps are identical (EK60 data) - if np.array_equal(p["time1"].data, ping_time.data): - return p.rename({"time1": "ping_time"}) - - # Interpolate `p` to `ping_time` - return ( - p.dropna(dim="time1").interp(time1=ping_time).ffill(dim="ping_time").drop_vars("time1") + # Align array to ping time + return align_to_ping_time( + p.dropna(dim="time1", how="all"), "time1", ping_time, method="linear" ) return p diff --git a/echopype/clean/api.py b/echopype/clean/api.py index a2fd65151..14924a0e3 100644 --- a/echopype/clean/api.py +++ b/echopype/clean/api.py @@ -410,6 +410,7 @@ def estimate_background_noise( # Align noise `ping_time` to the first index of each coarsened `ping_time` bin noise = noise.assign_coords(ping_time=ping_num * np.arange(len(noise["ping_time"]))) + power_cal = 
power_cal.assign_coords(ping_time=np.arange(len(power_cal["ping_time"]))) # Limit max noise level noise = ( @@ -420,7 +421,9 @@ def estimate_background_noise( # Upsample noise to original ping time dimension Sv_noise = ( - noise.reindex({"ping_time": power_cal["ping_time"]}, method="ffill") + noise.reindex({"ping_time": power_cal["ping_time"]}, method="ffill").assign_coords( + ping_time=ds_Sv["ping_time"] + ) + spreading_loss + absorption_loss ) @@ -434,7 +437,7 @@ def remove_background_noise( ping_num: int, range_sample_num: int, background_noise_max: str = None, - SNR_threshold: float = 3.0, + SNR_threshold: float = "3.0dB", ) -> xr.Dataset: """ Remove noise by using estimates of background noise diff --git a/echopype/consolidate/api.py b/echopype/consolidate/api.py index 64ce1fa35..abc585c31 100644 --- a/echopype/consolidate/api.py +++ b/echopype/consolidate/api.py @@ -1,5 +1,6 @@ import datetime import pathlib +from numbers import Number from pathlib import Path from typing import Optional, Union @@ -9,6 +10,7 @@ from ..calibrate.ek80_complex import get_filter_coeff from ..echodata import EchoData from ..echodata.simrad import retrieve_correct_beam_group +from ..utils.align import align_to_ping_time from ..utils.io import get_file_format, open_source from ..utils.log import _init_logger from ..utils.prov import add_processing_level @@ -17,7 +19,7 @@ ek_use_platform_angles, ek_use_platform_vertical_offsets, ) -from .loc_utils import check_loc_time_dim_duplicates, check_loc_vars_validity, sel_interp +from .loc_utils import check_loc_time_dim_duplicates, check_loc_vars_validity, sel_nmea from .split_beam_angle import get_angle_complex_samples, get_angle_power_samples logger = _init_logger(__name__) @@ -65,8 +67,8 @@ def swap_dims_channel_frequency(ds: Union[xr.Dataset, str, pathlib.Path]) -> xr. def add_depth( ds: Union[xr.Dataset, str, pathlib.Path], echodata: Optional[Union[EchoData, str, pathlib.Path]] = None, - depth_offset: Optional[float] = None, - tilt: Optional[float] = None, + depth_offset: Optional[Union[Number, xr.DataArray]] = None, + tilt: Optional[Union[Number, xr.DataArray]] = None, downward: bool = True, use_platform_vertical_offsets: bool = False, use_platform_angles: bool = False, @@ -82,11 +84,15 @@ def add_depth( Source Sv dataset to which a depth variable will be added. echodata : Optional[EchoData, str, pathlib.Path], default `None` `EchoData` object from which the `Sv` dataset originated. - depth_offset : Optional[float], default None + depth_offset : Optional[Union[Number, xr.DataArray]], default None Offset along the vertical (depth) dimension to account for actual transducer position in water, since `echo_range` is counted from transducer surface. - tilt : Optional[float], default None. + If provided as an xr.DataArray, it must have a single dimension corresponding to time. + The data array should represent the depth offset in meters at each time step. + tilt : Optional[Union[Number, xr.DataArray]], default None. Transducer tilt angle [degree]. 0 corresponds to a transducer pointing vertically. + If provided as an xr.DataArray, it must have a single dimension corresponding to time. + The data array should represent the tilt angle in degrees at each time step. downward : bool, default `True` The transducers point downward. 
use_platform_vertical_offsets: bool, default `False` @@ -155,9 +161,21 @@ def add_depth( # Initialize transducer depth to 0.0 (no effect on depth) transducer_depth = 0.0 - if depth_offset: + if isinstance(depth_offset, Number): # Set transducer depth to user specified depth offset transducer_depth = depth_offset + if isinstance(depth_offset, xr.DataArray): + # Will not accept data variables that don't have a single dimension + if len(depth_offset.dims) != 1: + raise ValueError( + "If depth_offset is passed in as an xr.DataArray, " + "it must contain a single dimension." + ) + else: + # Align `depth_offset` to `ping_time` + transducer_depth = align_to_ping_time( + depth_offset, depth_offset.dims[0], ds["ping_time"] + ) elif echodata and sonar_model in ["EK60", "EK80"]: if use_platform_vertical_offsets: # Compute transducer depth in EK systems using platform vertical offset data @@ -167,9 +185,20 @@ def add_depth( # Initialize echo range scaling to 1.0 (no effect on depth) echo_range_scaling = 1.0 - if tilt: + if isinstance(tilt, Number): # Set echo range scaling (angular scaling) using user specified tilt echo_range_scaling = np.cos(np.deg2rad(tilt)) + if isinstance(tilt, xr.DataArray): + # Will not accept data variables that don't have a single dimension + if len(tilt.dims) != 1: + raise ValueError( + "If tilt is passed in as an xr.DataArray, it must contain a single dimension." + ) + else: + # Align `tilt` to `ping_time` + echo_range_scaling = np.cos( + np.deg2rad(align_to_ping_time(tilt, tilt.dims[0], ds["ping_time"])) + ) elif echodata and sonar_model in ["EK60", "EK80"]: if use_platform_angles: # Compute echo range scaling in EK systems using platform angle data @@ -275,23 +304,18 @@ def add_location( # Copy dataset interp_ds = ds.copy() - # Interpolate location variables and place into `interp_ds` - interp_ds["latitude"] = sel_interp( - ds=ds, - echodata=echodata, - loc_name=lat_name, - time_dim_name=time_dim_name, - nmea_sentence=nmea_sentence, - datagram_type=datagram_type, - ) - interp_ds["longitude"] = sel_interp( - ds=ds, - echodata=echodata, - loc_name=lon_name, - time_dim_name=time_dim_name, - nmea_sentence=nmea_sentence, - datagram_type=datagram_type, - ) + # Select NMEA subset (if applicable) and interpolate location variables and place + # into `interp_ds`. + for loc_name, interp_loc_name in [(lat_name, "latitude"), (lon_name, "longitude")]: + loc_var = sel_nmea( + echodata=echodata, + loc_name=loc_name, + nmea_sentence=nmea_sentence, + datagram_type=datagram_type, + ) + interp_ds[interp_loc_name] = align_to_ping_time( + loc_var, time_dim_name, ds["ping_time"], "linear" + ) # Most attributes are attached automatically via interpolation # here we add the history diff --git a/echopype/consolidate/ek_depth_utils.py b/echopype/consolidate/ek_depth_utils.py index 6a83f4d09..4b6e9084e 100644 --- a/echopype/consolidate/ek_depth_utils.py +++ b/echopype/consolidate/ek_depth_utils.py @@ -2,6 +2,7 @@ import xarray as xr from scipy.spatial.transform import Rotation as R +from ..utils.align import align_to_ping_time from ..utils.log import _init_logger logger = _init_logger(__name__) @@ -26,31 +27,6 @@ def _check_and_log_nans( ) -def _var_time2_to_ping_time(var_with_time2, ping_time_da): - """ - If `time2` does not differ from `var`, we rename `time2` to 'ping_time', - else interpolate `transducer_depth`'s `time2` dimension to `ping_time`. 
- - Useful for handling EK60 and EK80 platform data: - - EK80 will have platform variables with time2 dimension that does not match - Beam group ping time values, while EK60 will have time2 dimension that - matches Beam group ping time values. - """ - if not ping_time_da.equals(var_with_time2["time2"].rename({"time2": "ping_time"})): - var_with_ping_time = var_with_time2.interp( - {"time2": ping_time_da}, - method="nearest", - # More details for `fill_value` and `extrapolate` can be found here: - # https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.interp1d.html # noqa - kwargs={"fill_value": "extrapolate"}, - ).drop_vars("time2") - else: - var_with_ping_time = var_with_time2.rename({"time2": "ping_time"}) - - return var_with_ping_time - - def ek_use_platform_vertical_offsets( platform_ds: xr.Dataset, ping_time_da: xr.DataArray, @@ -72,7 +48,7 @@ def ek_use_platform_vertical_offsets( # Compute z translation for transducer position vs water level transducer_depth = transducer_offset_z - (water_level + vertical_offset) - return _var_time2_to_ping_time(transducer_depth, ping_time_da) + return align_to_ping_time(transducer_depth, "time2", ping_time_da) def ek_use_platform_angles(platform_ds: xr.Dataset, ping_time_da: xr.DataArray) -> xr.DataArray: @@ -95,7 +71,7 @@ def ek_use_platform_angles(platform_ds: xr.Dataset, ping_time_da: xr.DataArray) echo_range_scaling, dims="time2", coords={"time2": platform_ds["time2"]} ) - return _var_time2_to_ping_time(echo_range_scaling, ping_time_da) + return align_to_ping_time(echo_range_scaling, "time2", ping_time_da) def ek_use_beam_angles( diff --git a/echopype/consolidate/loc_utils.py b/echopype/consolidate/loc_utils.py index 20b77ec41..fb0610ea1 100644 --- a/echopype/consolidate/loc_utils.py +++ b/echopype/consolidate/loc_utils.py @@ -118,16 +118,14 @@ def check_loc_time_dim_duplicates(echodata: EchoData, time_dim_name: str) -> Non ) -def sel_interp( - ds: xr.Dataset, +def sel_nmea( echodata: EchoData, loc_name: str, - time_dim_name: str, nmea_sentence: Union[str, None] = None, datagram_type: Union[str, None] = None, ) -> xr.DataArray: """ - Selection and interpolation for a location variable. + Select location subset for a location variable based on NMEA sentence. The selection logic is as follows, with 4 possible scenarios: Note here datagram_type = None is equivalent to datagram_type = NMEA @@ -137,13 +135,10 @@ def sel_interp( 3) If datagram_type is not None and nmea_sentence is None, then do nothing. 4) If datagram_type is not None and nmea_sentence is not None, then raise ValueError since NMEA sentence selection can only be used on location variables from the NMEA datagram. - - After selection logic, the location variable is then interpolated time-wise to match - that of the input dataset's time dimension. """ # NMEA sentence selection if datagram_type is None (NMEA corresponds to None) if nmea_sentence and datagram_type is None: - position_var = echodata["Platform"][loc_name][ + return echodata["Platform"][loc_name][ echodata["Platform"]["sentence_type"] == nmea_sentence ] elif nmea_sentence and datagram_type is not None: @@ -151,15 +146,4 @@ def sel_interp( "If datagram_type is not `None`, then `nmea_sentence` cannot be specified." 
) else: - position_var = echodata["Platform"][loc_name] - - if len(position_var) == 1: - # Propagate single, fixed-location coordinate - return xr.DataArray( - data=position_var.values[0] * np.ones(len(ds["ping_time"]), dtype=np.float64), - dims=["ping_time"], - attrs=position_var.attrs, - ) - else: - # Values may be nan if there are ping_time values outside the time_dim_name range - return position_var.interp(**{time_dim_name: ds["ping_time"]}) + return echodata["Platform"][loc_name] diff --git a/echopype/tests/calibrate/test_calibrate.py b/echopype/tests/calibrate/test_calibrate.py index d07551ea8..4372fe1f1 100644 --- a/echopype/tests/calibrate/test_calibrate.py +++ b/echopype/tests/calibrate/test_calibrate.py @@ -310,53 +310,12 @@ def test_compute_Sv_combined_ed_ping_time_extend_past_time1(): "pH", ] - # Grab time variables - time1 = ed_combined["Environment"]["time1"] - ping_time = ed_combined["Sonar/Beam_group1"]["ping_time"] - # Iterate through vars for env_var_name in environment_related_variable_names: env_var = ds_Sv[env_var_name] # Check that no NaNs exist assert not np.any(np.isnan(env_var.data)) - # Check that all values past the max of time1 are ffilled with value - # that is time-wise closest to max of time1 - if "channel" not in env_var.dims: - assert np.allclose( - np.unique( - env_var.sel( - ping_time=slice( - time1.max(), - ping_time.max() - ) - ).data - ), - env_var.sel(ping_time=slice(time1.max())).data[-1] - ) - else: - # Iterate through environment variable channels to do the same - # check as above per channel - for channel_index in range(len(env_var["channel"])): - assert np.allclose( - np.unique( - env_var.isel( - channel=channel_index - ) - .sel( - ping_time=slice( - time1.max(), - ping_time.max() - ) - ).data - ), - env_var.isel( - channel=channel_index - ).sel( - ping_time=slice(time1.max()) - ).data[-1] - ) - @pytest.mark.parametrize( "raw_path, sonar_model, xml_path, waveform_mode, encode_mode", diff --git a/echopype/tests/calibrate/test_env_params.py b/echopype/tests/calibrate/test_env_params.py index 98e31d74b..8337d2766 100644 --- a/echopype/tests/calibrate/test_env_params.py +++ b/echopype/tests/calibrate/test_env_params.py @@ -30,6 +30,7 @@ def ek80_cal_path(test_path): return test_path["EK80_CAL"] +@pytest.mark.unit def test_harmonize_env_param_time(): # Scalar p = 10.05 @@ -67,16 +68,6 @@ def test_harmonize_env_param_time(): assert (p_new.data == p.data).all() # ping_time target requires actual interpolation - ping_time_target = xr.DataArray( - data=[1, 2], - coords={ - "ping_time": np.array( - ["2017-06-20T01:00:15", "2017-06-20T01:00:30"], dtype="datetime64[ns]" - ) - }, - dims=["ping_time"], - ) - ping_time_target = xr.DataArray( data=[1], coords={ @@ -133,6 +124,7 @@ def test_harmonize_env_param_time(): # .all computes dask array under the hood assert (p_new.data == [0.5, 2880.5]).all() + @pytest.mark.unit def test_harmonize_env_param_time_only_one_non_NaN_along_time1(): # Create data array with time1 dimension with only 1 non-NaN value @@ -145,6 +137,7 @@ def test_harmonize_env_param_time_only_one_non_NaN_along_time1(): assert output_da == 1, "Output data array should just be 1" assert 'time1' not in output_da.dims, "```harmonize_env_param_time``` should have dropped 'time1' dimension" + @pytest.mark.parametrize( ("user_dict", "channel", "out_dict"), [ diff --git a/echopype/tests/clean/test_noise.py b/echopype/tests/clean/test_noise.py index 3f63f212c..c4eba834a 100644 --- a/echopype/tests/clean/test_noise.py +++ 
b/echopype/tests/clean/test_noise.py @@ -2,6 +2,7 @@ import xarray as xr import echopype as ep import pytest +import pandas as pd from echopype.clean.utils import ( extract_dB, @@ -858,13 +859,55 @@ def test_mask_attenuated_signal_against_echopy(chunk): ) +@pytest.mark.unit +def test_estimate_background_noise_upsampling(): + """Test for the correct upsampling behavior in `estimate_background_noise`""" + # Open Raw and Calibrate + ed = ep.open_raw( + "echopype/test_data/ek60/ncei-wcsd/Summer2017-D20170615-T190214.raw", + sonar_model="EK60" + ) + ds_Sv = ep.calibrate.compute_Sv(ed) + + # Estimate background noise + Sv_noise = ep.clean.estimate_background_noise( + ds_Sv, ping_num=2, range_sample_num=5, + ) + + # Check that the correct upsampling has occurred + for channel_idx in range(len(ds_Sv["channel"])): + for range_sample_idx in range(len(ds_Sv["range_sample"])): + # Grab vector that just has ping time dimension + Sv_noise_vector = Sv_noise.isel(channel=channel_idx, range_sample=range_sample_idx) + + # Check that the values repeat every 2 values: + + # Grab numpy values + data_np = Sv_noise_vector.values + + # Handle odd length by removing the last element + if len(data_np) % 2 != 0: + data_np = data_np[:-1] + + # Reshape so that the pairs are columnwise next to each other + pairs = data_np.reshape(-1, 2) + + # Assert that the pairs are equal + assert np.allclose(pairs[:,0], pairs[:,1]) + + +@pytest.mark.integration def test_remove_background_noise(): """Test remove_background_noise on toy data""" # Parameters for fake data nchan, npings, nrange_samples = 1, 10, 100 chan = np.arange(nchan).astype(str) - ping_index = np.arange(npings) + ping_time = pd.date_range( + start='2015-06-30T17:30:10.000000000', + end='2015-06-30T17:30:25.000000000', + periods=npings, + ) range_sample = np.arange(nrange_samples) data = np.ones(nrange_samples) @@ -878,7 +921,7 @@ def test_remove_background_noise(): [data], coords=[ ('channel', chan), - ('ping_time', ping_index), + ('ping_time', ping_time), ('range_sample', range_sample), ], ) @@ -915,7 +958,7 @@ def test_remove_background_noise(): data, coords=[ ('channel', chan), - ('ping_time', ping_index), + ('ping_time', ping_time), ('range_sample', range_sample), ], ) diff --git a/echopype/tests/consolidate/test_add_depth.py b/echopype/tests/consolidate/test_add_depth.py new file mode 100644 index 000000000..6c2c1aff6 --- /dev/null +++ b/echopype/tests/consolidate/test_add_depth.py @@ -0,0 +1,658 @@ +import pytest +import xarray as xr +import pandas as pd +import numpy as np +from scipy.spatial.transform import Rotation as R + +from echopype.utils.align import align_to_ping_time + +import echopype as ep +from echopype.consolidate.ek_depth_utils import ( + ek_use_platform_vertical_offsets, ek_use_platform_angles, ek_use_beam_angles +) + + +def _build_ds_Sv(channel, range_sample, ping_time, sample_interval): + return xr.Dataset( + data_vars={ + "Sv": ( + ("channel", "range_sample", "ping_time"), + np.random.random((len(channel), range_sample.size, ping_time.size)), + ), + "echo_range": ( + ("channel", "range_sample", "ping_time"), + ( + np.swapaxes(np.tile(range_sample, (len(channel), ping_time.size, 1)), 1, 2) + * sample_interval + ), + ), + }, + coords={ + "channel": channel, + "range_sample": range_sample, + "ping_time": ping_time, + }, + ) + + +@pytest.mark.unit +def test_ek_use_platform_vertical_offsets_output(): + """ + Test `use_platform_vertical_offsets` outputs. 
+ """ + ping_time_da = xr.DataArray(pd.date_range(start="2024-07-04", periods=5, freq="4h"), dims=("ping_time")) + time2_da = xr.DataArray(pd.date_range(start="2024-07-04", periods=4, freq="5h"), dims=("time2")) + platform_ds = xr.Dataset( + { + "water_level": xr.DataArray( + [1.5, 0.5, 0.0, 1.0], + dims=("time2") + ), + "vertical_offset": xr.DataArray( + [1.0, 0.0, 0.0, 1.0], + dims=("time2") + ), + "transducer_offset_z": xr.DataArray( + [3.0, 1.5, 0.0, 11.15], + dims=("time2") + ), + }, + coords={"time2": time2_da} + ) + transducer_depth = ep.consolidate.ek_depth_utils.ek_use_platform_vertical_offsets( + platform_ds, + ping_time_da + ) + + # Check transducer depth values + assert np.allclose(transducer_depth.values, np.array([0.5, 1.0, 0.0, 0.0, 9.15])) + + +@pytest.mark.unit +def test_ek_use_platform_angles_output(): + """ + Test `use_platform_angles` outputs for 2 times when the boat is completely sideways + and 1 time when the boat has no sideways component (completely straight on z-axis). + """ + # Create a Dataset with ping-time-specific pitch and roll arc degrees + # (with possible range -90 deg to 90 deg). + # During the 1st and 2nd time2, the platform is completely sideways so the echo range scaling + # should be 0 (i.e zeros out the entire depth). + # During the 3rd time2, the platform is completely vertical so the echo range scaling should + # be 1 (i.e no change). + # During the 4th time2, the platform is tilted by 45 deg so the echo range scaling should + # be 1/sqrt(2). + ping_time_da = xr.DataArray(pd.date_range(start="2024-07-04", periods=5, freq="4h"), dims=("ping_time")) + time2_da = xr.DataArray(pd.date_range(start="2024-07-04", periods=4, freq="5h"), dims=("time2")) + platform_ds = xr.Dataset( + { + "pitch": xr.DataArray( + [-90, 0, 0, -45], + dims=("time2") + ), + "roll": xr.DataArray( + [0, 90, 0, 0], + dims=("time2") + ), + }, + coords={"time2": time2_da} + ) + echo_range_scaling = ep.consolidate.ek_depth_utils.ek_use_platform_angles(platform_ds, ping_time_da) + + # The two 1.0s here are from the interpolation + assert np.allclose(echo_range_scaling.values, np.array([0.0, 0.0, 1.0, 1.0, 1/np.sqrt(2)])) + + +@pytest.mark.unit +def test_ek_use_beam_angles_output(): + """ + Test `use_beam_angle` outputs for 2 sideways looking beams and 1 vertical looking beam. + """ + # Create a Dataset with channel-specific beam direction vectors + # In channels 1 and 2, the transducers are not pointing vertically at all so the echo + # range scaling should be 0 (i.e zeros out the entire depth). + # In channel 3, the transducer is completely vertical so the echo range scaling should + # be 1 (i.e no change). + # In channel 4, the transducer is tilted to the x direction by 30 deg, so the + # echo range scaling should be sqrt(3)/2. 
+ channel_da = xr.DataArray(["chan1", "chan2", "chan3", "chan4"], dims=("channel")) + beam_ds = xr.Dataset( + { + "beam_direction_x": xr.DataArray([1, 0, 0, 1/2], dims=("channel")), + "beam_direction_y": xr.DataArray([0, 1, 0, 0], dims=("channel")), + "beam_direction_z": xr.DataArray([0, 0, 1, np.sqrt(3)/2], dims=("channel")), + }, + coords={"channel": channel_da} + ) + echo_range_scaling = ep.consolidate.ek_depth_utils.ek_use_beam_angles(beam_ds) + assert np.allclose(echo_range_scaling.values, np.array([0.0, 0.0, 1.0, np.sqrt(3)/2])) + + +@pytest.mark.integration +@pytest.mark.parametrize("file, sonar_model, compute_Sv_kwargs", [ + ( + "echopype/test_data/ek60/NBP_B050N-D20180118-T090228.raw", + "EK60", + {} + ), + ( + "echopype/test_data/ek60/ncei-wcsd/Summer2017-D20170620-T021537.raw", + "EK60", + {} + ), + ( + "echopype/test_data/ek80/ncei-wcsd/SH1707/Reduced_D20170826-T205615.raw", + "EK80", + {"waveform_mode":"BB", "encode_mode":"complex"} + ), + ( + "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", + "EK80", + {"waveform_mode":"CW", "encode_mode":"power"} + ) +]) +def test_ek_depth_utils_dims(file, sonar_model, compute_Sv_kwargs): + """ + Tests `ek_use_platform_vertical_offsets`, `ek_use_platform_angles`, and + `ek_use_beam_angles` for correct dimensions. + """ + # Open EK Raw file and Compute Sv + ed = ep.open_raw(file, sonar_model=sonar_model) + ds_Sv = ep.calibrate.compute_Sv(ed, **compute_Sv_kwargs) + + # Check dimensions for using EK platform vertical offsets to compute + # `transducer_depth`. + transducer_depth = ek_use_platform_vertical_offsets( + platform_ds=ed["Platform"], ping_time_da=ds_Sv["ping_time"] + ) + assert transducer_depth.dims == ('channel', 'ping_time') + assert transducer_depth["channel"].equals(ds_Sv["channel"]) + assert transducer_depth["ping_time"].equals(ds_Sv["ping_time"]) + + # Check dimensions for using EK platform angles to compute + # `platform_echo_range_scaling`. + platform_echo_range_scaling = ek_use_platform_angles(platform_ds=ed["Platform"], ping_time_da=ds_Sv["ping_time"]) + assert platform_echo_range_scaling.dims == ('ping_time',) + assert platform_echo_range_scaling["ping_time"].equals(ds_Sv["ping_time"]) + + # Check dimensions for using EK beam angles to compute `beam_echo_range_scaling`. + beam_echo_range_scaling = ek_use_beam_angles(beam_ds=ed["Sonar/Beam_group1"]) + assert beam_echo_range_scaling.dims == ('channel',) + assert beam_echo_range_scaling["channel"].equals(ds_Sv["channel"]) + + +@pytest.mark.integration +def test_ek_depth_utils_group_variable_NaNs_logger_warnings(caplog): + """ + Tests `ek_use_platform_vertical_offsets`, `ek_use_platform_angles`, and + `ek_use_beam_angles` for correct logger warnings when NaNs exist in group + variables. 
+ """ + # Open EK Raw file and Compute Sv + ed = ep.open_raw( + "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", + sonar_model="EK80" + ) + ds_Sv = ep.calibrate.compute_Sv(ed, **{"waveform_mode":"CW", "encode_mode":"power"}) + + # Set first index of group variables to NaN + ed["Platform"]["water_level"] = np.nan # Is a scalar + ed["Platform"]["vertical_offset"].values[0] = np.nan + ed["Platform"]["transducer_offset_z"].values[0] = np.nan + ed["Platform"]["pitch"].values[0] = np.nan + ed["Platform"]["roll"].values[0] = np.nan + ed["Sonar/Beam_group1"]["beam_direction_x"].values[0] = np.nan + ed["Sonar/Beam_group1"]["beam_direction_y"].values[0] = np.nan + ed["Sonar/Beam_group1"]["beam_direction_z"].values[0] = np.nan + + # Turn on logger verbosity + ep.utils.log.verbose(override=False) + + # Run EK depth util functions: + ek_use_platform_vertical_offsets(platform_ds=ed["Platform"], ping_time_da=ds_Sv["ping_time"]) + ek_use_platform_angles(platform_ds=ed["Platform"], ping_time_da=ds_Sv["ping_time"]) + ek_use_beam_angles(beam_ds=ed["Sonar/Beam_group1"]) + + # Set (group, variable) name pairs + group_variable_name_pairs = [ + ["Platform", "water_level"], + ["Platform", "vertical_offset"], + ["Platform", "transducer_offset_z"], + ["Platform", "pitch"], + ["Platform", "roll"], + ["Sonar/Beam_group1", "beam_direction_x"], + ["Sonar/Beam_group1", "beam_direction_y"], + ["Sonar/Beam_group1", "beam_direction_z"], + ] + + # Check if the expected warnings are logged + for group_name, variable_name in group_variable_name_pairs: + expected_warning = ( + f"The Echodata `{group_name}` group `{variable_name}` variable array contains " + "NaNs. This will result in NaNs in the final `depth` array. Consider filling the " + "NaNs and calling `.add_depth(...)` again." + ) + assert any(expected_warning in record.message for record in caplog.records) + + # Turn off logger verbosity + ep.utils.log.verbose(override=True) + + +@pytest.mark.integration +def test_add_depth_tilt_depth_use_arg_logger_warnings(caplog): + """ + Tests warnings when `tilt` and `depth_offset` are being passed in when other + `use_*` arguments are passed in as `True`. + """ + # Open EK Raw file and Compute Sv + ed = ep.open_raw( + "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", + sonar_model="EK80" + ) + ds_Sv = ep.calibrate.compute_Sv(ed, **{"waveform_mode":"CW", "encode_mode":"power"}) + + # Turn on logger verbosity + ep.utils.log.verbose(override=False) + + # Run `add_depth` with `tilt`, `depth_offset` as Non-NaN, using beam group angles, + # and platform vertical offset values + ep.consolidate.add_depth( + ds_Sv, + ed, + depth_offset=9.15, + tilt=0.1, + use_platform_vertical_offsets=True, + use_beam_angles=True, + ) + + # Check if the expected warnings are logged + depth_offset_warning = ( + "When `depth_offset` is specified, platform vertical offset " + "variables will not be used." + ) + tilt_warning = ( + "When `tilt` is specified, beam/platform angle variables will not be used." + ) + for warning in [depth_offset_warning, tilt_warning]: + assert any(warning in record.message for record in caplog.records) + + # Turn off logger verbosity + ep.utils.log.verbose(override=True) + + +@pytest.mark.integration +def test_add_depth_without_echodata(): + """ + Test `add_depth` without using Echodata Platform or Beam groups. 
+ """ + # Build test Sv dataset + channel = ["channel_0", "channel_1", "channel_2"] + range_sample = np.arange(100) + ping_time = pd.date_range(start="2022-08-10T10:00:00", end="2022-08-10T12:00:00", periods=121) + sample_interval = 0.01 + ds_Sv = _build_ds_Sv(channel, range_sample, ping_time, sample_interval) + + # User input `depth_offset` + water_level = 10 + ds_Sv_depth = ep.consolidate.add_depth(ds_Sv, depth_offset=water_level) + assert ds_Sv_depth["depth"].equals(ds_Sv["echo_range"] + water_level) + + # User input `depth_offset` and `tilt` + tilt = 15 + ds_Sv_depth = ep.consolidate.add_depth(ds_Sv, depth_offset=water_level, tilt=tilt) + assert ds_Sv_depth["depth"].equals(ds_Sv["echo_range"] * np.cos(tilt / 180 * np.pi) + water_level) + + # Inverted echosounder with `depth_offset` and `tilt` + ds_Sv_depth = ep.consolidate.add_depth(ds_Sv, depth_offset=water_level, tilt=tilt, downward=False) + assert ds_Sv_depth["depth"].equals(-1 * ds_Sv["echo_range"] * np.cos(tilt / 180 * np.pi) + water_level) + + # Check history attribute + history_attribute = ds_Sv_depth["depth"].attrs["history"] + history_attribute_without_time = history_attribute[33:] + assert history_attribute_without_time == ". depth` calculated using: Sv `echo_range`." + + +@pytest.mark.integration +def test_add_depth_errors(): + """Check if all `add_depth` errors are raised appropriately.""" + # Open EK80 Raw file and Compute Sv + ed = ep.open_raw( + "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", + sonar_model="EK80" + ) + ds_Sv = ep.calibrate.compute_Sv(ed, **{"waveform_mode":"CW", "encode_mode":"power"}) + + # Test that all three errors are called: + with pytest.raises(ValueError, match=( + "If any of `use_platform_vertical_offsets`, " + + "`use_platform_angles` " + + "or `use_beam_angles` is `True`, " + + "then `echodata` cannot be `None`." + )): + ep.consolidate.add_depth(ds_Sv, None, use_platform_angles=True) + with pytest.raises(NotImplementedError, match=( + "Computing depth with both platform and beam angles is not implemented yet." + )): + ep.consolidate.add_depth(ds_Sv, ed, use_platform_angles=True, use_beam_angles=True) + with pytest.raises(NotImplementedError, match=( + "`use_platform/beam_...` not implemented yet for `AZFP`." 
+ )): + ed["Sonar"].attrs["sonar_model"] = "AZFP" + ep.consolidate.add_depth(ds_Sv, ed, use_platform_angles=True) + + +@pytest.mark.integration +@pytest.mark.parametrize("file, sonar_model, compute_Sv_kwargs", [ + ( + "echopype/test_data/ek60/NBP_B050N-D20180118-T090228.raw", + "EK60", + {} + ), + ( + "echopype/test_data/ek60/ncei-wcsd/Summer2017-D20170620-T021537.raw", + "EK60", + {} + ), + ( + "echopype/test_data/ek80/ncei-wcsd/SH1707/Reduced_D20170826-T205615.raw", + "EK80", + {"waveform_mode":"BB", "encode_mode":"complex"} + ), + ( + "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", + "EK80", + {"waveform_mode":"CW", "encode_mode":"power"} + ) +]) +def test_add_depth_EK_with_platform_vertical_offsets(file, sonar_model, compute_Sv_kwargs): + """Test `depth` values when using EK Platform vertical offset values to compute it.""" + # Open EK Raw file and Compute Sv + ed = ep.open_raw(file, sonar_model=sonar_model) + ds_Sv = ep.calibrate.compute_Sv(ed, **compute_Sv_kwargs) + + # Subset ds_Sv to include only first 5 `range_sample` coordinates + # since the test takes too long to iterate through every value + ds_Sv = ds_Sv.isel(range_sample=slice(0,5)) + + # Replace any Platform Vertical Offset NaN values with 0 + ed["Platform"]["water_level"] = ed["Platform"]["water_level"].fillna(0) + ed["Platform"]["vertical_offset"] = ed["Platform"]["vertical_offset"].fillna(0) + ed["Platform"]["transducer_offset_z"] = ed["Platform"]["transducer_offset_z"].fillna(0) + + # Compute `depth` using platform vertical offset values + ds_Sv_with_depth = ep.consolidate.add_depth(ds_Sv, ed, use_platform_vertical_offsets=True) + + # Check history attribute + history_attribute = ds_Sv_with_depth["depth"].attrs["history"] + history_attribute_without_time = history_attribute[33:] + assert history_attribute_without_time == ( + ". depth` calculated using: Sv `echo_range`, Echodata `Platform` Vertical Offsets." 
+ ) + + # Compute transducer depth + transducer_depth = ek_use_platform_vertical_offsets(ed["Platform"], ds_Sv["ping_time"]) + + # Check if depth value is equal to corresponding `echo_range` value + transducer depth value + assert np.allclose( + ds_Sv_with_depth["depth"].data, + (ds_Sv["echo_range"] + transducer_depth).data, + rtol=1e-10, + atol=1e-10 + ) + + +@pytest.mark.integration +@pytest.mark.parametrize("file, sonar_model, compute_Sv_kwargs", [ + ( + "echopype/test_data/ek60/NBP_B050N-D20180118-T090228.raw", + "EK60", + {} + ), + ( + "echopype/test_data/ek60/ncei-wcsd/Summer2017-D20170620-T021537.raw", + "EK60", + {} + ), + ( + "echopype/test_data/ek80/ncei-wcsd/SH1707/Reduced_D20170826-T205615.raw", + "EK80", + {"waveform_mode":"BB", "encode_mode":"complex"} + ), + ( + "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", + "EK80", + {"waveform_mode":"CW", "encode_mode":"power"} + ) +]) +def test_add_depth_EK_with_platform_angles(file, sonar_model, compute_Sv_kwargs): + """Test `depth` values when using EK Platform angles to compute it.""" + # Open EK Raw file and Compute Sv + ed = ep.open_raw(file, sonar_model=sonar_model) + ds_Sv = ep.calibrate.compute_Sv(ed, **compute_Sv_kwargs) + + # Replace any Beam Angle NaN values with 0 + ed["Platform"]["pitch"] = ed["Platform"]["pitch"].fillna(0) + ed["Platform"]["roll"] = ed["Platform"]["roll"].fillna(0) + + # Compute `depth` using platform angle values + ds_Sv_with_depth = ep.consolidate.add_depth(ds_Sv, ed, use_platform_angles=True) + + # Check history attribute + history_attribute = ds_Sv_with_depth["depth"].attrs["history"] + history_attribute_without_time = history_attribute[33:] + assert history_attribute_without_time == ( + ". depth` calculated using: Sv `echo_range`, Echodata `Platform` Angles." 
+ ) + + # Compute transducer depth + echo_range_scaling = ek_use_platform_angles(ed["Platform"], ds_Sv["ping_time"]) + + # Check if depth is equal to echo range scaling value * echo range + assert np.allclose( + ds_Sv_with_depth["depth"].data, + (echo_range_scaling * ds_Sv["echo_range"]).transpose("channel", "ping_time", "range_sample").data, + equal_nan=True + ) + + +@pytest.mark.integration +@pytest.mark.parametrize("file, sonar_model, compute_Sv_kwargs", [ + ( + "echopype/test_data/ek60/NBP_B050N-D20180118-T090228.raw", + "EK60", + {} + ), + ( + "echopype/test_data/ek60/ncei-wcsd/Summer2017-D20170620-T021537.raw", + "EK60", + {} + ), + ( + "echopype/test_data/ek80/ncei-wcsd/SH1707/Reduced_D20170826-T205615.raw", + "EK80", + {"waveform_mode":"BB", "encode_mode":"complex"} + ), + ( + "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", + "EK80", + {"waveform_mode":"CW", "encode_mode":"power"} + ) +]) +def test_add_depth_EK_with_beam_angles(file, sonar_model, compute_Sv_kwargs): + """Test `depth` values when using EK Beam angles to compute it.""" + # Open EK Raw file and Compute Sv + ed = ep.open_raw(file, sonar_model=sonar_model) + ds_Sv = ep.calibrate.compute_Sv(ed, **compute_Sv_kwargs) + + # Replace Beam Angle NaN values + ed["Sonar/Beam_group1"]["beam_direction_x"] = ed["Sonar/Beam_group1"]["beam_direction_x"].fillna(0) + ed["Sonar/Beam_group1"]["beam_direction_y"] = ed["Sonar/Beam_group1"]["beam_direction_y"].fillna(0) + ed["Sonar/Beam_group1"]["beam_direction_z"] = ed["Sonar/Beam_group1"]["beam_direction_z"].fillna(1) + + # Compute `depth` using beam angle values + ds_Sv_with_depth = ep.consolidate.add_depth(ds_Sv, ed, use_beam_angles=True) + + # Check history attribute + history_attribute = ds_Sv_with_depth["depth"].attrs["history"] + history_attribute_without_time = history_attribute[33:] + assert history_attribute_without_time == ( + ". depth` calculated using: Sv `echo_range`, Echodata `Beam_group1` Angles." + ) + + # Compute echo range scaling values + echo_range_scaling = ek_use_beam_angles(ed["Sonar/Beam_group1"]) + + # Check if depth is equal to echo range scaling value * echo range + assert np.allclose( + ds_Sv_with_depth["depth"].data, + (echo_range_scaling * ds_Sv["echo_range"]).transpose("channel", "ping_time", "range_sample").data, + equal_nan=True + ) + + +@pytest.mark.integration +@pytest.mark.parametrize("file, sonar_model, compute_Sv_kwargs, expected_beam_group_name", [ + ( + "echopype/test_data/ek80/Summer2018--D20180905-T033113.raw", + "EK80", + {"waveform_mode":"BB", "encode_mode":"complex"}, + "Beam_group1" + ), + ( + "echopype/test_data/ek80/Summer2018--D20180905-T033113.raw", + "EK80", + {"waveform_mode":"CW", "encode_mode":"power"}, + "Beam_group2" + ) +]) +def test_add_depth_EK_with_beam_angles_with_different_beam_groups( + file, sonar_model, compute_Sv_kwargs, expected_beam_group_name +): + """ + Test `depth` channel when using EK Beam angles from two separate calibrated + Sv datasets (that are from the same raw file) using two differing pairs of + calibration key word arguments. The two tests should correspond to different + beam groups i.e. beam group 1 and beam group 2. 
+ """ + # Open EK Raw file and Compute Sv + ed = ep.open_raw(file, sonar_model=sonar_model) + ds_Sv = ep.calibrate.compute_Sv(ed, **compute_Sv_kwargs) + + # Compute `depth` using beam angle values + ds_Sv = ep.consolidate.add_depth(ds_Sv, ed, use_beam_angles=True) + + # Check history attribute + history_attribute = ds_Sv["depth"].attrs["history"] + history_attribute_without_time = history_attribute[33:] + assert history_attribute_without_time == ( + f". depth` calculated using: Sv `echo_range`, Echodata `{expected_beam_group_name}` Angles." + ) + + +@pytest.mark.integration +def test_add_depth_with_external_glider_depth_and_tilt_array(): + """ + Test add_depth with external glider depth offset and tilt array data. + """ + # Open RAW + ed = ep.open_raw( + raw_file="echopype/test_data/azfp/rutgers_glider_external_nc/18011107.01A", + xml_path="echopype/test_data/azfp/rutgers_glider_external_nc/18011107.XML", + sonar_model="azfp" + ) + + # Open external glider dataset + glider_ds = xr.open_dataset( + "echopype/test_data/azfp/rutgers_glider_external_nc/ru32-20180109T0531-profile-sci-delayed-subset.nc", + engine="netcdf4" + ) + + # Grab external environment parameters + env_params_means = {} + for env_var in ["temperature", "salinity", "pressure"]: + env_params_means[env_var] = float(glider_ds[env_var].mean().values) + + # Compute Sv with external environment parameters + ds_Sv = ep.calibrate.compute_Sv(ed, env_params=env_params_means) + + # Grab pitch and roll from platform group + pitch = np.rad2deg(glider_ds["m_pitch"]) + roll = np.rad2deg(glider_ds["m_roll"]) + + # Compute tilt in degrees from pitch roll rotations + yaw = np.zeros_like(pitch.values) + yaw_pitch_roll_euler_angles_stack = np.column_stack([yaw, pitch.values, roll.values]) + yaw_rot_pitch_roll = R.from_euler("ZYX", yaw_pitch_roll_euler_angles_stack, degrees=True) + glider_tilt = yaw_rot_pitch_roll.as_matrix()[:, -1, -1] + glider_tilt = xr.DataArray( + glider_tilt, dims="time", coords={"time": glider_ds["time"]} + ) + glider_tilt = np.rad2deg(glider_tilt) + + # Add auxiliary depth and tilt + ds_Sv_with_depth = ep.consolidate.add_depth( + ds_Sv, + depth_offset=glider_ds["depth"].dropna("time"), + tilt=glider_tilt.dropna("time") + ) + depth_da = ds_Sv_with_depth["depth"] + + # Align glider depth and glider tilt to ping time + glider_depth_aligned = align_to_ping_time( + glider_ds["depth"].dropna("time"), "time", ds_Sv["ping_time"], + ) + glider_tilt_aligned = align_to_ping_time( + glider_tilt.dropna("time"), "time", ds_Sv["ping_time"], + ) + + # Compute expected depth + expected_depth_da = ( + glider_depth_aligned + ( + ds_Sv["echo_range"] * np.cos(np.deg2rad(glider_tilt_aligned)) + ) + ) + + # Check that the two depth arrays are equal + assert np.allclose(expected_depth_da, depth_da, equal_nan=True) + + +@pytest.mark.unit +def test_multi_dim_depth_offset_and_tilt_array_error(): + """ + Test that the correct `ValueError`s are raised when a multi-dimensional + array is passed into `add_depth` for the `depth_offset` and `tilt` + arguments. 
+ """ + # Open EK Raw file and Compute Sv + ed = ep.open_raw( + "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", + sonar_model="EK80" + ) + ds_Sv = ep.calibrate.compute_Sv(ed, **{"waveform_mode":"CW", "encode_mode":"power"}) + + # Multi-dimensional mock array + multi_dim_da = xr.DataArray( + np.random.randn(5, 3), + dims=("x", "y"), + coords={"x": range(5), "y": range(3)} + ) + + # Test `add_depth` with multi-dim `depth_offset` + with pytest.raises( + ValueError, + match=( + "If depth_offset is passed in as an xr.DataArray, " + "it must contain a single dimension." + ) + ): + ep.consolidate.add_depth(ds_Sv, depth_offset=multi_dim_da) + + # Test `add_depth` with multi-dim `tilt` + with pytest.raises( + ValueError, + match=( + "If tilt is passed in as an xr.DataArray, " + "it must contain a single dimension." + ) + ): + ep.consolidate.add_depth(ds_Sv, tilt=multi_dim_da) diff --git a/echopype/tests/consolidate/test_add_location.py b/echopype/tests/consolidate/test_add_location.py index edc9a418a..18ab0b226 100644 --- a/echopype/tests/consolidate/test_add_location.py +++ b/echopype/tests/consolidate/test_add_location.py @@ -5,11 +5,11 @@ import xarray as xr import echopype as ep -from echopype.consolidate.loc_utils import sel_interp +from echopype.consolidate.loc_utils import sel_nmea @pytest.mark.unit -def test_sel_interp_value_error(): +def test_sel_nmea_value_error(): """ Check that the appropriate ValueError is raised when nmea_sentence!=None and datagram_type!=None. This would imply NMEA sentence selection of location variable not from the NMEA datagrams, which @@ -19,12 +19,10 @@ def test_sel_interp_value_error(): with pytest.raises(ValueError) as exc_info: # Pass in non-None values for nmea_sentence and datagram_type and leave the rest as # None (blank) since those are not needed to test this part of the function - sel_interp( - ds=None, + sel_nmea( echodata=None, - datagram_type="MRU1", loc_name=None, - time_dim_name=None, + datagram_type="MRU1", nmea_sentence="GGA" ) assert ("If datagram_type is not `None`, then `nmea_sentence` cannot be specified.") == str(exc_info.value) @@ -162,7 +160,11 @@ def _tests(ds_test, location_type, nmea_sentence=None): position_var = ed["Platform"][ed_position] if nmea_sentence: position_var = position_var[ed["Platform"]["sentence_type"] == nmea_sentence] - position_interp = position_var.interp(time1=ds_test["ping_time"]) + position_interp = position_var.interp( + {"time1": ds_test["ping_time"]}, + method="linear", + kwargs={"fill_value": "extrapolate"}, + ) # interpolated values are identical assert np.allclose(ds_test[ds_position].values, position_interp.values, equal_nan=True) # noqa elif location_type == "fixed-location": diff --git a/echopype/tests/consolidate/test_consolidate_integration.py b/echopype/tests/consolidate/test_consolidate_integration.py index 75a0f7de9..03341e07d 100644 --- a/echopype/tests/consolidate/test_consolidate_integration.py +++ b/echopype/tests/consolidate/test_consolidate_integration.py @@ -7,16 +7,10 @@ import pytest import numpy as np -import pandas as pd -import xarray as xr import scipy.io as io import echopype as ep from typing import List -from echopype.consolidate.ek_depth_utils import ( - ek_use_platform_vertical_offsets, ek_use_platform_angles, ek_use_beam_angles -) - """ For future reference: @@ -128,543 +122,6 @@ def test_swap_dims_channel_frequency(test_data_samples): assert str(e) == dup_freq_valueerror -def _build_ds_Sv(channel, range_sample, ping_time, sample_interval): - 
return xr.Dataset( - data_vars={ - "Sv": ( - ("channel", "range_sample", "ping_time"), - np.random.random((len(channel), range_sample.size, ping_time.size)), - ), - "echo_range": ( - ("channel", "range_sample", "ping_time"), - ( - np.swapaxes(np.tile(range_sample, (len(channel), ping_time.size, 1)), 1, 2) - * sample_interval - ), - ), - }, - coords={ - "channel": channel, - "range_sample": range_sample, - "ping_time": ping_time, - }, - ) - - -@pytest.mark.unit -def test_ek_use_platform_vertical_offsets_output(): - """ - Test `use_platform_vertical_offsets` outputs. - """ - ping_time_da = xr.DataArray(pd.date_range(start="2024-07-04", periods=5, freq="4h"), dims=("ping_time")) - time2_da = xr.DataArray(pd.date_range(start="2024-07-04", periods=4, freq="5h"), dims=("time2")) - platform_ds = xr.Dataset( - { - "water_level": xr.DataArray( - [1.5, 0.5, 0.0, 1.0], - dims=("time2") - ), - "vertical_offset": xr.DataArray( - [1.0, 0.0, 0.0, 1.0], - dims=("time2") - ), - "transducer_offset_z": xr.DataArray( - [3.0, 1.5, 0.0, 11.15], - dims=("time2") - ), - }, - coords={"time2": time2_da} - ) - transducer_depth = ep.consolidate.ek_depth_utils.ek_use_platform_vertical_offsets( - platform_ds, - ping_time_da - ) - - # Check transducer depth values - assert np.allclose(transducer_depth.values, np.array([0.5, 1.0, 0.0, 0.0, 9.15])) - - -@pytest.mark.unit -def test_ek_use_platform_angles_output(): - """ - Test `use_platform_angles` outputs for 2 times when the boat is completely sideways - and 1 time when the boat has no sideways component (completely straight on z-axis). - """ - # Create a Dataset with ping-time-specific pitch and roll arc degrees - # (with possible range -90 deg to 90 deg). - # During the 1st and 2nd time2, the platform is completely sideways so the echo range scaling - # should be 0 (i.e zeros out the entire depth). - # During the 3rd time2, the platform is completely vertical so the echo range scaling should - # be 1 (i.e no change). - # During the 4th time2, the platform is tilted by 45 deg so the echo range scaling should - # be 1/sqrt(2). - ping_time_da = xr.DataArray(pd.date_range(start="2024-07-04", periods=5, freq="4h"), dims=("ping_time")) - time2_da = xr.DataArray(pd.date_range(start="2024-07-04", periods=4, freq="5h"), dims=("time2")) - platform_ds = xr.Dataset( - { - "pitch": xr.DataArray( - [-90, 0, 0, -45], - dims=("time2") - ), - "roll": xr.DataArray( - [0, 90, 0, 0], - dims=("time2") - ), - }, - coords={"time2": time2_da} - ) - echo_range_scaling = ep.consolidate.ek_depth_utils.ek_use_platform_angles(platform_ds, ping_time_da) - - # The two 1.0s here are from the interpolation - assert np.allclose(echo_range_scaling.values, np.array([0.0, 0.0, 1.0, 1.0, 1/np.sqrt(2)])) - - -@pytest.mark.unit -def test_ek_use_beam_angles_output(): - """ - Test `use_beam_angle` outputs for 2 sideways looking beams and 1 vertical looking beam. - """ - # Create a Dataset with channel-specific beam direction vectors - # In channels 1 and 2, the transducers are not pointing vertically at all so the echo - # range scaling should be 0 (i.e zeros out the entire depth). - # In channel 3, the transducer is completely vertical so the echo range scaling should - # be 1 (i.e no change). - # In channel 4, the transducer is tilted to the x direction by 30 deg, so the - # echo range scaling should be sqrt(3)/2. 
- channel_da = xr.DataArray(["chan1", "chan2", "chan3", "chan4"], dims=("channel")) - beam_ds = xr.Dataset( - { - "beam_direction_x": xr.DataArray([1, 0, 0, 1/2], dims=("channel")), - "beam_direction_y": xr.DataArray([0, 1, 0, 0], dims=("channel")), - "beam_direction_z": xr.DataArray([0, 0, 1, np.sqrt(3)/2], dims=("channel")), - }, - coords={"channel": channel_da} - ) - echo_range_scaling = ep.consolidate.ek_depth_utils.ek_use_beam_angles(beam_ds) - assert np.allclose(echo_range_scaling.values, np.array([0.0, 0.0, 1.0, np.sqrt(3)/2])) - - -@pytest.mark.integration -@pytest.mark.parametrize("file, sonar_model, compute_Sv_kwargs", [ - ( - "echopype/test_data/ek60/NBP_B050N-D20180118-T090228.raw", - "EK60", - {} - ), - ( - "echopype/test_data/ek60/ncei-wcsd/Summer2017-D20170620-T021537.raw", - "EK60", - {} - ), - ( - "echopype/test_data/ek80/ncei-wcsd/SH1707/Reduced_D20170826-T205615.raw", - "EK80", - {"waveform_mode":"BB", "encode_mode":"complex"} - ), - ( - "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", - "EK80", - {"waveform_mode":"CW", "encode_mode":"power"} - ) -]) -def test_ek_depth_utils_dims(file, sonar_model, compute_Sv_kwargs): - """ - Tests `ek_use_platform_vertical_offsets`, `ek_use_platform_angles`, and - `ek_use_beam_angles` for correct dimensions. - """ - # Open EK Raw file and Compute Sv - ed = ep.open_raw(file, sonar_model=sonar_model) - ds_Sv = ep.calibrate.compute_Sv(ed, **compute_Sv_kwargs) - - # Check dimensions for using EK platform vertical offsets to compute - # `transducer_depth`. - transducer_depth = ek_use_platform_vertical_offsets( - platform_ds=ed["Platform"], ping_time_da=ds_Sv["ping_time"] - ) - assert transducer_depth.dims == ('channel', 'ping_time') - assert transducer_depth["channel"].equals(ds_Sv["channel"]) - assert transducer_depth["ping_time"].equals(ds_Sv["ping_time"]) - - # Check dimensions for using EK platform angles to compute - # `platform_echo_range_scaling`. - platform_echo_range_scaling = ek_use_platform_angles(platform_ds=ed["Platform"], ping_time_da=ds_Sv["ping_time"]) - assert platform_echo_range_scaling.dims == ('ping_time',) - assert platform_echo_range_scaling["ping_time"].equals(ds_Sv["ping_time"]) - - # Check dimensions for using EK beam angles to compute `beam_echo_range_scaling`. - beam_echo_range_scaling = ek_use_beam_angles(beam_ds=ed["Sonar/Beam_group1"]) - assert beam_echo_range_scaling.dims == ('channel',) - assert beam_echo_range_scaling["channel"].equals(ds_Sv["channel"]) - - -@pytest.mark.integration -def test_ek_depth_utils_group_variable_NaNs_logger_warnings(caplog): - """ - Tests `ek_use_platform_vertical_offsets`, `ek_use_platform_angles`, and - `ek_use_beam_angles` for correct logger warnings when NaNs exist in group - variables. 
- """ - # Open EK Raw file and Compute Sv - ed = ep.open_raw( - "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", - sonar_model="EK80" - ) - ds_Sv = ep.calibrate.compute_Sv(ed, **{"waveform_mode":"CW", "encode_mode":"power"}) - - # Set first index of group variables to NaN - ed["Platform"]["water_level"] = np.nan # Is a scalar - ed["Platform"]["vertical_offset"].values[0] = np.nan - ed["Platform"]["transducer_offset_z"].values[0] = np.nan - ed["Platform"]["pitch"].values[0] = np.nan - ed["Platform"]["roll"].values[0] = np.nan - ed["Sonar/Beam_group1"]["beam_direction_x"].values[0] = np.nan - ed["Sonar/Beam_group1"]["beam_direction_y"].values[0] = np.nan - ed["Sonar/Beam_group1"]["beam_direction_z"].values[0] = np.nan - - # Turn on logger verbosity - ep.utils.log.verbose(override=False) - - # Run EK depth util functions: - ek_use_platform_vertical_offsets(platform_ds=ed["Platform"], ping_time_da=ds_Sv["ping_time"]) - ek_use_platform_angles(platform_ds=ed["Platform"], ping_time_da=ds_Sv["ping_time"]) - ek_use_beam_angles(beam_ds=ed["Sonar/Beam_group1"]) - - # Set (group, variable) name pairs - group_variable_name_pairs = [ - ["Platform", "water_level"], - ["Platform", "vertical_offset"], - ["Platform", "transducer_offset_z"], - ["Platform", "pitch"], - ["Platform", "roll"], - ["Sonar/Beam_group1", "beam_direction_x"], - ["Sonar/Beam_group1", "beam_direction_y"], - ["Sonar/Beam_group1", "beam_direction_z"], - ] - - # Check if the expected warnings are logged - for group_name, variable_name in group_variable_name_pairs: - expected_warning = ( - f"The Echodata `{group_name}` group `{variable_name}` variable array contains " - "NaNs. This will result in NaNs in the final `depth` array. Consider filling the " - "NaNs and calling `.add_depth(...)` again." - ) - assert any(expected_warning in record.message for record in caplog.records) - - # Turn off logger verbosity - ep.utils.log.verbose(override=True) - - -@pytest.mark.integration -def test_add_depth_tilt_depth_use_arg_logger_warnings(caplog): - """ - Tests warnings when `tilt` and `depth_offset` are being passed in when other - `use_*` arguments are passed in as `True`. - """ - # Open EK Raw file and Compute Sv - ed = ep.open_raw( - "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", - sonar_model="EK80" - ) - ds_Sv = ep.calibrate.compute_Sv(ed, **{"waveform_mode":"CW", "encode_mode":"power"}) - - # Turn on logger verbosity - ep.utils.log.verbose(override=False) - - # Run `add_depth` with `tilt`, `depth_offset` as Non-NaN, using beam group angles, - # and platform vertical offset values - ep.consolidate.add_depth( - ds_Sv, - ed, - depth_offset=9.15, - tilt=0.1, - use_platform_vertical_offsets=True, - use_beam_angles=True, - ) - - # Check if the expected warnings are logged - depth_offset_warning = ( - "When `depth_offset` is specified, platform vertical offset " - "variables will not be used." - ) - tilt_warning = ( - "When `tilt` is specified, beam/platform angle variables will not be used." - ) - for warning in [depth_offset_warning, tilt_warning]: - assert any(warning in record.message for record in caplog.records) - - # Turn off logger verbosity - ep.utils.log.verbose(override=True) - - -@pytest.mark.integration -def test_add_depth_without_echodata(): - """ - Test `add_depth` without using Echodata Platform or Beam groups. 
- """ - # Build test Sv dataset - channel = ["channel_0", "channel_1", "channel_2"] - range_sample = np.arange(100) - ping_time = pd.date_range(start="2022-08-10T10:00:00", end="2022-08-10T12:00:00", periods=121) - sample_interval = 0.01 - ds_Sv = _build_ds_Sv(channel, range_sample, ping_time, sample_interval) - - # User input `depth_offset` - water_level = 10 - ds_Sv_depth = ep.consolidate.add_depth(ds_Sv, depth_offset=water_level) - assert ds_Sv_depth["depth"].equals(ds_Sv["echo_range"] + water_level) - - # User input `depth_offset` and `tilt` - tilt = 15 - ds_Sv_depth = ep.consolidate.add_depth(ds_Sv, depth_offset=water_level, tilt=tilt) - assert ds_Sv_depth["depth"].equals(ds_Sv["echo_range"] * np.cos(tilt / 180 * np.pi) + water_level) - - # Inverted echosounder with `depth_offset` and `tilt` - ds_Sv_depth = ep.consolidate.add_depth(ds_Sv, depth_offset=water_level, tilt=tilt, downward=False) - assert ds_Sv_depth["depth"].equals(-1 * ds_Sv["echo_range"] * np.cos(tilt / 180 * np.pi) + water_level) - - # Check history attribute - history_attribute = ds_Sv_depth["depth"].attrs["history"] - history_attribute_without_time = history_attribute[33:] - assert history_attribute_without_time == ". depth` calculated using: Sv `echo_range`." - - -@pytest.mark.integration -def test_add_depth_errors(): - """Check if all `add_depth` errors are raised appropriately.""" - # Open EK80 Raw file and Compute Sv - ed = ep.open_raw( - "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", - sonar_model="EK80" - ) - ds_Sv = ep.calibrate.compute_Sv(ed, **{"waveform_mode":"CW", "encode_mode":"power"}) - - # Test that all three errors are called: - with pytest.raises(ValueError, match=( - "If any of `use_platform_vertical_offsets`, " - + "`use_platform_angles` " - + "or `use_beam_angles` is `True`, " - + "then `echodata` cannot be `None`." - )): - ep.consolidate.add_depth(ds_Sv, None, use_platform_angles=True) - with pytest.raises(NotImplementedError, match=( - "Computing depth with both platform and beam angles is not implemented yet." - )): - ep.consolidate.add_depth(ds_Sv, ed, use_platform_angles=True, use_beam_angles=True) - with pytest.raises(NotImplementedError, match=( - "`use_platform/beam_...` not implemented yet for `AZFP`." 
- )): - ed["Sonar"].attrs["sonar_model"] = "AZFP" - ep.consolidate.add_depth(ds_Sv, ed, use_platform_angles=True) - - -@pytest.mark.integration -@pytest.mark.parametrize("file, sonar_model, compute_Sv_kwargs", [ - ( - "echopype/test_data/ek60/NBP_B050N-D20180118-T090228.raw", - "EK60", - {} - ), - ( - "echopype/test_data/ek60/ncei-wcsd/Summer2017-D20170620-T021537.raw", - "EK60", - {} - ), - ( - "echopype/test_data/ek80/ncei-wcsd/SH1707/Reduced_D20170826-T205615.raw", - "EK80", - {"waveform_mode":"BB", "encode_mode":"complex"} - ), - ( - "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", - "EK80", - {"waveform_mode":"CW", "encode_mode":"power"} - ) -]) -def test_add_depth_EK_with_platform_vertical_offsets(file, sonar_model, compute_Sv_kwargs): - """Test `depth` values when using EK Platform vertical offset values to compute it.""" - # Open EK Raw file and Compute Sv - ed = ep.open_raw(file, sonar_model=sonar_model) - ds_Sv = ep.calibrate.compute_Sv(ed, **compute_Sv_kwargs) - - # Subset ds_Sv to include only first 5 `range_sample` coordinates - # since the test takes too long to iterate through every value - ds_Sv = ds_Sv.isel(range_sample=slice(0,5)) - - # Replace any Platform Vertical Offset NaN values with 0 - ed["Platform"]["water_level"] = ed["Platform"]["water_level"].fillna(0) - ed["Platform"]["vertical_offset"] = ed["Platform"]["vertical_offset"].fillna(0) - ed["Platform"]["transducer_offset_z"] = ed["Platform"]["transducer_offset_z"].fillna(0) - - # Compute `depth` using platform vertical offset values - ds_Sv_with_depth = ep.consolidate.add_depth(ds_Sv, ed, use_platform_vertical_offsets=True) - - # Check history attribute - history_attribute = ds_Sv_with_depth["depth"].attrs["history"] - history_attribute_without_time = history_attribute[33:] - assert history_attribute_without_time == ( - ". depth` calculated using: Sv `echo_range`, Echodata `Platform` Vertical Offsets." 
- ) - - # Compute transducer depth - transducer_depth = ek_use_platform_vertical_offsets(ed["Platform"], ds_Sv["ping_time"]) - - # Check if depth value is equal to corresponding `echo_range` value + transducer depth value - assert np.allclose( - ds_Sv_with_depth["depth"].data, - (ds_Sv["echo_range"] + transducer_depth).data, - rtol=1e-10, - atol=1e-10 - ) - - -@pytest.mark.integration -@pytest.mark.parametrize("file, sonar_model, compute_Sv_kwargs", [ - ( - "echopype/test_data/ek60/NBP_B050N-D20180118-T090228.raw", - "EK60", - {} - ), - ( - "echopype/test_data/ek60/ncei-wcsd/Summer2017-D20170620-T021537.raw", - "EK60", - {} - ), - ( - "echopype/test_data/ek80/ncei-wcsd/SH1707/Reduced_D20170826-T205615.raw", - "EK80", - {"waveform_mode":"BB", "encode_mode":"complex"} - ), - ( - "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", - "EK80", - {"waveform_mode":"CW", "encode_mode":"power"} - ) -]) -def test_add_depth_EK_with_platform_angles(file, sonar_model, compute_Sv_kwargs): - """Test `depth` values when using EK Platform angles to compute it.""" - # Open EK Raw file and Compute Sv - ed = ep.open_raw(file, sonar_model=sonar_model) - ds_Sv = ep.calibrate.compute_Sv(ed, **compute_Sv_kwargs) - - # Replace any Beam Angle NaN values with 0 - ed["Platform"]["pitch"] = ed["Platform"]["pitch"].fillna(0) - ed["Platform"]["roll"] = ed["Platform"]["roll"].fillna(0) - - # Compute `depth` using platform angle values - ds_Sv_with_depth = ep.consolidate.add_depth(ds_Sv, ed, use_platform_angles=True) - - # Check history attribute - history_attribute = ds_Sv_with_depth["depth"].attrs["history"] - history_attribute_without_time = history_attribute[33:] - assert history_attribute_without_time == ( - ". depth` calculated using: Sv `echo_range`, Echodata `Platform` Angles." 
- ) - - # Compute transducer depth - echo_range_scaling = ek_use_platform_angles(ed["Platform"], ds_Sv["ping_time"]) - - # Check if depth is equal to echo range scaling value * echo range - assert np.allclose( - ds_Sv_with_depth["depth"].data, - (echo_range_scaling * ds_Sv["echo_range"]).transpose("channel", "ping_time", "range_sample").data, - equal_nan=True - ) - - -@pytest.mark.integration -@pytest.mark.parametrize("file, sonar_model, compute_Sv_kwargs", [ - ( - "echopype/test_data/ek60/NBP_B050N-D20180118-T090228.raw", - "EK60", - {} - ), - ( - "echopype/test_data/ek60/ncei-wcsd/Summer2017-D20170620-T021537.raw", - "EK60", - {} - ), - ( - "echopype/test_data/ek80/ncei-wcsd/SH1707/Reduced_D20170826-T205615.raw", - "EK80", - {"waveform_mode":"BB", "encode_mode":"complex"} - ), - ( - "echopype/test_data/ek80/ncei-wcsd/SH2106/EK80/Reduced_Hake-D20210701-T131621.raw", - "EK80", - {"waveform_mode":"CW", "encode_mode":"power"} - ) -]) -def test_add_depth_EK_with_beam_angles(file, sonar_model, compute_Sv_kwargs): - """Test `depth` values when using EK Beam angles to compute it.""" - # Open EK Raw file and Compute Sv - ed = ep.open_raw(file, sonar_model=sonar_model) - ds_Sv = ep.calibrate.compute_Sv(ed, **compute_Sv_kwargs) - - # Replace Beam Angle NaN values - ed["Sonar/Beam_group1"]["beam_direction_x"] = ed["Sonar/Beam_group1"]["beam_direction_x"].fillna(0) - ed["Sonar/Beam_group1"]["beam_direction_y"] = ed["Sonar/Beam_group1"]["beam_direction_y"].fillna(0) - ed["Sonar/Beam_group1"]["beam_direction_z"] = ed["Sonar/Beam_group1"]["beam_direction_z"].fillna(1) - - # Compute `depth` using beam angle values - ds_Sv_with_depth = ep.consolidate.add_depth(ds_Sv, ed, use_beam_angles=True) - - # Check history attribute - history_attribute = ds_Sv_with_depth["depth"].attrs["history"] - history_attribute_without_time = history_attribute[33:] - assert history_attribute_without_time == ( - ". depth` calculated using: Sv `echo_range`, Echodata `Beam_group1` Angles." - ) - - # Compute echo range scaling values - echo_range_scaling = ek_use_beam_angles(ed["Sonar/Beam_group1"]) - - # Check if depth is equal to echo range scaling value * echo range - assert np.allclose( - ds_Sv_with_depth["depth"].data, - (echo_range_scaling * ds_Sv["echo_range"]).transpose("channel", "ping_time", "range_sample").data, - equal_nan=True - ) - - -@pytest.mark.integration -@pytest.mark.parametrize("file, sonar_model, compute_Sv_kwargs, expected_beam_group_name", [ - ( - "echopype/test_data/ek80/Summer2018--D20180905-T033113.raw", - "EK80", - {"waveform_mode":"BB", "encode_mode":"complex"}, - "Beam_group1" - ), - ( - "echopype/test_data/ek80/Summer2018--D20180905-T033113.raw", - "EK80", - {"waveform_mode":"CW", "encode_mode":"power"}, - "Beam_group2" - ) -]) -def test_add_depth_EK_with_beam_angles_with_different_beam_groups( - file, sonar_model, compute_Sv_kwargs, expected_beam_group_name -): - """ - Test `depth` channel when using EK Beam angles from two separate calibrated - Sv datasets (that are from the same raw file) using two differing pairs of - calibration key word arguments. The two tests should correspond to different - beam groups i.e. beam group 1 and beam group 2. 
- """ - # Open EK Raw file and Compute Sv - ed = ep.open_raw(file, sonar_model=sonar_model) - ds_Sv = ep.calibrate.compute_Sv(ed, **compute_Sv_kwargs) - - # Compute `depth` using beam angle values - ds_Sv = ep.consolidate.add_depth(ds_Sv, ed, use_beam_angles=True) - - # Check history attribute - history_attribute = ds_Sv["depth"].attrs["history"] - history_attribute_without_time = history_attribute[33:] - assert history_attribute_without_time == ( - f". depth` calculated using: Sv `echo_range`, Echodata `{expected_beam_group_name}` Angles." - ) - - def _create_array_list_from_echoview_mats(paths_to_echoview_mat: List[pathlib.Path]) -> List[np.ndarray]: """ Opens each mat file in ``paths_to_echoview_mat``, selects the first ``ping_time``, diff --git a/echopype/tests/utils/test_align.py b/echopype/tests/utils/test_align.py new file mode 100644 index 000000000..5d5497752 --- /dev/null +++ b/echopype/tests/utils/test_align.py @@ -0,0 +1,281 @@ +import xarray as xr +import numpy as np +import pytest + +import echopype as ep +from echopype.utils.align import align_to_ping_time + + +@pytest.mark.unit +@pytest.mark.parametrize( + "method, expected_aligned_da", + [ + ( + "nearest", + xr.DataArray( + [0.0, 2.0, 3.0], + coords={ + "ping_time": np.array( + ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"], + dtype="datetime64[ns]" + ) + }, + dims=["ping_time"] + ) + ), + ( + "linear", + xr.DataArray( + [0.5, 2.0, 3.5], + coords={ + "ping_time": np.array( + ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"], + dtype="datetime64[ns]" + ) + }, + dims=["ping_time"] + ) + ), + ] +) +def test_align_to_ping_time_values(method, expected_aligned_da): + """ + Test 'nearest' and 'linear' `align_to_ping_time` output values on mock external and + ping time DataArrays. 
+    """
+    # Create external and sounder DataArrays
+    external_da = xr.DataArray(
+        [0, 1, 2, 3],
+        coords={
+            "time1": np.array(
+                ["2017-06-20T01:10:00", "2017-06-20T01:20:00", "2017-06-20T01:30:00", "2017-06-20T01:40:00"],
+                dtype="datetime64[ns]"
+            )
+        },
+        dims=["time1"]
+    )
+    sounder_da = xr.DataArray(
+        [10, 20, 30],
+        coords={
+            "ping_time": np.array(
+                ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"],
+                dtype="datetime64[ns]"
+            )
+        },
+        dims=["ping_time"]
+    )
+
+    # Align external DataArray
+    aligned_da = align_to_ping_time(external_da, "time1", sounder_da["ping_time"], method=method)
+
+    # Check that expected_aligned_da equals aligned_da
+    assert expected_aligned_da.equals(aligned_da)
+
+
+
+@pytest.mark.unit
+@pytest.mark.parametrize(
+    "external_da, expected_aligned_da",
+    [
+        (
+            xr.DataArray(
+                [2.0],
+                coords={
+                    "time1": np.array(
+                        ["2017-06-20T01:10:00"],  # Before earliest ping time
+                        dtype="datetime64[ns]"
+                    )
+                },
+                dims=["time1"]
+            ),
+            xr.DataArray(
+                [2.0, 2.0, 2.0],
+                coords={
+                    "ping_time": np.array(
+                        ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"],
+                        dtype="datetime64[ns]"
+                    )
+                },
+                dims=["ping_time"]
+            )
+        ),
+        (
+            xr.DataArray(
+                [2.0],
+                coords={
+                    "time1": np.array(
+                        ["2017-06-20T01:20:00"],  # Between min and max ping times
+                        dtype="datetime64[ns]"
+                    )
+                },
+                dims=["time1"]
+            ),
+            xr.DataArray(
+                [2.0, 2.0, 2.0],
+                coords={
+                    "ping_time": np.array(
+                        ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"],
+                        dtype="datetime64[ns]"
+                    )
+                },
+                dims=["ping_time"]
+            )
+        ),
+        (
+            xr.DataArray(
+                [2.0],
+                coords={
+                    "time1": np.array(
+                        ["2017-06-20T01:50:00"],  # After latest ping time
+                        dtype="datetime64[ns]"
+                    )
+                },
+                dims=["time1"]
+            ),
+            xr.DataArray(
+                [2.0, 2.0, 2.0],
+                coords={
+                    "ping_time": np.array(
+                        ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"],
+                        dtype="datetime64[ns]"
+                    )
+                },
+                dims=["ping_time"]
+            )
+        ),
+        (
+            xr.DataArray(
+                [],
+                coords={
+                    "time1": np.array([])
+                },
+                dims=["time1"]
+            ),
+            xr.DataArray(
+                [np.nan, np.nan, np.nan],
+                coords={
+                    "ping_time": np.array(
+                        ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"],
+                        dtype="datetime64[ns]"
+                    )
+                },
+                dims=["ping_time"]
+            )
+        ),
+    ]
+)
+def test_align_to_ping_time_values_length_1_and_0(external_da, expected_aligned_da):
+    """
+    Test `align_to_ping_time` output values on mock external and
+    ping time DataArrays that have length 1 or length 0 on the time
+    dimension.
+    """
+    # Create sounder DataArray
+    sounder_da = xr.DataArray(
+        [10, 20, 30],
+        coords={
+            "ping_time": np.array(
+                ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"],
+                dtype="datetime64[ns]"
+            )
+        },
+        dims=["ping_time"]
+    )
+
+    # Align external DataArray
+    aligned_da = align_to_ping_time(external_da, "time1", sounder_da["ping_time"])
+
+    # Check that expected_aligned_da equals aligned_da
+    assert expected_aligned_da.equals(aligned_da)
+
+
+@pytest.mark.unit
+def test_align_to_ping_time_values_when_matching_time_values():
+    """
+    Test `align_to_ping_time` output values on mock external and ping time DataArrays
+    where the time values of the external DataArray and the ping time DataArray
+    are equal.
+    """
+    # Create external, sounder, and expected aligned output DataArrays
+    external_da = xr.DataArray(
+        [0, 1, 2],
+        coords={
+            "time1": np.array(
+                ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"],
+                dtype="datetime64[ns]"
+            )
+        },
+        dims=["time1"]
+    )
+    sounder_da = xr.DataArray(
+        [10, 20, 30],
+        coords={
+            "ping_time": np.array(
+                ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"],
+                dtype="datetime64[ns]"
+            )
+        },
+        dims=["ping_time"]
+    )
+    expected_aligned_da = xr.DataArray(
+        [0, 1, 2],
+        coords={
+            "ping_time": np.array(
+                ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"],
+                dtype="datetime64[ns]"
+            )
+        },
+        dims=["ping_time"]
+    )
+
+    # Align external DataArray
+    aligned_da = align_to_ping_time(external_da, "time1", sounder_da["ping_time"])
+
+    # Check that expected_aligned_da equals aligned_da
+    assert expected_aligned_da.equals(aligned_da)
+
+
+@pytest.mark.integration
+def test_align_to_ping_time_glider_azfp():
+    """
+    Test aligning external Glider pitch data to Echosounder ping time.
+    """
+    # Open RAW and extract ping time
+    ping_time_da = ep.open_raw(
+        raw_file="echopype/test_data/azfp/rutgers_glider_external_nc/18011107.01A",
+        xml_path="echopype/test_data/azfp/rutgers_glider_external_nc/18011107.XML",
+        sonar_model="azfp"
+    )["Sonar/Beam_group1"]["ping_time"]
+
+    # Open external glider dataset
+    glider_ds = xr.open_dataset(
+        "echopype/test_data/azfp/rutgers_glider_external_nc/ru32-20180109T0531-profile-sci-delayed-subset.nc",
+        engine="netcdf4"
+    )
+
+    # Drop NaNs from pitch and align data array
+    aligned_da = align_to_ping_time(glider_ds["m_pitch"].dropna("time"), "time", ping_time_da)
+
+    # Check that all values in the aligned DataArray are non-NaN
+    assert np.all(~np.isnan(aligned_da))
+
+    # Test if aligned_da matches interpolation
+    assert np.allclose(
+        aligned_da,
+        glider_ds["m_pitch"].dropna("time").interp(
+            {"time": ping_time_da},
+            method="nearest",
+            kwargs={"fill_value": "extrapolate"},
+        ),
+        equal_nan=True,
+    )
+
+    # To check that the correct extrapolation calculation was made, we take all values with times
+    # before the first non-NaN pitch value and check that there is a single unique value and that
+    # this single unique value matches the first non-NaN pitch value. Extrapolation was done on the
+    # 'left side' of this array because the Echosounder began pinging before the first non-NaN pitch value.
+    first_non_NaN_pitch = glider_ds["m_pitch"].dropna("time").isel(time=0)
+    assert np.isclose(
+        first_non_NaN_pitch.values,
+        np.unique(aligned_da.where(first_non_NaN_pitch["time"].values > aligned_da["ping_time"], drop=True))
+    )
diff --git a/echopype/utils/align.py b/echopype/utils/align.py
new file mode 100644
index 000000000..37373c624
--- /dev/null
+++ b/echopype/utils/align.py
@@ -0,0 +1,61 @@
+import numpy as np
+import xarray as xr
+
+
+def align_to_ping_time(
+    external_da: xr.DataArray,
+    external_time_name: str,
+    ping_time_da: xr.DataArray,
+    method: str = "nearest",
+) -> xr.DataArray:
+    """
+    Aligns an external DataArray time-wise with the echosounder ping time DataArray.
+
+    Parameters
+    ----------
+    external_da : xr.DataArray
+        External non-echosounder data.
+    external_time_name : str
+        Time variable name of the external non-echosounder data.
+    ping_time_da : xr.DataArray
+        Echosounder ping time.
+    method : str, default 'nearest'
+        Interpolation method. Not used if the external time matches ping time exactly or if
+        the external DataArray has length 0 or 1 along its time dimension.
+        For more interpolation methods please visit: https://docs.xarray.dev/en/stable/generated/xarray.DataArray.interp.html # noqa
+
+    Returns
+    -------
+    aligned_da : xr.DataArray
+        External non-echosounder data that is now aligned with the echosounder ping time.
+    """
+    # Rename the time dimension if the external time values already match ping time
+    if ping_time_da.equals(
+        external_da[external_time_name].rename({external_time_name: "ping_time"})
+    ):
+        return external_da.rename({external_time_name: "ping_time"})
+    elif len(external_da[external_time_name]) == 1:
+        # Broadcast the single external value to match the ping time length
+        return xr.DataArray(
+            data=external_da.values[0] * np.ones(len(ping_time_da), dtype=np.float64),
+            dims=["ping_time"],
+            coords={"ping_time": ping_time_da.values},
+            attrs=external_da.attrs,
+        )
+    elif len(external_da[external_time_name]) == 0:
+        # Create an all NaN array matching the length of the ping time array
+        data = np.full(len(ping_time_da), np.nan, dtype=np.float64)
+        return xr.DataArray(
+            data=data,
+            dims=["ping_time"],
+            coords={"ping_time": ping_time_da.values},
+            attrs=external_da.attrs,
+        )
+    else:
+        return external_da.interp(
+            {external_time_name: ping_time_da},
+            method=method,
+            # More details for `fill_value` and `extrapolate` can be found here:
+            # https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.interp1d.html # noqa
+            kwargs={"fill_value": "extrapolate"},
+        ).drop_vars(external_time_name)
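
For reference, below is a minimal usage sketch of the new `align_to_ping_time` utility added in this patch. The timestamps and values mirror the mock data used in `test_align_to_ping_time_values` above and are illustrative only:

    import numpy as np
    import xarray as xr

    from echopype.utils.align import align_to_ping_time

    # External (non-echosounder) measurement sampled on its own "time1" axis
    external_da = xr.DataArray(
        [0.0, 1.0, 2.0, 3.0],
        coords={
            "time1": np.array(
                ["2017-06-20T01:10:00", "2017-06-20T01:20:00",
                 "2017-06-20T01:30:00", "2017-06-20T01:40:00"],
                dtype="datetime64[ns]",
            )
        },
        dims=["time1"],
    )

    # Echosounder ping times to align the external data to
    ping_times = np.array(
        ["2017-06-20T01:15:00", "2017-06-20T01:30:00", "2017-06-20T01:45:00"],
        dtype="datetime64[ns]",
    )
    ping_time_da = xr.DataArray(ping_times, coords={"ping_time": ping_times}, dims=["ping_time"])

    # Interpolate (extrapolating at the edges) onto the ping_time dimension
    aligned_da = align_to_ping_time(external_da, "time1", ping_time_da, method="linear")
    # aligned_da values -> [0.5, 2.0, 3.5] along "ping_time"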