
Commit 766c504

Cleanup.
SpacemanPaul committed Sep 20, 2024
1 parent 06b3606 commit 766c504
Showing 1 changed file with 23 additions and 19 deletions.
42 changes: 23 additions & 19 deletions datacube/drivers/postgis/_api.py
@@ -247,6 +247,29 @@ def extract_dataset_fields(ds_metadata, fields):
return result


+# Min/Max aggregating time fields for temporal_extent methods
+time_min = DateDocField('acquisition_time_min',
+                        'Min of time when dataset was acquired',
+                        Dataset.metadata_doc,
+                        False,  # is it indexed
+                        offset=[
+                            ['properties', 'dtr:start_datetime'],
+                            ['properties', 'datetime']
+                        ],
+                        selection='least')
+
+
+time_max = DateDocField('acquisition_time_max',
+                        'Max of time when dataset was acquired',
+                        Dataset.metadata_doc,
+                        False,  # is it indexed
+                        offset=[
+                            ['properties', 'dtr:end_datetime'],
+                            ['properties', 'datetime']
+                        ],
+                        selection='greatest')
+
+
class PostgisDbAPI:
def __init__(self, parentdb, connection):
self._db = parentdb
@@ -1512,25 +1535,6 @@ def temporal_extent_by_ids(self, ids: Iterable[DSID]) -> tuple[datetime.datetime
return (self.time_min.normalise_value(tmin), self.time_max.normalise_value(tmax))
raise ValueError("no dataset ids provided")

-    time_min = DateDocField('acquisition_time_min',
-                            'Min of time when dataset was acquired',
-                            Dataset.metadata_doc,
-                            False,  # is it indexed
-                            offset=[
-                                ['properties', 'dtr:start_datetime'],
-                                ['properties', 'datetime']
-                            ],
-                            selection='least')
-
-    time_max = DateDocField('acquisition_time_max',
-                            'Max of time when dataset was acquired',
-                            Dataset.metadata_doc,
-                            False,  # is it indexed
-                            offset=[
-                                ['properties', 'dtr:end_datetime'],
-                                ['properties', 'datetime']
-                            ],
-                            selection='greatest')

def temporal_extent_full(self) -> Select:
# Hardcode eo3 standard time locations - do not use this approach in a legacy index driver.
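The change hoists the two DateDocField aggregates out of the PostgisDbAPI class to module scope, apparently so the temporal_extent methods can share them. Below is a minimal sketch of how such a field pair might be used to compute a temporal extent, assuming DateDocField exposes an alchemy_expression property (as datacube's document fields generally do); the function name and the SQLAlchemy select/func usage are illustrative assumptions, not taken from this commit.

# Hypothetical usage sketch; not part of this commit's diff.
from sqlalchemy import func, select

# Module-level fields introduced above in datacube/drivers/postgis/_api.py.
from datacube.drivers.postgis._api import time_min, time_max

def temporal_extent_sketch():
    # Each field's selection ('least'/'greatest') picks a per-dataset bound
    # from the configured metadata offsets; min()/max() then aggregate those
    # bounds across all datasets.
    return select(
        func.min(time_min.alchemy_expression),
        func.max(time_max.alchemy_expression),
    )

Keeping the fields at module scope lets a query like this refer to them without needing a PostgisDbAPI instance.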
