Skip to content

Commit

Permalink
Merge pull request #199 from Open-EO/issue154
Browse files Browse the repository at this point in the history
Issue #154: fix band pushdown
  • Loading branch information
jdries committed Jun 1, 2023
2 parents 6a0c0bb + 377f8dc commit fdf43b0
Show file tree
Hide file tree
Showing 3 changed files with 651 additions and 13 deletions.
29 changes: 16 additions & 13 deletions openeo_driver/dry_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -357,16 +357,18 @@ def get_source_constraints(self, merge=True) -> List[SourceConstraint]:
if subgraph_without_blocking_processes is not None:
leaf_without_blockers = subgraph_without_blocking_processes


# 2 merge filtering arguments
args = leaf_without_blockers.get_arguments_by_operation(op)
if args:
if merge:
# Take first item (to reproduce original behavior)
# TODO: take temporal/spatial/categorical intersection instead?
# see https://github.com/Open-EO/openeo-processes/issues/201
constraints[op] = args[0]
else:
constraints[op] = args
if leaf_without_blockers is not None:
args = leaf_without_blockers.get_arguments_by_operation(op)
if args:
if merge:
# Take first item (to reproduce original behavior)
# TODO: take temporal/spatial/categorical intersection instead?
# see https://github.com/Open-EO/openeo-processes/issues/201
constraints[op] = args[0]
else:
constraints[op] = args

if "_weak_spatial_extent" in constraints:
if "spatial_extent" not in constraints:
Expand Down Expand Up @@ -483,7 +485,7 @@ def merge_cubes(self, other: 'DryRunDataCube', overlap_resolver) -> 'DryRunDataC
traces=self._traces + other._traces, data_tracer=self._data_tracer,
# TODO: properly merge (other) metadata?
metadata=self.metadata
)
)._process("merge_cubes", arguments={})

def mask_polygon(self, mask, replacement=None, inside: bool = False) -> 'DriverDataCube':
cube = self
Expand Down Expand Up @@ -572,7 +574,7 @@ def reduce_dimension(
# TODO: reduce is not necessarily global in call cases
dc = self._process("process_type", [ProcessType.GLOBAL_TIME])

return dc._process_metadata(self.metadata.reduce_dimension(dimension_name=dimension))
return dc._process_metadata(self.metadata.reduce_dimension(dimension_name=dimension))._process("reduce_dimension", arguments={})

def chunk_polygon(
self, reducer, chunks: MultiPolygon, mask_value: float, env: EvalEnv, context: Optional[dict] = None
Expand Down Expand Up @@ -637,15 +639,16 @@ def apply_tiles_spatiotemporal(self, process, context: Optional[dict] = None) ->
def apply_neighborhood(
    self, process, *, size: List[dict], overlap: List[dict], context: Optional[dict] = None, env: EvalEnv
) -> "DriverDataCube":
    """Dry-run tracking of ``apply_neighborhood``.

    Records the operation on the trace (so downstream constraint extraction
    can see it), then inspects the requested neighborhood ``size``/``overlap``
    along the temporal dimension: when the cube has a temporal dimension but
    no concrete temporal chunk size is given, the process effectively needs
    global access along time, which is recorded as a GLOBAL_TIME process type.

    :param process: the child process graph applied to each neighborhood (not
        evaluated during dry run).
    :param size: list of ``{"dimension": ..., "value": ..., ...}`` dicts.
    :param overlap: list of ``{"dimension": ..., "value": ..., ...}`` dicts.
    :param context: optional user context passed to the child process.
    :param env: evaluation environment.
    :return: a new dry-run cube with the operation recorded.
    """
    # Record the operation itself first; all return paths must include it.
    cube = self._process("apply_neighborhood", {})
    temporal_size = temporal_overlap = None
    # Index size/overlap specs by dimension name for direct lookup.
    size_dict = {e['dimension']: e for e in size}
    overlap_dict = {e['dimension']: e for e in overlap}
    if self.metadata.has_temporal_dimension():
        temporal_size = size_dict.get(self.metadata.temporal_dimension.name, None)
        temporal_overlap = overlap_dict.get(self.metadata.temporal_dimension.name, None)
        if temporal_size is None or temporal_size.get('value', None) is None:
            # No concrete temporal chunking requested: the neighborhood spans
            # the whole time dimension, so flag this as a global-time process.
            return cube._process("process_type", [ProcessType.GLOBAL_TIME])
    return cube

def atmospheric_correction(self, method: str = None, *args) -> 'DriverDataCube':
method_link = "https://remotesensing.vito.be/case/icor"
Expand Down
Loading

0 comments on commit fdf43b0

Please sign in to comment.