merge with develop
mathleur committed Sep 7, 2023
2 parents: f82c8e8 + 2907486 · commit d0108da
Showing 22 changed files with 227 additions and 176 deletions.
15 changes: 1 addition & 14 deletions .github/workflows/ci.yaml
@@ -1,25 +1,19 @@
name: ci

on:
# Trigger the workflow on push to master or develop, except tag creation
push:
branches:
- 'main'
- 'develop'

# Trigger the workflow on pull request
pull_request: ~

# Trigger the workflow manually
workflow_dispatch: ~

# Trigger after public PR approved for CI
pull_request_target:
types: [labeled]

release:
types: [created]

jobs:
qa:
name: qa
@@ -48,7 +42,6 @@ jobs:

- name: Check flake8
run: flake8 .

setup:
name: setup
runs-on: ubuntu-20.04
@@ -95,7 +88,6 @@ jobs:
run: |
echo inputs=$(echo "${{ inputs.build_package_inputs || '{}' }}" | yq eval '.' --output-format json --indent 0 -) >> $GITHUB_OUTPUT
echo inputs-for-ubuntu=$(echo "${{ inputs.build_package_inputs || '{}' }}" | yq eval '. * {"os":"ubuntu-20.04","compiler":"gnu-10","compiler_cc":"gcc-10","compiler_cxx":"g++-10","compiler_fc":"gfortran-10"}' --output-format json --indent 0 -) >> $GITHUB_OUTPUT
test:
name: test
needs:
@@ -150,16 +142,11 @@ jobs:
uses: codecov/codecov-action@v3
with:
files: coverage.xml

deploy:
needs: test

if: ${{ github.event_name == 'release' }}

name: Upload to Pypi

runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- name: Set up Python
@@ -176,4 +163,4 @@ jobs:
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
python setup.py sdist
twine upload dist/*
twine upload dist/*
12 changes: 4 additions & 8 deletions polytope/datacube/backends/FDB_datacube.py
@@ -1,12 +1,8 @@
import os
from copy import deepcopy

from .datacube import Datacube, IndexTree
import pyfdb

# TODO: probably need to do this more general...
os.environ["DYLD_LIBRARY_PATH"] = "/Users/male/build/fdb-bundle/lib"
os.environ["FDB_HOME"] = "/Users/male/git/fdb-home"
import pyfdb # noqa: E402
from .datacube import Datacube, IndexTree


def glue(path, unmap_path):
@@ -23,12 +19,12 @@ def __init__(self, config={}, axis_options={}):
self.axis_options = axis_options
self.grid_mapper = None
self.axis_counter = 0
self._axes = {}
self._axes = None
treated_axes = []
self.non_complete_axes = []
self.complete_axes = []
self.blocked_axes = []
self.transformation = {}
self.transformation = None
self.fake_axes = []

partial_request = config
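
The hunk above moves the hard-coded DYLD_LIBRARY_PATH and FDB_HOME assignments ahead of the pyfdb import, and the TODO notes that this should eventually be made more general. A minimal sketch (not part of this commit) of how those paths could be taken from the caller's environment instead, keeping the current values only as fallbacks; the helper name _configure_fdb_env is illustrative:

import os

def _configure_fdb_env():
    # Keep any values the caller has already exported; otherwise fall back to the
    # developer-machine defaults currently hard-coded in FDB_datacube.py.
    os.environ.setdefault("DYLD_LIBRARY_PATH", "/Users/male/build/fdb-bundle/lib")
    os.environ.setdefault("FDB_HOME", "/Users/male/git/fdb-home")

_configure_fdb_env()
import pyfdb  # noqa: E402  (imported only after the environment is configured, as in the diff)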
8 changes: 6 additions & 2 deletions polytope/datacube/backends/datacube.py
@@ -43,7 +43,9 @@ def _create_axes(self, name, values, transformation_type_key, transformation_opt

# first need to change the values so that we have right type
values = transformation.change_val_type(axis_name, values)
if axis_name not in self._axes.keys():
if self._axes is None:
DatacubeAxis.create_standard(axis_name, values, self)
elif axis_name not in self._axes.keys():
DatacubeAxis.create_standard(axis_name, values, self)
# add transformation tag to axis, as well as transformation options for later
setattr(self._axes[axis_name], has_transform[transformation_type_key], True) # where has_transform is a
@@ -66,7 +68,9 @@ def _check_and_add_axes(self, options, name, values):
self._add_all_transformation_axes(options, name, values)
else:
if name not in self.blocked_axes:
if name not in self._axes.keys():
if self._axes is None:
DatacubeAxis.create_standard(name, values, self)
elif name not in self._axes.keys():
DatacubeAxis.create_standard(name, values, self)

def has_index(self, path: DatacubePath, axis, index):
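
In this file (and in FDB_datacube.py above and xarray.py below) the commit changes the _axes default from an empty dict to None and adds an "is None" branch before the "not in self._axes.keys()" check, so the dictionary is only built when the first axis is registered via DatacubeAxis.create_standard (see the datacube_axis.py hunk further down). A minimal sketch of that lazy-initialisation pattern, using simplified names (ToyDatacube, add_axis and has_axis are illustrative, not the library's API):

from copy import deepcopy

class ToyDatacube:
    def __init__(self):
        self._axes = None  # built lazily when the first axis is registered

    def add_axis(self, name, axis_template):
        # Mirrors DatacubeAxis.create_standard in this commit: create the dict on
        # first use, otherwise insert into the existing one.
        if self._axes is None:
            self._axes = {name: deepcopy(axis_template)}
        else:
            self._axes[name] = deepcopy(axis_template)

    def has_axis(self, name):
        # Callers must tolerate _axes still being None, hence the extra check.
        return self._axes is not None and name in self._axes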
4 changes: 2 additions & 2 deletions polytope/datacube/backends/xarray.py
@@ -12,13 +12,13 @@ def __init__(self, dataarray: xr.DataArray, axis_options={}):
self.axis_options = axis_options
self.grid_mapper = None
self.axis_counter = 0
self._axes = {}
self._axes = None
self.dataarray = dataarray
treated_axes = []
self.non_complete_axes = []
self.complete_axes = []
self.blocked_axes = []
self.transformation = {}
self.transformation = None
self.fake_axes = []
for name, values in dataarray.coords.variables.items():
if name in dataarray.dims:
100 changes: 37 additions & 63 deletions polytope/datacube/datacube_axis.py
@@ -1,4 +1,4 @@
from abc import ABC, abstractmethod, abstractproperty
from abc import ABC, abstractmethod
from copy import deepcopy
from typing import Any, List

@@ -243,7 +243,9 @@ def unmap_total_path_to_datacube(path, unmapped_path):
# if we are on the first axis, then need to add the first val to unmapped_path
first_val = path.get(cls.name, None)
path.pop(cls.name, None)
if cls.name not in unmapped_path:
if unmapped_path is None:
unmapped_path[cls.name] = first_val
elif cls.name not in unmapped_path:
# if for some reason, the unmapped_path already has the first axis val, then don't update
unmapped_path[cls.name] = first_val
if cls.name == transformation._mapped_axes()[1]:
@@ -278,14 +280,11 @@ def find_indices_between(index_ranges, low, up, datacube, method=None):
end = idxs.index(up)
start = max(start - 1, 0)
end = min(end + 1, len(idxs))
# indexes_between = [i for i in indexes if low <= i <= up]
indexes_between = idxs[start:end]
indexes_between_ranges.append(indexes_between)
else:
indexes_between = [i for i in idxs if low <= i <= up]
indexes_between_ranges.append(indexes_between)
# indexes_between = [i for i in idxs if low <= i <= up]
# indexes_between_ranges.append(indexes_between)
return indexes_between_ranges

old_remap = cls.remap
@@ -362,14 +361,11 @@ def find_indices_between(index_ranges, low, up, datacube, method=None):
end = indexes.index(up)
start = max(start - 1, 0)
end = min(end + 1, len(indexes))
# indexes_between = [i for i in indexes if low <= i <= up]
indexes_between = indexes[start:end]
indexes_between_ranges.append(indexes_between)
else:
indexes_between = [i for i in indexes if low <= i <= up]
indexes_between_ranges.append(indexes_between)
# indexes_between = [i for i in indexes if low <= i <= up]
# indexes_between_ranges.append(indexes_between)
return indexes_between_ranges

def remap(range):
@@ -437,25 +433,11 @@ def find_indices_between(index_ranges, low, up, datacube, method=None):
end = indexes.index(up)
start = max(start - 1, 0)
end = min(end + 1, len(indexes))
# indexes_between = [i for i in indexes if low <= i <= up]
indexes_between = indexes[start:end]
indexes_between_ranges.append(indexes_between)
else:
indexes_between = [i for i in indexes if low <= i <= up]
indexes_between_ranges.append(indexes_between)
# if method == "surrounding":
# start = indexes.index(low)
# end = indexes.index(up)
# start = max(start-1, 0)
# end = min(end+1, len(indexes))
# # indexes_between = [i for i in indexes if low <= i <= up]
# indexes_between = indexes[start:end]
# indexes_between_ranges.append(indexes_between)
# else:
# indexes_between = [i for i in indexes if low <= i <= up]
# indexes_between_ranges.append(indexes_between)
# indexes_between = [i for i in indexes if low <= i <= up]
# indexes_between_ranges.append(indexes_between)
return indexes_between_ranges

def remap(range):
@@ -527,14 +509,11 @@ def find_indices_between(index_ranges, low, up, datacube, method=None):
end = indexes.index(up)
start = max(start - 1, 0)
end = min(end + 1, len(indexes))
# indexes_between = [i for i in indexes if low <= i <= up]
indexes_between = indexes[start:end]
indexes_between_ranges.append(indexes_between)
else:
indexes_between = [i for i in indexes if low <= i <= up]
indexes_between_ranges.append(indexes_between)
# indexes_between = [i for i in indexes if low <= i <= up]
# indexes_between_ranges.append(indexes_between)
return indexes_between_ranges

def remap(range):
@@ -562,18 +541,6 @@ def update_axis(self):
self = cyclic(self)
return self

@abstractproperty
def name(self) -> str:
pass

@abstractproperty
def tol(self) -> Any:
pass

@abstractproperty
def range(self) -> List[Any]:
pass

# Convert from user-provided value to CONTINUOUS type (e.g. float, pd.timestamp)
@abstractmethod
def parse(self, value: Any) -> Any:
@@ -645,7 +612,6 @@ def find_indices_between(self, index_ranges, low, up, datacube, method=None):
end = indexes.index(up)
start = max(start - 1, 0)
end = min(end + 1, len(indexes))
# indexes_between = [i for i in indexes if low <= i <= up]
indexes_between = indexes[start:end]
indexes_between_ranges.append(indexes_between)
else:
@@ -657,7 +623,10 @@ def find_indices_between(self, index_ranges, low, up, datacube, method=None):
def create_standard(name, values, datacube):
values = np.array(values)
DatacubeAxis.check_axis_type(name, values)
datacube._axes[name] = deepcopy(_type_to_axis_lookup[values.dtype.type])
if datacube._axes is None:
datacube._axes = {name: deepcopy(_type_to_axis_lookup[values.dtype.type])}
else:
datacube._axes[name] = deepcopy(_type_to_axis_lookup[values.dtype.type])
datacube._axes[name].name = name
datacube.axis_counter += 1

@@ -673,11 +642,12 @@ def check_axis_type(name, values):
@mapper
@type_change
class IntDatacubeAxis(DatacubeAxis):
name = None
tol = 1e-12
range = None
transformations = []
type = 0
def __init__(self):
self.name = None
self.tol = 1e-12
self.range = None
self.transformations = []
self.type = 0

def parse(self, value: Any) -> Any:
return float(value)
@@ -697,11 +667,12 @@ def serialize(self, value):
@mapper
@type_change
class FloatDatacubeAxis(DatacubeAxis):
name = None
tol = 1e-12
range = None
transformations = []
type = 0.0
def __init__(self):
self.name = None
self.tol = 1e-12
self.range = None
self.transformations = []
self.type = 0.0

def parse(self, value: Any) -> Any:
return float(value)
@@ -718,11 +689,12 @@ def serialize(self, value):

@merge
class PandasTimestampDatacubeAxis(DatacubeAxis):
name = None
tol = 1e-12
range = None
transformations = []
type = pd.Timestamp("2000-01-01T00:00:00")
def __init__(self):
self.name = None
self.tol = 1e-12
self.range = None
self.transformations = []
self.type = pd.Timestamp("2000-01-01T00:00:00")

def parse(self, value: Any) -> Any:
if isinstance(value, np.str_):
@@ -747,11 +719,12 @@ def offset(self, value):

@merge
class PandasTimedeltaDatacubeAxis(DatacubeAxis):
name = None
tol = 1e-12
range = None
transformations = []
type = np.timedelta64(0, "s")
def __init__(self):
self.name = None
self.tol = 1e-12
self.range = None
self.transformations = []
self.type = np.timedelta64(0, "s")

def parse(self, value: Any) -> Any:
if isinstance(value, np.str_):
@@ -776,10 +749,11 @@ def offset(self, value):

@type_change
class UnsliceableDatacubeAxis(DatacubeAxis):
name = None
tol = float("NaN")
range = None
transformations = []
def __init__(self):
self.name = None
self.tol = float("NaN")
self.range = None
self.transformations = []

def parse(self, value: Any) -> Any:
return value
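
The hunks above turn the class-level attributes (name, tol, range, transformations, type) of each DatacubeAxis subclass into instance attributes set in __init__. A likely motivation, inferred rather than stated in the commit, is that mutable class attributes such as transformations = [] are shared by every instance of the class, so appending a transformation to one axis would silently affect all axes of that type. A small self-contained illustration of the difference:

class SharedDefault:
    transformations = []  # class attribute: one list shared by all instances

class PerInstance:
    def __init__(self):
        self.transformations = []  # instance attribute: a fresh list per object

a, b = SharedDefault(), SharedDefault()
a.transformations.append("mapper")
print(b.transformations)  # ['mapper'] -- b is affected by the change made through a

c, d = PerInstance(), PerInstance()
c.transformations.append("mapper")
print(d.transformations)  # [] -- d keeps its own list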
4 changes: 2 additions & 2 deletions polytope/datacube/transformations/datacube_mappers.py
@@ -2762,14 +2762,14 @@ def axes_idx_to_octahedral_idx(self, first_idx, second_idx):
else:
for i in range(first_idx - 1):
if i <= self._resolution - 1:
octa_idx += 16 + 4 * i
octa_idx += 20 + 4 * i
else:
i = i - self._resolution + 1
if i == 1:
octa_idx += 16 + 4 * self._resolution
else:
i = i - 1
octa_idx += 16 + 4 * (self._resolution - i + 1)
octa_idx += 16 + 4 * (self._resolution - i)
octa_idx += second_idx
return octa_idx
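
The hunk above corrects the per-ring offsets used to turn (first_idx, second_idx) into a flat octahedral-grid index: the first branch now adds 20 + 4 * i instead of 16 + 4 * i, and the mirrored branch drops the extra + 1. This is consistent with the octahedral reduced Gaussian grid, where the ring nearest a pole has 20 points and each ring towards the equator gains 4. A sketch reconstructed only from the visible lines, with the starting value of octa_idx and the handling of first_idx == 1 assumed (they fall outside this hunk):

def axes_idx_to_octahedral_idx_sketch(first_idx, second_idx, resolution):
    # Assumed starting point; the real method may initialise octa_idx differently.
    octa_idx = 0
    for i in range(first_idx - 1):
        if i <= resolution - 1:
            # Northern rings: ring i (0-based) holds 20 + 4*i points.
            octa_idx += 20 + 4 * i
        else:
            j = i - resolution + 1  # position among the southern rings
            if j == 1:
                # First southern ring mirrors the last northern one: 16 + 4*resolution points.
                octa_idx += 16 + 4 * resolution
            else:
                j = j - 1
                octa_idx += 16 + 4 * (resolution - j)
    return octa_idx + second_idx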
