diff --git a/.github/workflows/git-sync.yml b/.github/workflows/git-sync.yml
new file mode 100644
index 0000000..d1fca96
--- /dev/null
+++ b/.github/workflows/git-sync.yml
@@ -0,0 +1,50 @@
+name: Sync with Original Repo
+on:
+  schedule:
+    - cron: "0 0 * * Sun"
+
+jobs:
+  sync:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          # full history is needed so upstream commits can be merged
+          fetch-depth: 0
+
+      - name: Fetch upstream
+        run: |
+          git remote add upstream https://github.com/multiply-org/KaFKA-InferenceEngine.git
+          git fetch upstream
+
+      - name: Check if there are changes
+        id: has_changes
+        run: |
+          if [ "$(git rev-parse HEAD)" != "$(git rev-parse upstream/master)" ]; then
+            echo "changed=true" >> "$GITHUB_OUTPUT"
+          else
+            echo "changed=false" >> "$GITHUB_OUTPUT"
+          fi
+
+      - name: Create new branch
+        if: steps.has_changes.outputs.changed == 'true'
+        run: git checkout -b update-upstream
+
+      - name: Merge upstream changes
+        if: steps.has_changes.outputs.changed == 'true'
+        run: |
+          git config user.name "github-actions[bot]"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+          git merge upstream/master --no-edit
+
+      - name: Create pull request
+        uses: peter-evans/create-pull-request@v3
+        if: steps.has_changes.outputs.changed == 'true'
+        with:
+          title: 'Update from upstream repository'
+          commit-message: 'Merge latest changes from upstream repository'
+          branch: 'update-upstream'
+          base: 'master'
+          delete-branch: true
diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml
new file mode 100644
index 0000000..be0667a
--- /dev/null
+++ b/.github/workflows/run-tests.yml
@@ -0,0 +1,38 @@
+name: make
+on:
+  pull_request:
+  push:
+  schedule:
+    - cron: '0 0 2 * *'
+
+jobs:
+  build:
+    strategy:
+      fail-fast: false
+      max-parallel: 5
+      matrix:
+        environment_file: [environment.yml]
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v3
+        with:
+          python-version: '3.10'
+      - name: Add conda to system path
+        run: |
+          # $CONDA is an environment variable pointing to the root of the miniconda directory
+          echo $CONDA/bin >> $GITHUB_PATH
+
+      - name: Install dependencies
+        run: |
+          conda env update --file ${{ matrix.environment_file }} --name base
+      - name: Lint with flake8
+        run: |
+          # stop the build if there are Python syntax errors or undefined names
+          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+      - name: Test with pytest
+        run: |
+          pytest
diff --git a/environment.yml b/environment.yml
new file mode 100644
index 0000000..de7c55a
--- /dev/null
+++ b/environment.yml
@@ -0,0 +1,32 @@
+name: multiply-KaFKA-inference-engine
+
+channels:
+  - conda-forge
+  - defaults
+
+dependencies:
+  - python>=3.8
+  - gdal=2.4
+  - libgdal
+  - pyproj
+  - sentinelhub
+  - pyyaml
+  - numpy
+  - scipy
+  - setuptools
+  - gcc=12.1.0
+  - cxx-compiler
+  - matplotlib
+  - libgcc
+  - libstdcxx-ng
+  - lazrs-python
+  - laspy
+  - pytest
+  - flake8
+  - shapely
+  - pytest-shutil
+  - pip
+  - pip:
+    - .
+    - git+https://github.com/QCDIS/BRDF_descriptors.git
+    - git+https://github.com/QCDIS/atmospheric_correction.git
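Since the test workflow builds everything from environment.yml, a tiny import smoke test catches a broken GDAL pin before the real suite runs. A minimal sketch, assuming a hypothetical tests/test_environment.py that pytest would pick up automatically (the file name and the version floor are not part of this patch):

    # tests/test_environment.py -- hypothetical smoke test, not part of this patch
    def test_gdal_importable():
        # prefer the osgeo namespace, matching the fallback used across kafka/
        from osgeo import gdal
        # gdal.VersionInfo() returns VERSION_NUM as a string, e.g. "2040300" for 2.4.3
        assert int(gdal.VersionInfo()) >= 2000000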
diff --git a/kafka/inference/utils.py b/kafka/inference/utils.py
index 1a43e4b..75c470a 100644
--- a/kafka/inference/utils.py
+++ b/kafka/inference/utils.py
@@ -32,7 +32,10 @@ import scipy.sparse.linalg as spl
 import datetime as dt
 import os
 
-import gdal
+try:
+    from osgeo import gdal
+except ImportError:
+    import gdal
 
 import logging
 LOG = logging.getLogger(__name__)
@@ -340,7 +343,7 @@ def spsolve2(a, b):
     a_lu = spl.splu(a.tocsc()) # LU decomposition for sparse a
     out = sp.lil_matrix((a.shape[1], b.shape[1]), dtype=np.float32)
     b_csc = b.tocsc()
-    for j in xrange(b.shape[1]):
+    for j in range(b.shape[1]):
         bb = np.array(b_csc[j, :].todense()).squeeze()
         out[j, j] = a_lu.solve(bb)[j]
     return out.tocsr()
diff --git a/kafka/input_output/Sentinel1_Observations.py b/kafka/input_output/Sentinel1_Observations.py
index e9107d6..b4cedcf 100644
--- a/kafka/input_output/Sentinel1_Observations.py
+++ b/kafka/input_output/Sentinel1_Observations.py
@@ -7,11 +7,17 @@
 import os
 
 from collections import namedtuple
 
-import gdal
+try:
+    from osgeo import gdal
+except ImportError:
+    import gdal
 
 import numpy as np
 
-import osr
+try:
+    from osgeo import osr
+except ImportError:
+    import osr
 
 import scipy.sparse as sp
diff --git a/kafka/input_output/Sentinel2_Observations.py b/kafka/input_output/Sentinel2_Observations.py
index 71c889d..a0bc816 100644
--- a/kafka/input_output/Sentinel2_Observations.py
+++ b/kafka/input_output/Sentinel2_Observations.py
@@ -7,8 +7,15 @@
 import numpy as np
 import scipy.sparse as sp # Required for unc
 
-import gdal
-import osr
+try:
+    from osgeo import gdal
+except ImportError:
+    import gdal
+
+try:
+    from osgeo import osr
+except ImportError:
+    import osr
 
 import xml.etree.ElementTree as ET
 from collections import namedtuple
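The same try/except fallback is now repeated in five modules. It could live in one place, for example a hypothetical kafka/compat.py (a sketch, not part of this patch), so every caller reduces to `from kafka.compat import gdal, osr`:

    # kafka/compat.py -- hypothetical central home for the GDAL import fallback
    try:
        # newer GDAL builds only ship the namespaced osgeo bindings
        from osgeo import gdal, osr
    except ImportError:
        # older GDAL installs still expose top-level modules
        import gdal
        import osr

    # optional: make GDAL raise Python exceptions instead of returning None
    gdal.UseExceptions()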
""" -import datetime import _pickle as cPickle +import datetime import glob import os from collections import namedtuple from BRDF_descriptors import RetrieveBRDFDescriptors -import gdal +try: + from osgeo import gdal +except ImportError: + import gdal + +from SIAC import kernels import numpy as np -# from kernels import Kernels import scipy.sparse as sp from scipy.ndimage import zoom @@ -134,7 +138,7 @@ def get_band_data(self, the_date, band_no): vza = zoom(vza, 2, order=0) sza = zoom(sza, 2, order=0) mask = zoom(mask, 2, order=0) - K = Kernels(vza, sza, raa, LiType="Sparse", doIntegrals=False, + K = kernels.Kernels(vza, sza, raa, LiType="Sparse", doIntegrals=False, normalise=1, RecipFlag=True, RossHS=False, MODISSPARSE=True, RossType="Thick") uncertainty = refl*0 + unc[band_no-1] @@ -190,7 +194,7 @@ def get_band_data(self, the_date, band_no): # find the requested date date_idx = self.dates.index(the_date) BHR = [] - for band in xrange(7): + for band in range(7): g = gdal.Open(self.kernels[date_idx].replace( "b0", "b%d" % band)) kernels = g.ReadAsArray() # 3*nx*ny diff --git a/kafka/input_output/utils.py b/kafka/input_output/utils.py index d138f9e..827f396 100644 --- a/kafka/input_output/utils.py +++ b/kafka/input_output/utils.py @@ -1,6 +1,9 @@ -import numpy as np -import gdal -import osr +try: + from osgeo import gdal, ogr, osr +except ImportError: + import gdal + import osr + """ I need to put some utils in here. Seems like the most obvious place... """ diff --git a/kafka/linear_kf.py b/kafka/linear_kf.py index 15c20ff..72b8a82 100644 --- a/kafka/linear_kf.py +++ b/kafka/linear_kf.py @@ -24,24 +24,18 @@ from collections import namedtuple import numpy as np - import scipy.sparse as sp -# from scipy.spatial.distance import squareform, pdist - -# from utils import matrix_squeeze, spsolve2, reconstruct_array - +from .inference import hessian_correction +from .inference import iterate_time_grid +from .inference import propagate_information_filter_LAI # eg from .inference import variational_kalman from .inference import variational_kalman_multiband -from .inference import locate_in_lut, run_emulator, create_uncertainty -from .inference import create_linear_observation_operator -from .inference import create_nonlinear_observation_operator -from .inference import iterate_time_grid -from .inference import propagate_information_filter_LAI # eg -from .inference import hessian_correction -from .inference import hessian_correction_multiband from .inference.kf_tools import propagate_and_blend_prior +# from scipy.spatial.distance import squareform, pdist +# from utils import matrix_squeeze, spsolve2, reconstruct_array + # Set up logging LOG = logging.getLogger(__name__+".linear_kf") diff --git a/setup.py b/setup.py index c2a3ed1..2fe526f 100644 --- a/setup.py +++ b/setup.py @@ -3,12 +3,6 @@ from setuptools import setup, find_packages requirements = [ - 'pytest', - 'numpy', - 'scipy', - 'gdal', - # 'BRDF_descriptors', # Not available for automatic installation - 'matplotlib' ] setup(name='KaFKA', @@ -16,4 +10,4 @@ author='MULTIPLY Team', packages=find_packages(), install_requires=requirements -) + )