Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 43 additions & 0 deletions .github/workflows/git-sync.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# Weekly sync of this fork with the upstream MULTIPLY repository.
# Opens a pull request when upstream/master has commits we do not have.
name: Sync with Original Repo

on:
  schedule:
    - cron: "0 0 * * Sun"   # every Sunday at 00:00 UTC

jobs:
  sync:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          # BUGFIX: full history is required so the merge below can find a
          # common ancestor (the default shallow clone has depth 1).
          fetch-depth: 0

      - name: Fetch upstream
        run: |
          git remote add upstream https://github.com/multiply-org/KaFKA-InferenceEngine.git
          git fetch upstream

      - name: Check if there are changes
        id: has_changes
        # BUGFIX: the original `git rev-parse HEAD != upstream/master` does not
        # compare anything -- rev-parse just resolves each argument to a SHA.
        # Compare the two resolved SHAs explicitly. Also write to
        # $GITHUB_OUTPUT, since the `::set-output` workflow command is
        # deprecated by GitHub.
        run: |
          if [ "$(git rev-parse HEAD)" != "$(git rev-parse upstream/master)" ]; then
            echo "changed=true" >> "$GITHUB_OUTPUT"
          else
            echo "changed=false" >> "$GITHUB_OUTPUT"
          fi

      - name: Merge upstream changes
        if: steps.has_changes.outputs.changed == 'true'
        run: |
          git merge upstream/master --no-edit

      - name: Create pull request
        uses: peter-evans/create-pull-request@v3
        if: steps.has_changes.outputs.changed == 'true'
        with:
          title: 'Update from upstream repository'
          commit-message: 'Merge latest changes from upstream repository'
          # BUGFIX: the original "Create new branch" step checked out
          # update-upstream-${{ github.run_number }}, but this action pushes to
          # the branch named here, so the merge would have landed on the wrong
          # branch. The action creates/updates the PR branch itself, so the
          # manual checkout step is removed and a single branch name is used.
          branch: 'update-upstream'
          base: 'master'
          delete-branch: true
38 changes: 38 additions & 0 deletions .github/workflows/run-tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# Continuous integration: build the conda environment from environment.yml,
# lint the package with flake8, and run the pytest suite.
name: make

on:
  pull_request:
  push:
  schedule:
    - cron: '0 0 2 * *'   # monthly, at 00:00 UTC on the 2nd

jobs:
  build:
    strategy:
      fail-fast: false
      max-parallel: 5
      matrix:
        environment_file: [environment.yml]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python 3.10
        uses: actions/setup-python@v3
        with:
          python-version: '3.10'

      - name: Add conda to system path
        run: |
          # $CONDA is an environment variable pointing to the root of the miniconda directory
          echo $CONDA/bin >> $GITHUB_PATH

      - name: Install dependencies
        run: |
          conda env update --file ${{ matrix.environment_file }} --name base

      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

      - name: Test with pytest
        run: |
          pytest
35 changes: 35 additions & 0 deletions environment.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# Conda environment for the MULTIPLY KaFKA inference engine.
name: multiply-KaFKA-inference-engine

channels:
  - conda-forge
  - defaults

dependencies:
  # BUGFIX: `python=>3.8` is not valid conda version-spec syntax;
  # the "at least" operator is `>=`.
  - python>=3.8
  - gdal=2.4
  - libgdal
  - pyproj
  - sentinelhub
  - pyyaml        # was listed twice; duplicate removed
  - numpy
  - scipy         # was listed twice; duplicate removed
  - setuptools    # was listed twice; duplicate removed
  - gcc=12.1.0
  - cxx-compiler
  - matplotlib
  - libgcc
  - libstdcxx-ng
  - lazrs-python
  - laspy
  - pytest
  - flake8
  - shapely
  - pytest-shutil
  - pip
  - pip:
      - .
      - git+https://github.com/QCDIS/BRDF_descriptors.git
      - git+https://github.com/QCDIS/atmospheric_correction.git
7 changes: 5 additions & 2 deletions kafka/inference/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,10 @@
import scipy.sparse.linalg as spl
import datetime as dt
import os
import gdal
try:
from osgeo import gdal
except ImportError:
import gdal

import logging
LOG = logging.getLogger(__name__)
Expand Down Expand Up @@ -340,7 +343,7 @@ def spsolve2(a, b):
a_lu = spl.splu(a.tocsc()) # LU decomposition for sparse a
out = sp.lil_matrix((a.shape[1], b.shape[1]), dtype=np.float32)
b_csc = b.tocsc()
for j in xrange(b.shape[1]):
for j in range(b.shape[1]):
bb = np.array(b_csc[j, :].todense()).squeeze()
out[j, j] = a_lu.solve(bb)[j]
return out.tocsr()
10 changes: 8 additions & 2 deletions kafka/input_output/Sentinel1_Observations.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,17 @@
import os
from collections import namedtuple

import gdal
try:
from osgeo import gdal
except ImportError:
import gdal

import numpy as np

import osr
try:
from osgeo import osr
except ImportError:
import osr

import scipy.sparse as sp

Expand Down
11 changes: 9 additions & 2 deletions kafka/input_output/Sentinel2_Observations.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,15 @@

import numpy as np
import scipy.sparse as sp # Required for unc
import gdal
import osr
try:
from osgeo import gdal
except ImportError:
import gdal

try:
from osgeo import osr
except ImportError:
import osr

import xml.etree.ElementTree as ET
from collections import namedtuple
Expand Down
14 changes: 9 additions & 5 deletions kafka/input_output/observations.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,18 +38,22 @@
MCD43A1/2 -> See BRDF_descriptors!

"""
import datetime
import _pickle as cPickle
import datetime
import glob
import os
from collections import namedtuple

from BRDF_descriptors import RetrieveBRDFDescriptors

import gdal
try:
from osgeo import gdal
except ImportError:
import gdal

from SIAC import kernels

import numpy as np
# from kernels import Kernels

import scipy.sparse as sp
from scipy.ndimage import zoom
Expand Down Expand Up @@ -134,7 +138,7 @@ def get_band_data(self, the_date, band_no):
vza = zoom(vza, 2, order=0)
sza = zoom(sza, 2, order=0)
mask = zoom(mask, 2, order=0)
K = Kernels(vza, sza, raa, LiType="Sparse", doIntegrals=False,
K = kernels.Kernels(vza, sza, raa, LiType="Sparse", doIntegrals=False,
normalise=1, RecipFlag=True,
RossHS=False, MODISSPARSE=True, RossType="Thick")
uncertainty = refl*0 + unc[band_no-1]
Expand Down Expand Up @@ -190,7 +194,7 @@ def get_band_data(self, the_date, band_no):
# find the requested date
date_idx = self.dates.index(the_date)
BHR = []
for band in xrange(7):
for band in range(7):
g = gdal.Open(self.kernels[date_idx].replace(
"b0", "b%d" % band))
kernels = g.ReadAsArray() # 3*nx*ny
Expand Down
9 changes: 6 additions & 3 deletions kafka/input_output/utils.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
import numpy as np
import gdal
import osr
try:
from osgeo import gdal, ogr, osr
except ImportError:
import gdal
import osr

"""
I need to put some utils in here. Seems like the most obvious place...
"""
Expand Down
18 changes: 6 additions & 12 deletions kafka/linear_kf.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,24 +24,18 @@
from collections import namedtuple

import numpy as np

import scipy.sparse as sp

# from scipy.spatial.distance import squareform, pdist

# from utils import matrix_squeeze, spsolve2, reconstruct_array

from .inference import hessian_correction
from .inference import iterate_time_grid
from .inference import propagate_information_filter_LAI # eg
from .inference import variational_kalman
from .inference import variational_kalman_multiband
from .inference import locate_in_lut, run_emulator, create_uncertainty
from .inference import create_linear_observation_operator
from .inference import create_nonlinear_observation_operator
from .inference import iterate_time_grid
from .inference import propagate_information_filter_LAI # eg
from .inference import hessian_correction
from .inference import hessian_correction_multiband
from .inference.kf_tools import propagate_and_blend_prior

# from scipy.spatial.distance import squareform, pdist
# from utils import matrix_squeeze, spsolve2, reconstruct_array

# Set up logging

LOG = logging.getLogger(__name__+".linear_kf")
Expand Down
8 changes: 1 addition & 7 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,17 +3,11 @@
from setuptools import setup, find_packages

requirements = [
'pytest',
'numpy',
'scipy',
'gdal',
# 'BRDF_descriptors', # Not available for automatic installation
'matplotlib'
]

setup(name='KaFKA',
description='MULTIPLY KaFKA inference engine',
author='MULTIPLY Team',
packages=find_packages(),
install_requires=requirements
)
)