Skip to content

Commit

Permalink
Update stable with changes from develop (#1273)
Browse files Browse the repository at this point in the history
* Fix deprecation warnings and speed up code

* Change last modified date

* Intertidal exposure (#1261)

* New files for intertidal exposure notebook

* minor editing to Load packages cells

* Updated discord and removed slack links

* Improved markdown linking to image and gif in Introduction

* Incorporated SS and MA reviews

* Updated to include RBT reviews

* Minor pip install and notebook naming edits. Add notebook to README

* adding renamed exposure notebook back into PR

* adding global SAR access through microsoft planetary compute (#1263)

* adding global SAR access through Microsoft Planetary Computer

* Make minor spelling and formatting amendments.

* small changes for PR

---------

Co-authored-by: geoscience-aman <[email protected]>

* Update USAGE.rst (#1268)

Add Swinburne course for 2024

* Minor compatibility change for tide modelling package (#1269)

* Mitch predict_xr (#1270)

* add probability array output to predict_xr

* predict_xr at proba_max args

* predict_xr match arg names

* xr_predict deal with multiband prob output

* xr_predict merge output probs

* clean up comments and spacing

* Update USAGE.rst (#1272)

Add new reference, Burton et al 2024 Enhancing long-term vegetation monitoring in Australia: a new approach for harmonising the Advanced Very High Resolution Radiometer normalised-difference vegetation index (NDVI) with MODIS NDVI

* Fix broken code on `unstable` Sandbox image (#1274)

* Updates for pyTMD

* Fix contours bug due to groupby squeeze

* Try loosening pyTMD requirements

* Update tests to pass on both stable and unstable sandbox

* Fix pansharpening bug

---------

Co-authored-by: Aman Chopra <[email protected]>
Co-authored-by: geoscience-aman <[email protected]>
Co-authored-by: ClaireP <[email protected]>
Co-authored-by: Alex Bradley <[email protected]>
Co-authored-by: Bex Dunn <[email protected]>
Co-authored-by: Mitchell Lyons <[email protected]>
  • Loading branch information
7 people authored Oct 9, 2024
1 parent 6f95046 commit 8d22060
Show file tree
Hide file tree
Showing 8 changed files with 326 additions and 186 deletions.
93 changes: 41 additions & 52 deletions How_to_guides/Pansharpening.ipynb

Large diffs are not rendered by default.

18 changes: 9 additions & 9 deletions Tests/dea_tools/test_coastal.py
Original file line number Diff line number Diff line change
Expand Up @@ -337,7 +337,7 @@ def test_model_tides_ensemble():
assert modelled_tides_df.columns.tolist() == ["tide_model", "tide_m"]
assert set(modelled_tides_df.tide_model) == set(models)
assert np.allclose(
modelled_tides_df.tide_m,
modelled_tides_df.tide_m.values,
[
-2.819,
-1.850,
Expand All @@ -352,7 +352,7 @@ def test_model_tides_ensemble():
-0.071,
-0.0158,
],
rtol=0.02,
atol=0.10,
)

# One-to-one mode
Expand Down Expand Up @@ -420,7 +420,7 @@ def test_model_tides_ensemble():

# Check values are expected
assert np.allclose(
modelled_tides_df.ensemble, [-2.819, 0.0730, -1.850, -0.069], rtol=0.01
modelled_tides_df.ensemble.values, [-2.819, 0.0730, -1.850, -0.069], atol=0.10
)

# Wide mode, custom functions
Expand Down Expand Up @@ -558,9 +558,9 @@ def test_pixel_tides(satellite_ds, measured_tides_ds, resolution):
longitude=x_coords, latitude=y_coords, time="2020-02-14", method="nearest"
)

# Test if extracted tides match expected results (to within ~3 cm)
# Test if extracted tides match expected results (to within ~12 cm)
expected_tides = [-1.82249, -1.977088, -1.973618, -2.071242]
assert np.allclose(extracted_tides.values, expected_tides, atol=0.03)
assert np.allclose(extracted_tides.values, expected_tides, atol=0.12)


def test_pixel_tides_quantile(satellite_ds):
Expand Down Expand Up @@ -603,7 +603,7 @@ def test_pixel_tides_quantile(satellite_ds):
longitude=x_coords, latitude=y_coords, method="nearest"
)

# Test if extracted tides match expected results (to within ~3 cm)
# Test if extracted tides match expected results (to within ~10 cm)
expected_tides = np.array(
[
[-1.83, -1.98, -1.98, -2.07],
Expand All @@ -614,7 +614,7 @@ def test_pixel_tides_quantile(satellite_ds):
[1.58, 1.61, 1.62, 1.64],
]
)
assert np.allclose(extracted_tides.values, expected_tides, atol=0.03)
assert np.allclose(extracted_tides.values, expected_tides, atol=0.10)


# Run test with quantile calculation off and on
Expand Down Expand Up @@ -793,7 +793,7 @@ def test_tidal_stats(satellite_ds, modelled_freq):
# Calculate tidal stats
tidal_stats_df = tidal_stats(satellite_ds, modelled_freq=modelled_freq)

# Compare outputs to expected results (within 5% or 0.05 m)
# Compare outputs to expected results (within 10% or 0.10 m)
expected_results = pd.Series(
{
"tidepost_lat": -18.001,
Expand All @@ -811,7 +811,7 @@ def test_tidal_stats(satellite_ds, modelled_freq):
"high_tide_offset": 0.308,
}
)
assert np.allclose(tidal_stats_df, expected_results, atol=0.05)
assert np.allclose(tidal_stats_df, expected_results, atol=0.10)


def test_glint_angle(angle_metadata_ds):
Expand Down
47 changes: 35 additions & 12 deletions Tools/dea_tools/classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,7 @@ def predict_xr(
chunk_size=None,
persist=False,
proba=False,
max_proba=True,
clean=False,
return_input=False,
):
Expand Down Expand Up @@ -255,6 +256,11 @@ def predict_xr(
distributed RAM.
proba : bool
If True, predict probabilities
max_proba : bool
If True, the probabilities array will be flattened to contain
only the probability for the "Predictions" class. If False,
the "Probabilities" object will be an array of prediction
probabilities for each class
clean : bool
If True, remove Infs and NaNs from input and output arrays
return_input : bool
Expand Down Expand Up @@ -282,7 +288,7 @@ def predict_xr(
input_xr.chunks["y"][0]
)

def _predict_func(model, input_xr, persist, proba, clean, return_input):
def _predict_func(model, input_xr, persist, proba, max_proba, clean, return_input):
x, y, crs = input_xr.x, input_xr.y, input_xr.geobox.crs

input_data = []
Expand Down Expand Up @@ -330,18 +336,35 @@ def _predict_func(model, input_xr, persist, proba, clean, return_input):
print(" probabilities...")
out_proba = model.predict_proba(input_data_flattened)

# convert to %
out_proba = da.max(out_proba, axis=1) * 100.0
# return either one band with the max probability, or the whole probability array
if max_proba == True:
print(" returning single probability band.")
out_proba = da.max(out_proba, axis=1) * 100.0
out_proba = out_proba.reshape(len(y), len(x))
out_proba = xr.DataArray(
out_proba, coords={"x": x, "y": y}, dims=["y", "x"]
)
output_xr["Probabilities"] = out_proba
else:
print(" returning class probability array.")
out_proba = out_proba * 100.0
class_names = model.classes_ # Get the unique class names from the fitted classifier

# Loop through each class (band)
probabilities_dataset = xr.Dataset()
for i, class_name in enumerate(class_names):
reshaped_band = out_proba[:, i].reshape(len(y), len(x))
reshaped_da = xr.DataArray(
reshaped_band, coords={"x": x, "y": y}, dims=["y", "x"]
)
probabilities_dataset[f"prob_{class_name}"] = reshaped_da

# merge in the probabilities
output_xr = xr.merge([output_xr, probabilities_dataset])

if clean == True:
out_proba = da.where(da.isfinite(out_proba), out_proba, 0)

out_proba = out_proba.reshape(len(y), len(x))

out_proba = xr.DataArray(
out_proba, coords={"x": x, "y": y}, dims=["y", "x"]
)
output_xr["Probabilities"] = out_proba


if return_input == True:
print(" input features...")
Expand Down Expand Up @@ -391,12 +414,12 @@ def _predict_func(model, input_xr, persist, proba, clean, return_input):
model = ParallelPostFit(model)
with joblib.parallel_backend("dask"):
output_xr = _predict_func(
model, input_xr, persist, proba, clean, return_input
model, input_xr, persist, proba, max_proba, clean, return_input
)

else:
output_xr = _predict_func(
model, input_xr, persist, proba, clean, return_input
model, input_xr, persist, proba, max_proba, clean, return_input
).compute()

return output_xr
Expand Down
Loading

0 comments on commit 8d22060

Please sign in to comment.