Skip to content

Commit

Permalink
Improve documentation, add documentation testing, and make all examples into scripts. (#101)

Browse files Browse the repository at this point in the history

* Moved all example code to actual scripts. This allows testing that they work.

* Added new required dev-requirements and tests to check that the docs are working.

* Fixed docstring to work with the documentation better.

* Added opencv to dev-requirements.txt

* PATH fixes

* Removed dynamic doc versioning for now (didn't work with CI)

* Set cv2 full installation as requirement instead of lightweight one.

* Fixed environment variables to hopefully work better with CI

* Changed copy_tree function to work with python=3.7
  • Loading branch information
erikmannerfelt authored Apr 22, 2021
1 parent 3f68b02 commit 64f08b0
Show file tree
Hide file tree
Showing 14 changed files with 526 additions and 354 deletions.
3 changes: 3 additions & 0 deletions dev-requirements.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
sphinx
numpydoc
sphinx_rtd_theme
sphinx_autodoc_typehints
sphinxcontrib-programoutput
numpy
scipy
rasterio
Expand All @@ -10,3 +12,4 @@ tqdm
git+https://github.com/GlacioHack/GeoUtils.git
scikit-gstat
flake8
opencv-contrib-python
92 changes: 92 additions & 0 deletions docs/source/code/comparison.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
"""Example code for the DEM comparison chapter (it's made like this to test the syntax)."""
#######################
# SECTION: Example data
#######################
from datetime import datetime

import geoutils as gu
import numpy as np

import xdem

# Download the necessary data. This may take a few minutes.
xdem.examples.download_longyearbyen_examples(overwrite=False)

# Load a reference DEM from 2009
dem_2009 = xdem.DEM(xdem.examples.FILEPATHS["longyearbyen_ref_dem"], datetime=datetime(2009, 8, 1))
# Load a DEM from 1990
dem_1990 = xdem.DEM(xdem.examples.FILEPATHS["longyearbyen_tba_dem"], datetime=datetime(1990, 8, 1))
# Load glacier outlines from 1990 and 2010.
glaciers_1990 = gu.Vector(xdem.examples.FILEPATHS["longyearbyen_glacier_outlines"])
glaciers_2010 = gu.Vector(xdem.examples.FILEPATHS["longyearbyen_glacier_outlines_2010"])

# Make a dictionary of glacier outlines where the key represents the associated date.
# NOTE(review): the 2009 date maps to the 2010 outlines — presumably the closest
# available outline set for that DEM; confirm this is intentional.
outlines = {
    datetime(1990, 8, 1): glaciers_1990,
    datetime(2009, 8, 1): glaciers_2010,
}

# Fake a future DEM to have a time-series of three DEMs
dem_2060 = dem_2009.copy()
# Assume that all glacier values will be 30 m lower than in 2009
dem_2060.data[glaciers_2010.create_mask(dem_2060)] -= 30
dem_2060.datetime = datetime(2060, 8, 1)

##############################
# SECTION: Subtracting rasters
##############################

dem1, dem2 = dem_2009, dem_1990

ddem_data = dem1.data - dem2.data
# If we want to inherit the georeferencing information:
# FIX: take the transform and the CRS from the same raster. The original passed
# dem1.transform together with dem2.crs, which is inconsistent if the two DEMs
# are not in the same CRS.
ddem_raster = xdem.DEM.from_array(ddem_data, dem1.transform, dem1.crs)

# TEXT HERE

ddem_raster = xdem.spatial_tools.subtract_rasters(dem1, dem2)

#############################
# SECTION: dDEM interpolation
#############################

# A dDEM is an elevation-difference raster tagged with a start and end time.
ddem = xdem.dDEM(
    raster=xdem.spatial_tools.subtract_rasters(dem_2009, dem_1990),
    start_time=dem_1990.datetime,
    end_time=dem_2009.datetime
)

# The example DEMs are void-free, so let's make some random voids.
ddem.data.mask = np.zeros_like(ddem.data, dtype=bool)  # Reset the mask
# Introduce 50000 nans randomly throughout the dDEM.
# NOTE(review): no random seed is set, so the voids differ between runs — fine
# for doc testing, but the rendered numbers are not reproducible.
ddem.data.mask.ravel()[np.random.choice(ddem.data.size, 50000, replace=False)] = True

# SUBSECTION: Linear spatial interpolation

ddem.interpolate(method="linear")

# SUBSECTION: Local hypsometric interpolation

ddem.interpolate(method="local_hypsometric", reference_elevation=dem_2009, mask=glaciers_1990)

# SUBSECTION: Regional hypsometric interpolation

ddem.interpolate(method="regional_hypsometric", reference_elevation=dem_2009, mask=glaciers_1990)

###################################
# SECTION: The DEMCollection object
###################################

dems = xdem.DEMCollection(
    [dem_1990, dem_2009, dem_2060],
    outlines=outlines,
    reference_dem=dem_2009
)

# TEXT HERE

dems.subtract_dems()
dems.get_cumulative_series(kind="dh", outlines_filter="NAME == 'Scott Turnerbreen'")

# Create an object that can be printed in the documentation.
# (comparison_print_cumulative_dh.py imports this module and prints `scott_series`.)
scott_series = dems.get_cumulative_series(kind="dh", outlines_filter="NAME == 'Scott Turnerbreen'")
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
"""Plot an example of local hypsometric interpolation at Scott Turnerbreen, Svalbard."""
import geoutils as gu
import matplotlib.pyplot as plt
import numpy as np

import xdem

# Download the example data (cached after the first run).
xdem.examples.download_longyearbyen_examples(overwrite=False)

dem_2009 = xdem.DEM(xdem.examples.FILEPATHS["longyearbyen_ref_dem"])
dem_1990 = xdem.DEM(xdem.examples.FILEPATHS["longyearbyen_tba_dem"])
outlines_1990 = gu.Vector(xdem.examples.FILEPATHS["longyearbyen_glacier_outlines"])

# Elevation difference (2009 - 1990) between the two DEMs.
ddem = xdem.dDEM(
    xdem.spatial_tools.subtract_rasters(dem_2009, dem_1990, resampling_method="nearest"),
    start_time=np.datetime64("1990-08-01"),
    end_time=np.datetime64("2009-08-01")
)

# Convert the total change to an annual rate (m/a) over the 19-year period.
ddem.data /= (2009 - 1990)

# Restrict the analysis to the Scott Turnerbreen glacier.
scott_1990 = outlines_1990.query("NAME == 'Scott Turnerbreen'")
mask = scott_1990.create_mask(ddem)

# Bin elevation change by elevation; also compute the spread (std) in each bin.
ddem_bins = xdem.volume.hypsometric_binning(ddem.data[mask], dem_2009.data[mask])
stds = xdem.volume.hypsometric_binning(ddem.data[mask], dem_2009.data[mask], aggregation_function=np.std)

plt.figure(figsize=(8, 8))
plt.grid(zorder=0)
plt.plot(ddem_bins["value"], ddem_bins.index.mid, linestyle="--", zorder=1)

# One horizontal bar per bin, centred on the bin's mean value, with width = 1 std.
# FIX: dropped the redundant "* 1" on the height expression. NOTE: index.left -
# index.right is negative here; matplotlib accepts a negative bar height.
plt.barh(
    y=ddem_bins.index.mid,
    width=stds["value"],
    left=ddem_bins["value"] - stds["value"] / 2,
    height=ddem_bins.index.left - ddem_bins.index.right,
    zorder=2,
    edgecolor="black",
)
# FIX: renamed the loop variable from `bin` to `elev_bin` to stop shadowing the
# built-in `bin()`.
for elev_bin in ddem_bins.index:
    plt.vlines(ddem_bins.loc[elev_bin, "value"], elev_bin.left, elev_bin.right, color="black", zorder=3)

plt.xlabel("Elevation change (m / a)")
# Second x-axis sharing the y-axis, for the normalized area distribution.
plt.twiny()
plt.barh(
    y=ddem_bins.index.mid,
    width=ddem_bins["count"] / ddem_bins["count"].sum(),
    left=0,
    height=ddem_bins.index.left - ddem_bins.index.right,
    zorder=2,
    alpha=0.2,
)
plt.xlabel("Normalized area distribution (hypsometry)")

plt.ylabel("Elevation (m a.s.l.)")

plt.tight_layout()
plt.show()
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
"""Plot an example of regional hypsometric interpolation in central Svalbard."""
import geoutils as gu
import matplotlib.pyplot as plt
import numpy as np

import xdem

# Download the example data (cached after the first run).
xdem.examples.download_longyearbyen_examples(overwrite=False)

dem_2009 = xdem.DEM(xdem.examples.FILEPATHS["longyearbyen_ref_dem"])
dem_1990 = xdem.DEM(xdem.examples.FILEPATHS["longyearbyen_tba_dem"])
outlines_1990 = gu.Vector(xdem.examples.FILEPATHS["longyearbyen_glacier_outlines"])

# Elevation difference (2009 - 1990) between the two DEMs.
ddem = xdem.dDEM(
    xdem.spatial_tools.subtract_rasters(dem_2009, dem_1990, resampling_method="nearest"),
    start_time=np.datetime64("1990-08-01"),
    end_time=np.datetime64("2009-08-01")
)

# Convert the total change to an annual rate (m/a) over the 19-year period.
ddem.data /= (2009 - 1990)

# Regional approach: use all 1990 glacier outlines, not a single glacier.
mask = outlines_1990.create_mask(ddem)

# Bin elevation change by elevation; also compute the spread (std) in each bin.
ddem_bins = xdem.volume.hypsometric_binning(ddem.data[mask], dem_2009.data[mask])
stds = xdem.volume.hypsometric_binning(ddem.data[mask], dem_2009.data[mask], aggregation_function=np.std)

plt.figure(figsize=(8, 8))
plt.grid(zorder=0)


plt.plot(ddem_bins["value"], ddem_bins.index.mid, linestyle="--", zorder=1)

# One horizontal bar per bin, centred on the bin's mean value, with width = 1 std.
# FIX: dropped the redundant "* 1" on the height expression. NOTE: index.left -
# index.right is negative here; matplotlib accepts a negative bar height.
plt.barh(
    y=ddem_bins.index.mid,
    width=stds["value"],
    left=ddem_bins["value"] - stds["value"] / 2,
    height=ddem_bins.index.left - ddem_bins.index.right,
    zorder=2,
    edgecolor="black",
)
# FIX: renamed the loop variable from `bin` to `elev_bin` to stop shadowing the
# built-in `bin()`.
for elev_bin in ddem_bins.index:
    plt.vlines(ddem_bins.loc[elev_bin, "value"], elev_bin.left, elev_bin.right, color="black", zorder=3)

plt.xlabel("Elevation change (m / a)")
# Second x-axis sharing the y-axis, for the normalized area distribution.
plt.twiny()
plt.barh(
    y=ddem_bins.index.mid,
    width=ddem_bins["count"] / ddem_bins["count"].sum(),
    left=0,
    height=ddem_bins.index.left - ddem_bins.index.right,
    zorder=2,
    alpha=0.2,
)
plt.xlabel("Normalized area distribution (hypsometry)")
plt.ylabel("Elevation (m a.s.l.)")

plt.tight_layout()
plt.show()
45 changes: 45 additions & 0 deletions docs/source/code/comparison_plot_spatial_interpolation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
"""Plot an example of spatial interpolation of randomly generated errors."""
import geoutils as gu
import matplotlib.pyplot as plt
import numpy as np

import xdem

xdem.examples.download_longyearbyen_examples(overwrite=False)

dem_2009 = xdem.DEM(xdem.examples.FILEPATHS["longyearbyen_ref_dem"])
dem_1990 = xdem.DEM(xdem.examples.FILEPATHS["longyearbyen_tba_dem"])
outlines_1990 = gu.Vector(xdem.examples.FILEPATHS["longyearbyen_glacier_outlines"])

# Elevation difference (2009 - 1990) between the two DEMs.
ddem = xdem.dDEM(
    xdem.spatial_tools.subtract_rasters(dem_2009, dem_1990, resampling_method="nearest"),
    start_time=np.datetime64("1990-08-01"),
    end_time=np.datetime64("2009-08-01")
)
# The example DEMs are void-free, so let's make some random voids.
ddem.data.mask = np.zeros_like(ddem.data, dtype=bool)  # Reset the mask
# Introduce 50000 nans randomly throughout the dDEM.
# NOTE(review): no random seed is set, so the voids differ between runs.
ddem.data.mask.ravel()[np.random.choice(ddem.data.size, 50000, replace=False)] = True

# Fill the voids with linear interpolation.
ddem.interpolate(method="linear")

# Zoom in on an interesting region (y-limits reversed for image orientation).
ylim = (300, 100)
xlim = (800, 1050)

# Before/after panels: the voided dDEM and its interpolated counterpart.
panels = [
    (ddem.data, "dDEM with random voids"),
    (ddem.filled_data, "Linearly interpolated dDEM"),
]

plt.figure(figsize=(8, 5))
for panel_index, (array, title) in enumerate(panels):
    plt.subplot(1, 2, panel_index + 1)
    plt.imshow(array.squeeze(), cmap="coolwarm_r", vmin=-50, vmax=50)
    plt.ylim(ylim)
    plt.xlim(xlim)
    plt.axis("off")
    plt.title(title)


plt.tight_layout()
plt.show()
11 changes: 11 additions & 0 deletions docs/source/code/comparison_print_cumulative_dh.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
"""Print the Scott Turnerbreen dH series."""
import contextlib
import io
import sys

# The base directory is source/, so to find comparison.py, it has to be source/code/
sys.path.insert(0, "code/")

# Importing comparison runs the whole example script; trap its stdout so only
# the series below is printed.
stdout_trap = io.StringIO()
with contextlib.redirect_stdout(stdout_trap):
    import comparison

print(comparison.scott_series)
89 changes: 89 additions & 0 deletions docs/source/code/coregistration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
"""Example code for the DEM coregistration chapter (it's made like this to test the syntax). """
#######################
# SECTION: Example data
#######################
import geoutils as gu
import numpy as np

import xdem
from xdem import coreg

# Download the necessary data. This may take a few minutes.
xdem.examples.download_longyearbyen_examples(overwrite=False)

# Load the data using xdem and geoutils (could be with rasterio and geopandas instead)
# Load a reference DEM from 2009
reference_dem = xdem.DEM(xdem.examples.FILEPATHS["longyearbyen_ref_dem"])
# Load a moderately well aligned DEM from 1990, reprojected onto the reference grid.
dem_to_be_aligned = xdem.DEM(xdem.examples.FILEPATHS["longyearbyen_tba_dem"]).reproject(reference_dem, silent=True)
# Load glacier outlines from 1990. This will act as the unstable ground.
glacier_outlines = gu.Vector(xdem.examples.FILEPATHS["longyearbyen_glacier_outlines"])

# Prepare the inputs for coregistration.
ref_data = reference_dem.data.squeeze()  # This is a numpy 2D array/masked_array
tba_data = dem_to_be_aligned.data.squeeze()  # This is a numpy 2D array/masked_array
# This is a boolean numpy 2D array. Note the bitwise not (~) symbol:
# True marks stable (non-glacier) ground usable for fitting.
inlier_mask = ~glacier_outlines.create_mask(reference_dem)
transform = reference_dem.transform  # This is a rio.transform.Affine object.

########################
# SECTION: Nuth and Kääb
########################

nuth_kaab = coreg.NuthKaab()
# Fit the data to a suitable x/y/z offset.
nuth_kaab.fit(ref_data, tba_data, transform=transform, inlier_mask=inlier_mask)

# Apply the transformation to the data (or any other data)
aligned_dem = nuth_kaab.apply(tba_data, transform=transform)

####################
# SECTION: Deramping
####################

# Instantiate a 1st order deramping object.
deramp = coreg.Deramp(degree=1)
# Fit the data to a suitable polynomial solution.
deramp.fit(ref_data, tba_data, transform=transform, inlier_mask=inlier_mask)

# Apply the transformation to the data (or any other data)
# NOTE(review): unlike the other sections, this applies to the unsqueezed
# dem_to_be_aligned.data — presumably to show apply() on a full raster; confirm.
deramped_dem = deramp.apply(dem_to_be_aligned.data, transform=dem_to_be_aligned.transform)

##########################
# SECTION: Bias correction
##########################

bias_corr = coreg.BiasCorr()
# Note that the transform argument is not needed, since it is a simple vertical correction.
bias_corr.fit(ref_data, tba_data, inlier_mask=inlier_mask)

# Apply the bias to a DEM
corrected_dem = bias_corr.apply(tba_data, transform=None)  # The transform does not need to be given for bias

# Use median bias instead of the default aggregation function.
bias_median = coreg.BiasCorr(bias_func=np.median)

# bias_median.fit(... # etc.

##############
# SECTION: ICP
##############

# Instantiate the object with default parameters
icp = coreg.ICP()
# Fit the data to a suitable transformation.
icp.fit(ref_data, tba_data, transform=transform, inlier_mask=inlier_mask)

# Apply the transformation matrix to the data (or any other data).
# (Re-uses the name `aligned_dem`; each doc section stands on its own.)
aligned_dem = icp.apply(tba_data, transform=transform)

###################
# SECTION: Pipeline
###################

# Chain a bias correction and ICP; the steps are fitted and applied in sequence.
pipeline = coreg.CoregPipeline([coreg.BiasCorr(), coreg.ICP()])

# pipeline.fit(... # etc.

# This works identically to the syntax above
pipeline2 = coreg.BiasCorr() + coreg.ICP()
Loading

0 comments on commit 64f08b0

Please sign in to comment.