From 59d4438bb2dd9193d39dc4cbed71e2aa6e501301 Mon Sep 17 00:00:00 2001 From: Scott Henderson Date: Wed, 20 Oct 2021 17:12:59 -0700 Subject: [PATCH] Replace s3://snowex-data from tutorials with zenodo dataset (#160) * core-datasets updates * geospatial mods * mods to geospatial, lidar, nsidc * remove s3 from thermal tutorial * ML and microstructure updates * sar updates * camera tutorial update * final zenodo paths * fix zenodo download cells * bump cache key * fix sentinel1 data download cell * remove jupyterhub instruction from readme --- .github/workflows/cron.yaml | 2 +- .github/workflows/deploy.yaml | 2 +- .github/workflows/netlifypreview.yaml | 2 +- .github/workflows/test.yaml | 2 +- README.md | 9 +- .../timelapse-camera-tutorial.ipynb | 147 +++++---- .../core-datasets/02_data-package.ipynb | 231 ++++++--------- .../core-datasets/03_practice-querying.ipynb | 58 ++-- book/tutorials/geospatial/SNOTEL_query.ipynb | 280 ++++++++---------- book/tutorials/geospatial/raster.ipynb | 114 ++++--- book/tutorials/lidar/ASO_data_tutorial.ipynb | 29 +- book/tutorials/lidar/ICESat2_tutorial.ipynb | 111 ++++--- .../Machine_Learning_Tutorial.ipynb | 146 +++++---- .../microstructure-tutorial.ipynb | 23 +- .../nsidc-access/nsidc-data-access.ipynb | 7 - book/tutorials/sar/sentinel1.ipynb | 67 +++-- book/tutorials/sar/swesarr.ipynb | 168 ++++++----- book/tutorials/sar/uavsar.ipynb | 141 ++++----- .../thermal-ir/thermal-ir-data-download.ipynb | 139 ++++----- .../thermal-ir/thermal-ir-tutorial.ipynb | 258 ++++++++-------- 20 files changed, 902 insertions(+), 1034 deletions(-) diff --git a/.github/workflows/cron.yaml b/.github/workflows/cron.yaml index fe61f71..a9c8bc0 100644 --- a/.github/workflows/cron.yaml +++ b/.github/workflows/cron.yaml @@ -21,7 +21,7 @@ jobs: # NOTE: change key to "jupyterbook-N+1" to force rebuilding cache with: path: ./book/_build - key: jupyterbook-4 + key: jupyterbook-5 - name: Pull Docker Image run: | diff --git a/.github/workflows/deploy.yaml 
b/.github/workflows/deploy.yaml index 6e636a0..c79f60e 100644 --- a/.github/workflows/deploy.yaml +++ b/.github/workflows/deploy.yaml @@ -23,7 +23,7 @@ jobs: # NOTE: change key "jupyterbook-N+1" to force rebuilding cache with: path: ./book/_build - key: jupyterbook-4 + key: jupyterbook-5 # NOTE: download build-artifact (_build) folder to inspect cache locally # - name: List Jupyter-Cache contents diff --git a/.github/workflows/netlifypreview.yaml b/.github/workflows/netlifypreview.yaml index 6515274..df2ebbf 100644 --- a/.github/workflows/netlifypreview.yaml +++ b/.github/workflows/netlifypreview.yaml @@ -25,7 +25,7 @@ jobs: # NOTE: change key to "jupyterbook-N+1" to force rebuilding cache with: path: ./book/_build - key: jupyterbook-4 + key: jupyterbook-5 - name: Pull SnowEX Docker Image run: | diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index a8fed47..f293bf7 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -30,7 +30,7 @@ jobs: # NOTE: change key to "jupyterbook-N+1" to force rebuilding cache with: path: ./book/_build - key: jupyterbook-4 + key: jupyterbook-5 - name: Pull Docker Image run: | diff --git a/README.md b/README.md index c08118b..eb2a78e 100644 --- a/README.md +++ b/README.md @@ -3,14 +3,6 @@ https://snowex-hackweek.github.io/website -## Run tutorials on JupyterHub - -** NOTE: this is the preferred way to run tutorials, because you will have access to some external resources like s3://snowex-data ** - - 1. Log onto http://snowex.hackweek.io (you need to be a member of https://github.com/snowex-hackweek) - 2. Clone this repository `git clone https://github.com/snowex-hackweek/website` - 3. 
Navigate to tutorial notebook you want to run: `cd website/book/tutorials/raster` - ## Run tutorials on BinderHub [![badge](https://img.shields.io/static/v1.svg?logo=Jupyter&label=PangeoBinderAWS&message=us-west-2&color=orange)](https://aws-uswest2-binder.pangeo.io/v2/gh/snowex-hackweek/website/main?urlpath=git-pull%3Frepo%3Dhttps%253A%252F%252Fgithub.com%252Fsnowex-hackweek%252Fwebsite%26urlpath%3Dlab%252Ftree%252Fwebsite/book/tutorials%252F%26branch%3Dmain) @@ -27,6 +19,7 @@ docker compose up ## GitHub Deployment (changing book contents) The repository comes with a preconfigured GitHub Actions script so that any commits to the `main` branch are deployed the the GitHub Pages hosted website. + ## Local Development (adding tutorials) To add a tutorial as a jupyter notebook under [./book/tutorials](./book/tutorials), you'll want to use a consistent environment. The best approach for this is to use a [Docker](https://docs.docker.com/get-docker/) Image (this ensures tutorials run in the same environment whether it is your laptop, the event JupyterHub, or some other server): ``` diff --git a/book/tutorials/camera-traps-tutorial/timelapse-camera-tutorial.ipynb b/book/tutorials/camera-traps-tutorial/timelapse-camera-tutorial.ipynb index 248c09f..2b7d29f 100644 --- a/book/tutorials/camera-traps-tutorial/timelapse-camera-tutorial.ipynb +++ b/book/tutorials/camera-traps-tutorial/timelapse-camera-tutorial.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "appreciated-seller", + "id": "authentic-device", "metadata": {}, "source": [ "# Time-lapse Cameras and Snow Applications\n", @@ -20,7 +20,7 @@ }, { "cell_type": "markdown", - "id": "sorted-princeton", + "id": "awful-teens", "metadata": {}, "source": [ "## Time-lapse Cameras on Grand Mesa during SnowEx Field Campaigns\n", @@ -48,7 +48,7 @@ }, { "cell_type": "markdown", - "id": "foster-adams", + "id": "alleged-endorsement", "metadata": {}, "source": [ "### All the time-lapse camera sites from SnowEx 2017 and 
2020 plotted together on Grand Mesa. \n", @@ -65,18 +65,7 @@ { "cell_type": "code", "execution_count": null, - "id": "legitimate-phase", - "metadata": {}, - "outputs": [], - "source": [ - "# Might need to reinstall folium to the JupyterHub each time you restart, unless it's added to the full environment. If so, use the following command line...\n", - "!pip install folium==0.12.1" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "paperback-petroleum", + "id": "worthy-sympathy", "metadata": { "tags": [ "hide-input" @@ -84,9 +73,6 @@ }, "outputs": [], "source": [ - "# Hide the code block\n", - "{\"tags\": [\"hide-input\",]} # trying to prevent this code block from showing, code here: https://myst-nb.readthedocs.io/en/latest/use/hiding.html\n", - "\n", "# Import the mapping package\n", "import folium # folium interactive notebook plotting: quick start guide here: https://python-visualization.github.io/folium/quickstart.html\n", "\n", @@ -173,7 +159,7 @@ }, { "cell_type": "markdown", - "id": "delayed-philosophy", + "id": "loose-dodge", "metadata": {}, "source": [ "* $\\color{red}{\\text{2017 locations are in red}}$\n", @@ -182,7 +168,7 @@ }, { "cell_type": "markdown", - "id": "ultimate-digest", + "id": "stuffed-edwards", "metadata": {}, "source": [ "### An automated way of viewing and mapping time-lapse photos" @@ -190,7 +176,7 @@ }, { "cell_type": "markdown", - "id": "rapid-office", + "id": "subject-bahamas", "metadata": {}, "source": [ "**First, import all the packages we'll need for this tutorial**" @@ -199,7 +185,7 @@ { "cell_type": "code", "execution_count": null, - "id": "leading-mumbai", + "id": "ahead-watch", "metadata": {}, "outputs": [], "source": [ @@ -235,7 +221,7 @@ }, { "cell_type": "markdown", - "id": "dominant-interference", + "id": "linear-suicide", "metadata": {}, "source": [ "**We will map 2020 time-lapse camera locations on the Grand Mesa with the 2020 snow pits for reference.** \n", @@ -247,7 +233,7 @@ { "cell_type": "code", 
"execution_count": null, - "id": "static-bangkok", + "id": "closed-appreciation", "metadata": {}, "outputs": [], "source": [ @@ -261,12 +247,12 @@ "\n", "# Convert it to a geopandas df and visualize the dataframe\n", "camera_depths = query_to_geopandas(qry, engine)\n", - "print(camera_depths.head())" + "camera_depths.head()" ] }, { "cell_type": "markdown", - "id": "external-skating", + "id": "married-milwaukee", "metadata": {}, "source": [ "**Pull out the columns of interest to make it easier to visualize**\n", @@ -282,7 +268,7 @@ { "cell_type": "code", "execution_count": null, - "id": "coordinated-garage", + "id": "controlling-silence", "metadata": {}, "outputs": [], "source": [ @@ -291,7 +277,7 @@ }, { "cell_type": "markdown", - "id": "early-starter", + "id": "legendary-penalty", "metadata": {}, "source": [ "**Grab all the unique geometry objects (i.e., locations)**" @@ -300,7 +286,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fifteen-economy", + "id": "christian-sierra", "metadata": {}, "outputs": [], "source": [ @@ -311,7 +297,7 @@ }, { "cell_type": "markdown", - "id": "capital-warrior", + "id": "checked-settlement", "metadata": {}, "source": [ "**And, print out how many of each we found** " @@ -320,7 +306,7 @@ { "cell_type": "code", "execution_count": null, - "id": "protective-progress", + "id": "offshore-marsh", "metadata": {}, "outputs": [], "source": [ @@ -333,7 +319,7 @@ }, { "cell_type": "markdown", - "id": "representative-scene", + "id": "modern-airport", "metadata": {}, "source": [ "#### Plot the camera locations, using snow pit locations for reference." @@ -342,7 +328,7 @@ { "cell_type": "code", "execution_count": null, - "id": "sensitive-action", + "id": "human-consideration", "metadata": {}, "outputs": [], "source": [ @@ -366,7 +352,7 @@ }, { "cell_type": "markdown", - "id": "surgical-polyester", + "id": "first-light", "metadata": {}, "source": [ "- What do you notice? Is there overlap between the snow pit and camera trap locations?" 
@@ -374,7 +360,7 @@ }, { "cell_type": "markdown", - "id": "blocked-surveillance", + "id": "previous-revolution", "metadata": {}, "source": [ "### Viewing the time-lapse photos\n", @@ -385,30 +371,47 @@ { "cell_type": "code", "execution_count": null, - "id": "accessory-slovenia", + "id": "another-weekend", "metadata": {}, "outputs": [], "source": [ - "!aws --no-progress s3 cp s3://snowex-data/tutorial-data/camera-trap/WSCT0378.JPG /tmp/WSCT0378.JPG\n", - "!aws --no-progress s3 cp s3://snowex-data/tutorial-data/camera-trap/WSCT0101.JPG /tmp/WSCT0101.JPG\n", - "!aws --no-progress s3 cp s3://snowex-data/tutorial-data/camera-trap/WSCT0742.JPG /tmp/WSCT0742.JPG" + "%%bash \n", + "\n", + "# Retrieve a copy of data files used in this tutorial from Zenodo.org:\n", + "# Re-running this cell will not re-download things if they already exist\n", + "\n", + "mkdir -p /tmp/tutorial-data\n", + "cd /tmp/tutorial-data\n", + "wget -q -nc -O data.zip https://zenodo.org/record/5504396/files/camera-trap.zip\n", + "unzip -q -n data.zip\n", + "rm data.zip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "written-submission", + "metadata": {}, + "outputs": [], + "source": [ + "TUTORIAL_DATA = '/tmp/tutorial-data/camera-trap'" ] }, { "cell_type": "markdown", - "id": "continental-checklist", + "id": "covered-eclipse", "metadata": {}, "source": [ ":::{admonition} Important Note :class: hint\n", "\n", - "Remember, this is a subset of sample images from SnowEx 2020 temporarily stored on SnowEx's Amazon Web Service (AWS) server. The final images for SnowEx 2017 and 2020 will all be available on NSIDC. \n", + "Remember, this is a subset of sample images from SnowEx 2020 temporarily stored for SnowEx Hackweek The final images for SnowEx 2017 and 2020 will all be available on NSIDC. 
\n", "\n", ":::" ] }, { "cell_type": "markdown", - "id": "spare-celtic", + "id": "informative-albuquerque", "metadata": {}, "source": [ "**Now display an example time-lapse image inside the notebook**\n", @@ -420,27 +423,27 @@ { "cell_type": "code", "execution_count": null, - "id": "close-characteristic", + "id": "local-rabbit", "metadata": {}, "outputs": [], "source": [ "# Pull and display the images from Oct, Jan, and May \n", - "october_img = Image(filename='/tmp/WSCT0101.JPG', width=500, height=350)\n", + "october_img = Image(filename=f'{TUTORIAL_DATA}/WSCT0101.JPG', width=500, height=350)\n", "print('Site E9B in October')\n", "display(october_img)\n", "\n", - "january_img = Image(filename='/tmp/WSCT0378.JPG', width=500, height=350)\n", + "january_img = Image(filename=f'{TUTORIAL_DATA}/WSCT0378.JPG', width=500, height=350)\n", "print('Site E9B in January')\n", "display(january_img)\n", "\n", - "may_img = Image(filename='/tmp/WSCT0742.JPG', width=500, height=350)\n", + "may_img = Image(filename=f'{TUTORIAL_DATA}/WSCT0742.JPG', width=500, height=350)\n", "print('Site E9B in May')\n", "display(may_img)" ] }, { "cell_type": "markdown", - "id": "widespread-cradle", + "id": "arabic-auckland", "metadata": {}, "source": [ "- What do you notice? Is this an open or closed canopy site? 
\n", @@ -450,7 +453,7 @@ }, { "cell_type": "markdown", - "id": "proved-budget", + "id": "northern-provider", "metadata": {}, "source": [ "## Time-lapse Camera Applications\n", @@ -465,18 +468,17 @@ { "cell_type": "code", "execution_count": null, - "id": "alert-camera", + "id": "reflected-greeting", "metadata": {}, "outputs": [], "source": [ - "!aws --no-progress s3 cp s3://snowex-data/tutorial-data/camera-trap/Picture1.png /tmp/camera-trap/Picture1.png\n", - "pil_img = Image(filename='/tmp/camera-trap/Picture1.png', width=800, height=500)\n", + "pil_img = Image(filename=f'{TUTORIAL_DATA}/Picture1.png', width=800, height=500)\n", "display(pil_img)" ] }, { "cell_type": "markdown", - "id": "computational-inclusion", + "id": "overall-mayor", "metadata": {}, "source": [ "**Figure 1: Equation to extract snow depth from camera images. For each image, take the difference in pixels between the length of a snow-free stake and the length of the stake and multiply by length(cm)/pixel. The ratio can be found by dividing the full length of the stake (304.8 cm) by the length of a snow-free stake in pixels.**\n", @@ -486,7 +488,7 @@ }, { "cell_type": "markdown", - "id": "increased-electron", + "id": "tough-department", "metadata": {}, "source": [ "#### **Plot the Snow Depth created from a Vegetated and an Open Camera Site**\n", @@ -497,7 +499,7 @@ }, { "cell_type": "markdown", - "id": "ranging-avatar", + "id": "revised-walnut", "metadata": {}, "source": [ "#### Let's start by visualizing where these two camera sites are on Grand Mesa\n", @@ -510,7 +512,7 @@ { "cell_type": "code", "execution_count": null, - "id": "proved-cameroon", + "id": "adequate-roller", "metadata": {}, "outputs": [], "source": [ @@ -523,7 +525,7 @@ }, { "cell_type": "markdown", - "id": "bacterial-universal", + "id": "neural-universal", "metadata": {}, "source": [ "**View associated time-lapse images using the AWS server.**" @@ -532,32 +534,21 @@ { "cell_type": "code", "execution_count": null, - "id": 
"closed-sessions", - "metadata": {}, - "outputs": [], - "source": [ - "!aws --no-progress s3 cp s3://snowex-data/tutorial-data/camera-trap/W1A/WSCT0013.JPG /tmp/camera-trap/W1A/WSCT0013.JPG\n", - "!aws --no-progress s3 cp s3://snowex-data/tutorial-data/camera-trap/W9A/WSCT0009.JPG /tmp/camera-trap/W9A/WSCT0009.JPG" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "silver-stockholm", + "id": "religious-gamma", "metadata": {}, "outputs": [], "source": [ - "open_canopy = Image(filename='/tmp/camera-trap/W1A/WSCT0013.JPG', width=500, height=350)\n", + "open_canopy = Image(filename=f'{TUTORIAL_DATA}/W1A/WSCT0013.JPG', width=500, height=350)\n", "print('Below is the open site, W1A')\n", "display(open_canopy)\n", - "closed_canopy = Image(filename='/tmp/camera-trap/W9A/WSCT0009.JPG', width=500, height=350)\n", + "closed_canopy = Image(filename=f'{TUTORIAL_DATA}/W9A/WSCT0009.JPG', width=500, height=350)\n", "print('Below is the forested site, W9A')\n", "display(closed_canopy)" ] }, { "cell_type": "markdown", - "id": "domestic-snowboard", + "id": "ranging-referral", "metadata": {}, "source": [ "**Grab the site data for both sites from the database**" @@ -566,7 +557,7 @@ { "cell_type": "code", "execution_count": null, - "id": "color-perfume", + "id": "portable-adelaide", "metadata": {}, "outputs": [], "source": [ @@ -587,7 +578,7 @@ }, { "cell_type": "markdown", - "id": "administrative-conflict", + "id": "completed-stock", "metadata": {}, "source": [ "**Plot the snow depth from open and forested time-lapse camera sites together**" @@ -596,7 +587,7 @@ { "cell_type": "code", "execution_count": null, - "id": "egyptian-diameter", + "id": "matched-luxury", "metadata": {}, "outputs": [], "source": [ @@ -616,7 +607,7 @@ }, { "cell_type": "markdown", - "id": "confidential-separation", + "id": "metallic-coupon", "metadata": {}, "source": [ "Previous forest-snow research, such as *Dickerson-Lange et al. 
(2017)*, have used similar analysis to determine the snow disappearance timing in forested regions of the Pacific Northwest. Further research could use this dataset to explore the following, \n", @@ -628,7 +619,7 @@ }, { "cell_type": "markdown", - "id": "prerequisite-comparative", + "id": "under-addition", "metadata": {}, "source": [ "### 2. Citizen Science Snow Classifications\n", @@ -640,7 +631,7 @@ }, { "cell_type": "markdown", - "id": "emotional-police", + "id": "cellular-being", "metadata": {}, "source": [ ":::{admonition} Potential Project Ideas\n", @@ -670,7 +661,7 @@ }, { "cell_type": "markdown", - "id": "figured-system", + "id": "statistical-reviewer", "metadata": {}, "source": [ "### **Thanks for attending this tutorial. We look forward to see what you will find in these datasets!**\n", diff --git a/book/tutorials/core-datasets/02_data-package.ipynb b/book/tutorials/core-datasets/02_data-package.ipynb index 56528e2..5e4b276 100644 --- a/book/tutorials/core-datasets/02_data-package.ipynb +++ b/book/tutorials/core-datasets/02_data-package.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "preliminary-reach", + "id": "heavy-pound", "metadata": {}, "source": [ "# Depths and Snow Pit Data Package Contents\n", @@ -17,7 +17,7 @@ { "cell_type": "code", "execution_count": null, - "id": "higher-credit", + "id": "permanent-psychology", "metadata": {}, "outputs": [], "source": [ @@ -31,15 +31,51 @@ "#plotting imports\n", "import matplotlib as mpl\n", "import matplotlib.pyplot as plt\n", - "plt.style.use(['seaborn-notebook'])\n", + "plt.style.use(['seaborn-notebook'])" + ] + }, + { + "cell_type": "markdown", + "id": "improving-squad", + "metadata": {}, + "source": [ + "## Access tutorial data from Zenodo\n", "\n", - "# unique imports\n", - "import s3fs #access data from the AWS s3 bucket" + "We've archived datasets used for 2021 Hackweek tutorials on [Zenodo](https://zenodo.org), to ensure that these tutorials can be run in the future. 
The following code pulls data from the Zenodo 'record' and unzips it:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "meaningful-devices", + "metadata": {}, + "outputs": [], + "source": [ + "%%bash \n", + "\n", + "# Retrieve a copy of data files used in this tutorial from Zenodo.org:\n", + "# Re-running this cell will not re-download things if they already exist\n", + "\n", + "mkdir -p /tmp/tutorial-data\n", + "cd /tmp/tutorial-data\n", + "wget -q -nc -O data.zip https://zenodo.org/record/5504396/files/core-datasets.zip\n", + "unzip -q -n data.zip\n", + "rm data.zip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "metropolitan-summer", + "metadata": {}, + "outputs": [], + "source": [ + "TUTORIAL_DATA = '/tmp/tutorial-data/core-datasets'" ] }, { "cell_type": "markdown", - "id": "authorized-messenger", + "id": "surrounded-somalia", "metadata": {}, "source": [ "## Download snow depth data from NSIDC\n", @@ -51,69 +87,49 @@ }, { "cell_type": "markdown", - "id": "necessary-atlas", + "id": "instructional-joining", "metadata": {}, "source": [ - "## Method 1: Programmatically download snow depth data from NSIDC" + "## Programmatically download snow depth data from NSIDC" ] }, { "cell_type": "code", "execution_count": null, - "id": "broadband-buffer", + "id": "impaired-gibraltar", "metadata": {}, "outputs": [], "source": [ - "%run './scripts/nsidc-download_SNEX20_SD.001.py' \n", - "print('Grand Mesa 2020 Snow Depth data download complete') " + "#os.chmod('/home/jovyan/.netrc', 0o600) #only necessary on snowex hackweek jupyterhub" ] }, { "cell_type": "code", "execution_count": null, - "id": "unauthorized-celebrity", + "id": "reflected-engagement", "metadata": {}, "outputs": [], "source": [ - "# show filename:\n", - "path = Path('./data/depths/')\n", - "\n", - "for filename in path.glob('*.csv'):\n", - " print(filename.name)" - ] - }, - { - "cell_type": "markdown", - "id": "sapphire-davis", - "metadata": {}, - "source": [ - "## 
Method 2: Access data from our shared resources server\n", - "Quick and easy access for hackweek or if you haven't gone through the steps to [configure programmatic access](https://snowex-hackweek.github.io/website/preliminary/earthdata.html#configure-programmatic-access-to-nasa-servers) to NASA servers yet. This pulls data from our AWS, S3 bucket" + "#%run './scripts/nsidc-download_SNEX20_SD.001.py' \n", + "#print('Grand Mesa 2020 Snow Depth data download complete') " ] }, { "cell_type": "code", "execution_count": null, - "id": "intensive-safety", + "id": "entertaining-stanley", "metadata": {}, "outputs": [], "source": [ - "# Uses AWS credentials on machine\n", - "fs = s3fs.S3FileSystem() \n", - "\n", - "# which data bucket?\n", - "bucket = 'snowex-data' \n", - " \n", - "# contents inside /depths \n", - "flist = fs.ls(f'{bucket}/tutorial-data/core-datasets/depths') \n", + "#path = Path('./data/depths/')\n", "\n", - "# show list of files\n", - "print('File list is: ', flist)" + "#for filename in path.glob('*.csv'):\n", + "# print(filename.name)" ] }, { "cell_type": "markdown", - "id": "brazilian-nickel", + "id": "individual-windsor", "metadata": {}, "source": [ "### Read the Depth File" @@ -122,14 +138,11 @@ { "cell_type": "code", "execution_count": null, - "id": "adopted-acrylic", + "id": "human-helicopter", "metadata": {}, "outputs": [], "source": [ - "# open depth data file, and create pandas dataframe\n", - "with fs.open(flist[0], 'rb') as f: \n", - " \n", - " df = pd.read_csv(f, sep=',', header=0, parse_dates=[[2,3]]) #parse the date[2] and time[3] columns such that they are read in as datetime dtypes\n", + "df = pd.read_csv(f'{TUTORIAL_DATA}/depths/SnowEx2020_SnowDepths_COGM_alldepths_v01.csv', sep=',', header=0, parse_dates=[[2,3]]) #parse the date[2] and time[3] columns such that they are read in as datetime dtypes\n", " \n", "print('file has been read, and is ready to use.')" ] @@ -137,7 +150,7 @@ { "cell_type": "code", "execution_count": null, - "id": 
"buried-march", + "id": "bigger-headset", "metadata": {}, "outputs": [], "source": [ @@ -147,7 +160,7 @@ }, { "cell_type": "markdown", - "id": "genuine-provision", + "id": "biological-captain", "metadata": {}, "source": [ "### Prep for Data Analysis" @@ -156,7 +169,7 @@ { "cell_type": "code", "execution_count": null, - "id": "handy-museum", + "id": "beginning-reproduction", "metadata": {}, "outputs": [], "source": [ @@ -179,7 +192,7 @@ }, { "cell_type": "markdown", - "id": "victorian-hundred", + "id": "dominican-plaza", "metadata": {}, "source": [ "#### Use .groupby() to sort the data set" @@ -188,7 +201,7 @@ { "cell_type": "code", "execution_count": null, - "id": "collective-birthday", + "id": "sweet-municipality", "metadata": {}, "outputs": [], "source": [ @@ -201,7 +214,7 @@ }, { "cell_type": "markdown", - "id": "brazilian-showcase", + "id": "successful-folder", "metadata": {}, "source": [ "#### ***Your turn***" @@ -210,7 +223,7 @@ { "cell_type": "code", "execution_count": null, - "id": "departmental-bernard", + "id": "molecular-globe", "metadata": {}, "outputs": [], "source": [ @@ -225,7 +238,7 @@ }, { "cell_type": "markdown", - "id": "second-wales", + "id": "authentic-robertson", "metadata": {}, "source": [ "#### Find depths associated with a certain measurement tool" @@ -234,7 +247,7 @@ { "cell_type": "code", "execution_count": null, - "id": "mediterranean-angle", + "id": "wicked-break", "metadata": {}, "outputs": [], "source": [ @@ -244,7 +257,7 @@ { "cell_type": "code", "execution_count": null, - "id": "published-liability", + "id": "apparent-guide", "metadata": {}, "outputs": [], "source": [ @@ -255,7 +268,7 @@ }, { "cell_type": "markdown", - "id": "naval-parker", + "id": "accessible-organ", "metadata": {}, "source": [ "#### ***Your turn***" @@ -264,7 +277,7 @@ { "cell_type": "code", "execution_count": null, - "id": "micro-patient", + "id": "proved-video", "metadata": {}, "outputs": [], "source": [ @@ -276,7 +289,7 @@ }, { "cell_type": "markdown", - "id": 
"demanding-antarctica", + "id": "periodic-estate", "metadata": {}, "source": [ "Let's make sure we all have the same pd.DataFrame() again" @@ -285,7 +298,7 @@ { "cell_type": "code", "execution_count": null, - "id": "wanted-forwarding", + "id": "searching-reducing", "metadata": {}, "outputs": [], "source": [ @@ -296,7 +309,7 @@ }, { "cell_type": "markdown", - "id": "stopped-stick", + "id": "broke-canyon", "metadata": {}, "source": [ "### Plotting" @@ -305,7 +318,7 @@ { "cell_type": "code", "execution_count": null, - "id": "entire-combine", + "id": "aquatic-academy", "metadata": {}, "outputs": [], "source": [ @@ -322,7 +335,7 @@ { "cell_type": "code", "execution_count": null, - "id": "representative-processing", + "id": "russian-plenty", "metadata": {}, "outputs": [], "source": [ @@ -336,7 +349,7 @@ }, { "cell_type": "markdown", - "id": "analyzed-contents", + "id": "friendly-harassment", "metadata": {}, "source": [ "## Download snow pit data from NSIDC\n", @@ -347,74 +360,27 @@ }, { "cell_type": "markdown", - "id": "toxic-bible", + "id": "prescribed-bristol", "metadata": {}, "source": [ - "## Method 1: Programmatically download snow pit data from NSIDC" + "## Programmatically download snow pit data from NSIDC" ] }, { "cell_type": "code", "execution_count": null, - "id": "renewable-despite", + "id": "descending-tender", "metadata": {}, "outputs": [], "source": [ "# load snow pit data\n", - "%run 'scripts/nsidc-download_SNEX20_GM_SP.001.py'\n", - "print('Grand Mesa 2020 Snow Pit data download complete')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bearing-jewel", - "metadata": {}, - "outputs": [], - "source": [ - "# show filenames\n", - "path = Path('./data/pits/')\n", - "\n", - "for i, filename in enumerate(path.glob('*5N19*')):\n", - " print(i, filename.name)" - ] - }, - { - "cell_type": "markdown", - "id": "fundamental-rotation", - "metadata": {}, - "source": [ - "## Method 2: Access data from our shared resources server\n", - "Again, 
quick access for hackweek or if you haven't gone through the steps to [configure programmatic access](https://snowex-hackweek.github.io/website/preliminary/earthdata.html#configure-programmatic-access-to-nasa-servers) to NASA servers yet. This pulls data from our AWS, S3 bucket" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "restricted-notification", - "metadata": {}, - "outputs": [], - "source": [ - "# Uses AWS credentials on machine\n", - "fs = s3fs.S3FileSystem() \n", - "\n", - "# which data bucket?\n", - "bucket = 'snowex-data' \n", - " \n", - "# contents inside /pits \n", - "flist = fs.ls(f'{bucket}/tutorial-data/core-datasets/pits/')\n", - "\n", - "# which files do we want?\n", - "suffix='.csv'\n", - "\n", - "# list comprehension to get .csv files (not .xlsx or .jpgs)\n", - "csv_files = [filename for filename in flist if filename.endswith(suffix)]\n", - "csv_files[:5]" + "#%run 'scripts/nsidc-download_SNEX20_GM_SP.001.py'\n", + "#print('Grand Mesa 2020 Snow Pit data download complete')" ] }, { "cell_type": "markdown", - "id": "alien-orbit", + "id": "removable-ballet", "metadata": {}, "source": [ "### Don't want to work with all the files? 
Method to filter files" @@ -423,19 +389,21 @@ { "cell_type": "code", "execution_count": null, - "id": "incoming-accuracy", + "id": "charitable-madison", "metadata": {}, "outputs": [], "source": [ "# what files would you like to find?\n", "parameter = 'temperature'\n", "pitID = '5N19'\n", - "date = '20200128'" + "date = '20200128'\n", + "\n", + "path = '{}/pits/SnowEx20_SnowPits_GMIOP_{}_{}_{}_v01.csv'.format(TUTORIAL_DATA, date, pitID, parameter)" ] }, { "cell_type": "markdown", - "id": "orange-zealand", + "id": "inside-speed", "metadata": {}, "source": [ "### Read the Pit Parameter File" @@ -444,19 +412,17 @@ { "cell_type": "code", "execution_count": null, - "id": "pretty-terrain", + "id": "dressed-train", "metadata": {}, "outputs": [], "source": [ - "with fs.open('/snowex-data/tutorial-data/core-datasets/pits/SnowEx20_SnowPits_GMIOP_{}_{}_{}_v01.csv'.format(date, pitID, parameter), 'rb') as f:\n", - " \n", - " t = pd.read_csv(f, header=7)\n", + "t = pd.read_csv(path, header=7)\n", "t" ] }, { "cell_type": "markdown", - "id": "dental-summer", + "id": "fatty-accounting", "metadata": {}, "source": [ "### Plotting" @@ -465,7 +431,7 @@ { "cell_type": "code", "execution_count": null, - "id": "suitable-livestock", + "id": "crude-charter", "metadata": {}, "outputs": [], "source": [ @@ -481,22 +447,21 @@ { "cell_type": "code", "execution_count": null, - "id": "rising-rendering", + "id": "higher-middle", "metadata": {}, "outputs": [], "source": [ "# grab a different pit parameter file\n", "parameter = 'density'\n", - "with fs.open('/snowex-data/tutorial-data/core-datasets/pits/SnowEx20_SnowPits_GMIOP_{}_{}_{}_v01.csv'.format(date, pitID, parameter), 'rb') as f:\n", - "\n", - " d = pd.read_csv(f, header=7)\n", + "path = '/tmp/tutorial-data/core-datasets/pits/SnowEx20_SnowPits_GMIOP_{}_{}_{}_v01.csv'.format(date, pitID, parameter)\n", + "d = pd.read_csv(path, header=7)\n", "d" ] }, { "cell_type": "code", "execution_count": null, - "id": "described-sharp", + "id": 
"quality-manitoba", "metadata": {}, "outputs": [], "source": [ @@ -508,7 +473,7 @@ { "cell_type": "code", "execution_count": null, - "id": "reverse-newfoundland", + "id": "relevant-going", "metadata": {}, "outputs": [], "source": [ @@ -545,14 +510,6 @@ "ax.set_xlabel('Density kg/m3')\n", "ax.set_ylabel('Snow Depth (cm)') " ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "governmental-solid", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/book/tutorials/core-datasets/03_practice-querying.ipynb b/book/tutorials/core-datasets/03_practice-querying.ipynb index 9a37a7a..8de61df 100644 --- a/book/tutorials/core-datasets/03_practice-querying.ipynb +++ b/book/tutorials/core-datasets/03_practice-querying.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "minimal-cuisine", + "id": "urban-steal", "metadata": {}, "source": [ "# Practice Querying the Snowexsql Database\n", @@ -18,7 +18,7 @@ { "cell_type": "code", "execution_count": null, - "id": "turned-discussion", + "id": "floating-tennis", "metadata": {}, "outputs": [], "source": [ @@ -36,7 +36,7 @@ { "cell_type": "code", "execution_count": null, - "id": "stable-reconstruction", + "id": "charitable-wages", "metadata": {}, "outputs": [], "source": [ @@ -49,7 +49,7 @@ }, { "cell_type": "markdown", - "id": "arranged-checkout", + "id": "unlikely-dollar", "metadata": {}, "source": [ "## Snow Pit data are contained in the following data tables: \n", @@ -66,7 +66,7 @@ { "cell_type": "code", "execution_count": null, - "id": "healthy-letters", + "id": "stupid-pathology", "metadata": {}, "outputs": [], "source": [ @@ -81,7 +81,7 @@ }, { "cell_type": "markdown", - "id": "returning-locator", + "id": "unlikely-immune", "metadata": {}, "source": [ "#### 1a). 
Unsure of the flight date, but know which sensor you'd like to overlap with, here's how:" @@ -90,7 +90,7 @@ { "cell_type": "code", "execution_count": null, - "id": "french-doubt", + "id": "polished-boxing", "metadata": {}, "outputs": [], "source": [ @@ -131,7 +131,7 @@ }, { "cell_type": "markdown", - "id": "passing-virginia", + "id": "hundred-namibia", "metadata": {}, "source": [ "#### 1b). Want to select an exact flight date match? Here's how:" @@ -140,7 +140,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cooked-chassis", + "id": "liberal-consent", "metadata": {}, "outputs": [], "source": [ @@ -162,7 +162,7 @@ }, { "cell_type": "markdown", - "id": "innovative-housing", + "id": "compact-printer", "metadata": {}, "source": [ "#### 1c). Want to select a range of dates near the flight date? Here's how:" @@ -171,7 +171,7 @@ { "cell_type": "code", "execution_count": null, - "id": "provincial-equipment", + "id": "varying-influence", "metadata": {}, "outputs": [], "source": [ @@ -197,7 +197,7 @@ }, { "cell_type": "markdown", - "id": "creative-railway", + "id": "located-redhead", "metadata": {}, "source": [ "#### 1d). Have a known date that you wish to select data for, here's how:" @@ -206,7 +206,7 @@ { "cell_type": "code", "execution_count": null, - "id": "marine-medline", + "id": "hundred-albania", "metadata": {}, "outputs": [], "source": [ @@ -252,7 +252,7 @@ }, { "cell_type": "markdown", - "id": "excessive-fever", + "id": "emotional-america", "metadata": {}, "source": [ "### Nice work, almost done here!" 
@@ -260,7 +260,7 @@ }, { "cell_type": "markdown", - "id": "necessary-daisy", + "id": "criminal-insured", "metadata": {}, "source": [ "## Classify pit data based on the depth and vegetation matrix\n", @@ -277,7 +277,7 @@ { "cell_type": "code", "execution_count": null, - "id": "nuclear-memphis", + "id": "ranging-performance", "metadata": {}, "outputs": [], "source": [ @@ -314,7 +314,7 @@ }, { "cell_type": "markdown", - "id": "premium-stylus", + "id": "chinese-likelihood", "metadata": {}, "source": [ "#### 2b). Distinguish pits by snow depth classes: \n", @@ -328,7 +328,7 @@ { "cell_type": "code", "execution_count": null, - "id": "corresponding-butterfly", + "id": "growing-glance", "metadata": {}, "outputs": [], "source": [ @@ -366,7 +366,7 @@ { "cell_type": "code", "execution_count": null, - "id": "searching-compiler", + "id": "floral-insight", "metadata": {}, "outputs": [], "source": [ @@ -400,7 +400,7 @@ { "cell_type": "code", "execution_count": null, - "id": "directed-squad", + "id": "marked-government", "metadata": {}, "outputs": [], "source": [ @@ -414,7 +414,7 @@ }, { "cell_type": "markdown", - "id": "generic-marketing", + "id": "proof-antarctica", "metadata": {}, "source": [ "### Plot" @@ -423,7 +423,7 @@ { "cell_type": "code", "execution_count": null, - "id": "funky-force", + "id": "emotional-mobile", "metadata": {}, "outputs": [], "source": [ @@ -436,7 +436,7 @@ { "cell_type": "code", "execution_count": null, - "id": "married-enforcement", + "id": "smooth-infrastructure", "metadata": {}, "outputs": [], "source": [ @@ -449,7 +449,7 @@ { "cell_type": "code", "execution_count": null, - "id": "shaped-venue", + "id": "statutory-antique", "metadata": {}, "outputs": [], "source": [ @@ -460,21 +460,13 @@ { "cell_type": "code", "execution_count": null, - "id": "contemporary-madness", + "id": "stunning-angle", "metadata": {}, "outputs": [], "source": [ "# Close your session to avoid hanging transactions\n", "session.close()" ] - }, - { - "cell_type": "code", - 
"execution_count": null, - "id": "abroad-hepatitis", - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/book/tutorials/geospatial/SNOTEL_query.ipynb b/book/tutorials/geospatial/SNOTEL_query.ipynb index a9d0308..f48d383 100644 --- a/book/tutorials/geospatial/SNOTEL_query.ipynb +++ b/book/tutorials/geospatial/SNOTEL_query.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "adverse-network", + "id": "hired-infrastructure", "metadata": {}, "source": [ "# Dynamic Query of SNOTEL data\n", @@ -14,7 +14,7 @@ }, { "cell_type": "markdown", - "id": "active-battle", + "id": "social-compression", "metadata": {}, "source": [ "## Introduction\n", @@ -45,7 +45,7 @@ }, { "cell_type": "markdown", - "id": "smart-reflection", + "id": "protecting-diary", "metadata": {}, "source": [ "## CUAHSI WOF server and automated Python data queries\n", @@ -58,18 +58,17 @@ { "cell_type": "code", "execution_count": null, - "id": "sunset-means", + "id": "consistent-eleven", "metadata": {}, "outputs": [], "source": [ "#This is the latest CUAHSI API endpoint\n", - "#http://his.cuahsi.org/wofws.html\n", "wsdlurl = 'https://hydroportal.cuahsi.org/Snotel/cuahsi_1_1.asmx?WSDL'" ] }, { "cell_type": "markdown", - "id": "governmental-cookie", + "id": "monetary-politics", "metadata": {}, "source": [ "### Acronym soup\n", @@ -85,7 +84,7 @@ "\n", "There are a few packages out there that offer convenience functions to query the online SNOTEL databases and unpack the results. 
\n", "* climata (https://pypi.org/project/climata/) - last commit Sept 2017 (not a good sign)\n", - "* ulmo (https://github.com/ulmo-dev/ulmo) - last commit Oct 2020 (will be superseded by a package called Quest, but still maintained by [Emilio Mayorga](https://apl.uw.edu/people/profile.php?last_name=Mayorga&first_name=Emilio) over at UW APL)\n", + "* ulmo (https://github.com/ulmo-dev/ulmo) - maintained by [Emilio Mayorga](https://apl.uw.edu/people/profile.php?last_name=Mayorga&first_name=Emilio) over at UW APL)\n", "\n", "You can also write your own queries using the Python `requests` module and some built-in XML parsing libraries.\n", "\n", @@ -94,42 +93,10 @@ "We will use ulmo with daily data for this exercise, but please feel free to experiment with hourly data, other variables or other approaches to fetch SNOTEL data." ] }, - { - "cell_type": "markdown", - "id": "clear-migration", - "metadata": {}, - "source": [ - "### Important ulmo installation note\n", - "\n", - "We're going to use the latest development version of ulmo, straight from the github source! This is a good exercise, and will show you how to install a package directly from source code on github." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "noted-moore", - "metadata": {}, - "outputs": [], - "source": [ - "#Install directly from github repo main branch\n", - "%pip install -q git+https://github.com/ulmo-dev/ulmo.git" - ] - }, { "cell_type": "code", "execution_count": null, - "id": "defined-imaging", - "metadata": {}, - "outputs": [], - "source": [ - "#Despite warning, shouldn't need to restart kernel if all goes well\n", - "import ulmo" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "living-illness", + "id": "promotional-quantity", "metadata": {}, "outputs": [], "source": [ @@ -140,12 +107,13 @@ "import pandas as pd\n", "import geopandas as gpd\n", "from shapely.geometry import Point\n", - "import contextily as ctx" + "import contextily as ctx\n", + "import ulmo" ] }, { "cell_type": "markdown", - "id": "developmental-calendar", + "id": "streaming-mercy", "metadata": {}, "source": [ "## Part 1: Spatial Query SNOTEL sites\n", @@ -156,7 +124,7 @@ { "cell_type": "code", "execution_count": null, - "id": "unnecessary-input", + "id": "natural-union", "metadata": {}, "outputs": [], "source": [ @@ -166,7 +134,7 @@ { "cell_type": "code", "execution_count": null, - "id": "necessary-actress", + "id": "closing-format", "metadata": {}, "outputs": [], "source": [ @@ -176,7 +144,7 @@ }, { "cell_type": "markdown", - "id": "suited-saudi", + "id": "knowing-marriage", "metadata": {}, "source": [ "### Store the dictionary as a Pandas DataFrame called `sites_df`\n", @@ -188,7 +156,7 @@ { "cell_type": "code", "execution_count": null, - "id": "super-kernel", + "id": "young-crazy", "metadata": {}, "outputs": [], "source": [ @@ -198,7 +166,7 @@ }, { "cell_type": "markdown", - "id": "athletic-camcorder", + "id": "comprehensive-sugar", "metadata": {}, "source": [ "### Clean up the DataFrame and prepare Point geometry objects\n", @@ -211,7 +179,7 @@ { "cell_type": "code", "execution_count": null, - "id": "distributed-physiology", + "id": 
"meaningful-pixel", "metadata": {}, "outputs": [], "source": [ @@ -221,7 +189,7 @@ { "cell_type": "code", "execution_count": null, - "id": "skilled-tower", + "id": "approximate-spare", "metadata": {}, "outputs": [], "source": [ @@ -231,7 +199,7 @@ }, { "cell_type": "markdown", - "id": "expensive-chick", + "id": "short-fiction", "metadata": {}, "source": [ "### Review output\n", @@ -244,7 +212,7 @@ { "cell_type": "code", "execution_count": null, - "id": "desperate-profit", + "id": "fresh-pierre", "metadata": {}, "outputs": [], "source": [ @@ -254,7 +222,7 @@ { "cell_type": "code", "execution_count": null, - "id": "adequate-payday", + "id": "basic-accused", "metadata": {}, "outputs": [], "source": [ @@ -264,7 +232,7 @@ { "cell_type": "code", "execution_count": null, - "id": "coral-domestic", + "id": "advised-schema", "metadata": {}, "outputs": [], "source": [ @@ -273,7 +241,7 @@ }, { "cell_type": "markdown", - "id": "sporting-causing", + "id": "outer-disorder", "metadata": {}, "source": [ "### Convert to a Geopandas GeoDataFrame\n", @@ -284,7 +252,7 @@ { "cell_type": "code", "execution_count": null, - "id": "smoking-robert", + "id": "desperate-occurrence", "metadata": {}, "outputs": [], "source": [ @@ -295,7 +263,7 @@ { "cell_type": "code", "execution_count": null, - "id": "hollywood-silver", + "id": "eight-advancement", "metadata": {}, "outputs": [], "source": [ @@ -304,7 +272,7 @@ }, { "cell_type": "markdown", - "id": "nervous-denmark", + "id": "strong-cross", "metadata": {}, "source": [ "### Create a scatterplot showing elevation values for all sites" @@ -313,7 +281,7 @@ { "cell_type": "code", "execution_count": null, - "id": "genetic-period", + "id": "international-employment", "metadata": {}, "outputs": [], "source": [ @@ -325,7 +293,7 @@ { "cell_type": "code", "execution_count": null, - "id": "experimental-grade", + "id": "union-anger", "metadata": {}, "outputs": [], "source": [ @@ -338,7 +306,7 @@ }, { "cell_type": "markdown", - "id": "approximate-converter", 
+ "id": "serious-characteristic", "metadata": {}, "source": [ "### Exclude the Alaska (AK) points to isolate points over Western U.S.\n", @@ -349,7 +317,7 @@ { "cell_type": "code", "execution_count": null, - "id": "conscious-indonesia", + "id": "trained-improvement", "metadata": {}, "outputs": [], "source": [ @@ -358,7 +326,7 @@ }, { "cell_type": "markdown", - "id": "japanese-frederick", + "id": "sustainable-quarterly", "metadata": {}, "source": [ "* Alternatively, can use a spatial filter (see GeoPandas `cx` indexer functionality for a bounding box)" @@ -367,7 +335,7 @@ { "cell_type": "code", "execution_count": null, - "id": "centered-closure", + "id": "facial-geneva", "metadata": {}, "outputs": [], "source": [ @@ -378,7 +346,7 @@ { "cell_type": "code", "execution_count": null, - "id": "focal-rubber", + "id": "demographic-hospital", "metadata": {}, "outputs": [], "source": [ @@ -387,7 +355,7 @@ }, { "cell_type": "markdown", - "id": "desirable-refund", + "id": "waiting-examination", "metadata": {}, "source": [ "### Update your scatterplot as sanity check\n", @@ -397,7 +365,7 @@ { "cell_type": "code", "execution_count": null, - "id": "minus-spotlight", + "id": "auburn-stomach", "metadata": {}, "outputs": [], "source": [ @@ -409,7 +377,7 @@ }, { "cell_type": "markdown", - "id": "loaded-stake", + "id": "acute-healthcare", "metadata": {}, "source": [ "### Export SNOTEL site GeoDataFrame as a geojson\n", @@ -419,7 +387,7 @@ { "cell_type": "code", "execution_count": null, - "id": "separate-stack", + "id": "limited-mumbai", "metadata": {}, "outputs": [], "source": [ @@ -429,7 +397,7 @@ { "cell_type": "code", "execution_count": null, - "id": "nasty-surveillance", + "id": "former-russian", "metadata": {}, "outputs": [], "source": [ @@ -440,7 +408,7 @@ }, { "cell_type": "markdown", - "id": "wooden-dodge", + "id": "least-newark", "metadata": {}, "source": [ "## Part 2: Spatial filter points by polygon" @@ -448,7 +416,7 @@ }, { "cell_type": "markdown", - "id": 
"hollow-license", + "id": "middle-norway", "metadata": {}, "source": [ "### Load Grand Mesa Polygon" @@ -457,7 +425,7 @@ { "cell_type": "code", "execution_count": null, - "id": "earlier-tract", + "id": "bottom-classroom", "metadata": {}, "outputs": [], "source": [ @@ -467,7 +435,7 @@ { "cell_type": "code", "execution_count": null, - "id": "adjustable-improvement", + "id": "weekly-miami", "metadata": {}, "outputs": [], "source": [ @@ -477,7 +445,7 @@ { "cell_type": "code", "execution_count": null, - "id": "correct-comparison", + "id": "finite-stone", "metadata": {}, "outputs": [], "source": [ @@ -486,7 +454,7 @@ }, { "cell_type": "markdown", - "id": "piano-sphere", + "id": "central-lover", "metadata": {}, "source": [ "## A quick aside on `geometry` objects" @@ -494,7 +462,7 @@ }, { "cell_type": "markdown", - "id": "given-commissioner", + "id": "seventh-greene", "metadata": {}, "source": [ "### Vector data contain `geometry` objects\n", @@ -506,7 +474,7 @@ }, { "cell_type": "markdown", - "id": "visible-programmer", + "id": "great-trust", "metadata": {}, "source": [ "![Geometry types](https://datacarpentry.org/organization-geospatial/fig/dc-spatial-vector/pnt_line_poly.png)\n", @@ -515,7 +483,7 @@ }, { "cell_type": "markdown", - "id": "cellular-relations", + "id": "rental-england", "metadata": {}, "source": [ "### Isolate Polygon geometry within GeoDataFrame" @@ -524,7 +492,7 @@ { "cell_type": "code", "execution_count": null, - "id": "heavy-messenger", + "id": "exciting-camcorder", "metadata": {}, "outputs": [], "source": [ @@ -534,7 +502,7 @@ { "cell_type": "code", "execution_count": null, - "id": "interesting-dispatch", + "id": "alleged-frederick", "metadata": {}, "outputs": [], "source": [ @@ -544,7 +512,7 @@ { "cell_type": "code", "execution_count": null, - "id": "plastic-jackson", + "id": "usual-address", "metadata": {}, "outputs": [], "source": [ @@ -554,7 +522,7 @@ { "cell_type": "code", "execution_count": null, - "id": "located-conducting", + "id": 
"systematic-translation", "metadata": {}, "outputs": [], "source": [ @@ -564,7 +532,7 @@ { "cell_type": "code", "execution_count": null, - "id": "considerable-liver", + "id": "theoretical-egypt", "metadata": {}, "outputs": [], "source": [ @@ -575,7 +543,7 @@ { "cell_type": "code", "execution_count": null, - "id": "appreciated-township", + "id": "loaded-challenge", "metadata": {}, "outputs": [], "source": [ @@ -585,7 +553,7 @@ { "cell_type": "code", "execution_count": null, - "id": "distinct-ghost", + "id": "positive-purple", "metadata": {}, "outputs": [], "source": [ @@ -595,7 +563,7 @@ { "cell_type": "code", "execution_count": null, - "id": "executive-talent", + "id": "public-wellington", "metadata": {}, "outputs": [], "source": [ @@ -604,7 +572,7 @@ }, { "cell_type": "markdown", - "id": "negative-forty", + "id": "downtown-priority", "metadata": {}, "source": [ "### Generate boolean index for points that intersect the polygon\n", @@ -614,7 +582,7 @@ { "cell_type": "code", "execution_count": null, - "id": "equal-poster", + "id": "considered-herald", "metadata": {}, "outputs": [], "source": [ @@ -624,7 +592,7 @@ { "cell_type": "code", "execution_count": null, - "id": "surprising-exposure", + "id": "gorgeous-worry", "metadata": {}, "outputs": [], "source": [ @@ -634,7 +602,7 @@ { "cell_type": "code", "execution_count": null, - "id": "behavioral-intent", + "id": "concrete-property", "metadata": {}, "outputs": [], "source": [ @@ -643,7 +611,7 @@ }, { "cell_type": "markdown", - "id": "spectacular-exception", + "id": "latter-fifteen", "metadata": {}, "source": [ "### Use fancy indexing to isolate points and return new GeoDataFrame" @@ -652,7 +620,7 @@ { "cell_type": "code", "execution_count": null, - "id": "ancient-taylor", + "id": "lyric-penalty", "metadata": {}, "outputs": [], "source": [ @@ -662,7 +630,7 @@ { "cell_type": "code", "execution_count": null, - "id": "medium-ceremony", + "id": "extraordinary-repeat", "metadata": {}, "outputs": [], "source": [ @@ -671,7 
+639,7 @@ }, { "cell_type": "markdown", - "id": "minute-czech", + "id": "embedded-upset", "metadata": {}, "source": [ "### Quick plot" @@ -680,7 +648,7 @@ { "cell_type": "code", "execution_count": null, - "id": "facial-punch", + "id": "committed-ecuador", "metadata": {}, "outputs": [], "source": [ @@ -694,7 +662,7 @@ { "cell_type": "code", "execution_count": null, - "id": "saved-small", + "id": "unlimited-inspector", "metadata": {}, "outputs": [], "source": [ @@ -705,7 +673,7 @@ { "cell_type": "code", "execution_count": null, - "id": "common-passion", + "id": "greatest-physiology", "metadata": {}, "outputs": [], "source": [ @@ -714,7 +682,7 @@ }, { "cell_type": "markdown", - "id": "flush-designation", + "id": "permanent-alexander", "metadata": {}, "source": [ "### Add a basemap\n", @@ -728,7 +696,7 @@ { "cell_type": "code", "execution_count": null, - "id": "drawn-rabbit", + "id": "applicable-plaintiff", "metadata": {}, "outputs": [], "source": [ @@ -738,7 +706,7 @@ { "cell_type": "code", "execution_count": null, - "id": "earned-masters", + "id": "colonial-deputy", "metadata": {}, "outputs": [], "source": [ @@ -748,7 +716,7 @@ }, { "cell_type": "markdown", - "id": "atmospheric-superintendent", + "id": "latest-tsunami", "metadata": {}, "source": [ "## Part 3: Time series analysis for one station\n", @@ -757,7 +725,7 @@ }, { "cell_type": "markdown", - "id": "legislative-prayer", + "id": "removable-creation", "metadata": {}, "source": [ "https://wcc.sc.egov.usda.gov/nwcc/site?sitenum=622&state=co" @@ -766,7 +734,7 @@ { "cell_type": "code", "execution_count": null, - "id": "metropolitan-aging", + "id": "retained-southwest", "metadata": {}, "outputs": [], "source": [ @@ -776,7 +744,7 @@ }, { "cell_type": "markdown", - "id": "grateful-reporter", + "id": "hourly-watson", "metadata": {}, "source": [ "### Get available measurements for this site\n", @@ -786,7 +754,7 @@ { "cell_type": "code", "execution_count": null, - "id": "honest-division", + "id": "recovered-entry", 
"metadata": {}, "outputs": [], "source": [ @@ -795,7 +763,7 @@ }, { "cell_type": "markdown", - "id": "adapted-blade", + "id": "registered-queens", "metadata": {}, "source": [ "* _H = \"hourly\"\n", @@ -805,7 +773,7 @@ }, { "cell_type": "markdown", - "id": "motivated-malawi", + "id": "reduced-thought", "metadata": {}, "source": [ "### Let's consider the 'SNOTEL:SNWD_D' variable (Daily Snow Depth)\n", @@ -818,7 +786,7 @@ { "cell_type": "code", "execution_count": null, - "id": "injured-rapid", + "id": "individual-college", "metadata": {}, "outputs": [], "source": [ @@ -831,7 +799,7 @@ { "cell_type": "code", "execution_count": null, - "id": "blocked-mechanism", + "id": "extensive-helen", "metadata": {}, "outputs": [], "source": [ @@ -844,7 +812,7 @@ { "cell_type": "code", "execution_count": null, - "id": "operational-czech", + "id": "unsigned-genome", "metadata": {}, "outputs": [], "source": [ @@ -853,7 +821,7 @@ }, { "cell_type": "markdown", - "id": "joint-dayton", + "id": "pursuant-friday", "metadata": {}, "source": [ "### Define a function to fetch data\n", @@ -864,7 +832,7 @@ { "cell_type": "code", "execution_count": null, - "id": "patent-style", + "id": "enormous-clause", "metadata": {}, "outputs": [], "source": [ @@ -895,7 +863,7 @@ }, { "cell_type": "markdown", - "id": "native-episode", + "id": "local-construction", "metadata": {}, "source": [ "### Use this function to get the full 'SNOTEL:SNWD_D' record for one station\n", @@ -906,7 +874,7 @@ { "cell_type": "code", "execution_count": null, - "id": "adaptive-dinner", + "id": "environmental-leonard", "metadata": {}, "outputs": [], "source": [ @@ -918,7 +886,7 @@ { "cell_type": "code", "execution_count": null, - "id": "facial-inventory", + "id": "vital-opening", "metadata": {}, "outputs": [], "source": [ @@ -930,7 +898,7 @@ { "cell_type": "code", "execution_count": null, - "id": "crude-walker", + "id": "anticipated-mechanism", "metadata": {}, "outputs": [], "source": [ @@ -940,7 +908,7 @@ { "cell_type": "code", 
"execution_count": null, - "id": "capable-temple", + "id": "great-sharing", "metadata": {}, "outputs": [], "source": [ @@ -951,7 +919,7 @@ }, { "cell_type": "markdown", - "id": "subsequent-grill", + "id": "pleasant-belarus", "metadata": {}, "source": [ "### Create a quick plot to view the time series\n", @@ -962,7 +930,7 @@ { "cell_type": "code", "execution_count": null, - "id": "mounted-bubble", + "id": "ongoing-livestock", "metadata": {}, "outputs": [], "source": [ @@ -971,7 +939,7 @@ }, { "cell_type": "markdown", - "id": "restricted-absence", + "id": "relevant-conducting", "metadata": {}, "source": [ "### Compute the integer day of year (doy) and integer day of water year (dowy)\n", @@ -986,7 +954,7 @@ { "cell_type": "code", "execution_count": null, - "id": "needed-secondary", + "id": "arabic-second", "metadata": {}, "outputs": [], "source": [ @@ -1005,7 +973,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fallen-recall", + "id": "talented-inventory", "metadata": {}, "outputs": [], "source": [ @@ -1014,7 +982,7 @@ }, { "cell_type": "markdown", - "id": "accomplished-sequence", + "id": "hollow-denial", "metadata": {}, "source": [ "### Compute statistics for each day of the water year, using values from all years\n", @@ -1025,7 +993,7 @@ { "cell_type": "code", "execution_count": null, - "id": "consecutive-mauritius", + "id": "relative-mustang", "metadata": {}, "outputs": [], "source": [ @@ -1035,7 +1003,7 @@ { "cell_type": "code", "execution_count": null, - "id": "offensive-biology", + "id": "joint-creator", "metadata": {}, "outputs": [], "source": [ @@ -1045,7 +1013,7 @@ }, { "cell_type": "markdown", - "id": "living-grove", + "id": "funny-density", "metadata": {}, "source": [ "### Create a plot of these aggregated dowy values\n", @@ -1055,7 +1023,7 @@ { "cell_type": "code", "execution_count": null, - "id": "structured-zambia", + "id": "knowing-bishop", "metadata": {}, "outputs": [], "source": [ @@ -1080,7 +1048,7 @@ }, { "cell_type": "markdown", - 
"id": "intellectual-doctor", + "id": "forty-effects", "metadata": {}, "source": [ "### Add the daily snow depth values for the current water year\n", @@ -1093,7 +1061,7 @@ { "cell_type": "code", "execution_count": null, - "id": "premium-bristol", + "id": "protected-diary", "metadata": {}, "outputs": [], "source": [ @@ -1105,7 +1073,7 @@ { "cell_type": "code", "execution_count": null, - "id": "lesser-tract", + "id": "checked-metabolism", "metadata": {}, "outputs": [], "source": [ @@ -1116,7 +1084,7 @@ { "cell_type": "code", "execution_count": null, - "id": "colonial-mattress", + "id": "italic-angle", "metadata": {}, "outputs": [], "source": [ @@ -1139,7 +1107,7 @@ }, { "cell_type": "markdown", - "id": "lovely-theology", + "id": "solar-swaziland", "metadata": {}, "source": [ "### What was the percentage of \"normal\" snow depth on April 1 of this year?\n", @@ -1149,7 +1117,7 @@ { "cell_type": "code", "execution_count": null, - "id": "integrated-numbers", + "id": "institutional-velvet", "metadata": {}, "outputs": [], "source": [ @@ -1159,7 +1127,7 @@ { "cell_type": "code", "execution_count": null, - "id": "emerging-rogers", + "id": "considerable-relations", "metadata": {}, "outputs": [], "source": [ @@ -1170,7 +1138,7 @@ { "cell_type": "code", "execution_count": null, - "id": "physical-extreme", + "id": "chicken-meeting", "metadata": {}, "outputs": [], "source": [ @@ -1180,7 +1148,7 @@ { "cell_type": "code", "execution_count": null, - "id": "falling-warren", + "id": "breathing-transportation", "metadata": {}, "outputs": [], "source": [ @@ -1191,7 +1159,7 @@ }, { "cell_type": "markdown", - "id": "educational-murray", + "id": "breeding-situation", "metadata": {}, "source": [ "### Index DataFrame by date or date range\n", @@ -1201,7 +1169,7 @@ { "cell_type": "code", "execution_count": null, - "id": "complicated-racing", + "id": "directed-american", "metadata": {}, "outputs": [], "source": [ @@ -1212,7 +1180,7 @@ { "cell_type": "code", "execution_count": null, - "id": 
"static-separate", + "id": "parallel-there", "metadata": {}, "outputs": [], "source": [ @@ -1223,7 +1191,7 @@ { "cell_type": "code", "execution_count": null, - "id": "domestic-pride", + "id": "bored-weapon", "metadata": {}, "outputs": [], "source": [ @@ -1233,7 +1201,7 @@ { "cell_type": "code", "execution_count": null, - "id": "applied-broadcasting", + "id": "square-edwards", "metadata": {}, "outputs": [], "source": [ @@ -1242,7 +1210,7 @@ }, { "cell_type": "markdown", - "id": "proud-horse", + "id": "damaged-samuel", "metadata": {}, "source": [ "### Query multiple sites\n", @@ -1252,7 +1220,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fancy-cassette", + "id": "minor-islam", "metadata": {}, "outputs": [], "source": [ @@ -1270,7 +1238,7 @@ { "cell_type": "code", "execution_count": null, - "id": "standing-estate", + "id": "removed-proportion", "metadata": {}, "outputs": [], "source": [ @@ -1280,7 +1248,7 @@ { "cell_type": "code", "execution_count": null, - "id": "expanded-printer", + "id": "sorted-barrier", "metadata": {}, "outputs": [], "source": [ @@ -1290,7 +1258,7 @@ { "cell_type": "code", "execution_count": null, - "id": "tracked-receipt", + "id": "alike-request", "metadata": {}, "outputs": [], "source": [ @@ -1301,7 +1269,7 @@ { "cell_type": "code", "execution_count": null, - "id": "civil-score", + "id": "funny-table", "metadata": {}, "outputs": [], "source": [ @@ -1310,7 +1278,7 @@ }, { "cell_type": "markdown", - "id": "immediate-rwanda", + "id": "square-acquisition", "metadata": {}, "source": [ "### Scatterplot to compare corresponding values" @@ -1319,7 +1287,7 @@ { "cell_type": "code", "execution_count": null, - "id": "twenty-brick", + "id": "antique-aside", "metadata": {}, "outputs": [], "source": [ @@ -1329,7 +1297,7 @@ }, { "cell_type": "markdown", - "id": "happy-union", + "id": "atlantic-reservation", "metadata": {}, "source": [ "### Determine Pearson's correlation coefficient for the two time series\n", @@ -1341,7 +1309,7 @@ { 
"cell_type": "code", "execution_count": null, - "id": "transsexual-syria", + "id": "mexican-strategy", "metadata": {}, "outputs": [], "source": [ @@ -1350,19 +1318,11 @@ }, { "cell_type": "markdown", - "id": "romantic-fusion", + "id": "native-disabled", "metadata": {}, "source": [ "Highly correlated snow depth records for these two sites!" ] - }, - { - "cell_type": "markdown", - "id": "magnetic-transportation", - "metadata": {}, - "source": [ - "## Summary" - ] } ], "metadata": { diff --git a/book/tutorials/geospatial/raster.ipynb b/book/tutorials/geospatial/raster.ipynb index b6ef4c8..42570ee 100644 --- a/book/tutorials/geospatial/raster.ipynb +++ b/book/tutorials/geospatial/raster.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "essential-origin", + "id": "capital-retreat", "metadata": {}, "source": [ "# Raster data\n", @@ -17,7 +17,7 @@ }, { "cell_type": "markdown", - "id": "rubber-packet", + "id": "plain-municipality", "metadata": {}, "source": [ "## Raster Basics\n", @@ -33,7 +33,7 @@ { "cell_type": "code", "execution_count": null, - "id": "tender-water", + "id": "potential-binary", "metadata": {}, "outputs": [], "source": [ @@ -57,9 +57,28 @@ "#plt.rcParams.update({'font.size': 16}) # make matplotlib font sizes bigger" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "engaged-definition", + "metadata": {}, + "outputs": [], + "source": [ + "%%bash \n", + "\n", + "# Retrieve a copy of data files used in this tutorial from Zenodo.org:\n", + "# Re-running this cell will not re-download things if they already exist\n", + "\n", + "mkdir -p /tmp/tutorial-data\n", + "cd /tmp/tutorial-data\n", + "wget -q -nc -O data.zip https://zenodo.org/record/5504396/files/geospatial.zip\n", + "unzip -q -n data.zip\n", + "rm data.zip" + ] + }, { "cell_type": "markdown", - "id": "entitled-stomach", + "id": "later-czech", "metadata": {}, "source": [ "## Elevation rasters\n", @@ -77,18 +96,23 @@ { "cell_type": "code", "execution_count": null, - 
"id": "accurate-bleeding", + "id": "designing-sellers", "metadata": {}, "outputs": [], "source": [ - "# Get data from LPDAAC and unzip\n", - "!wget -q -nc https://e4ftl01.cr.usgs.gov//DP132/MEASURES/NASADEM_HGT.001/2000.02.11/NASADEM_HGT_n39w109.zip\n", - "!unzip -n NASADEM_HGT_n39w109.zip " + "#%%bash\n", + "\n", + "# Get data directly from NASA LPDAAC and unzip\n", + "\n", + "#DATADIR='/tmp/tutorial-data/geospatial/raster'\n", + "#mkdir -p ${DATADIR}\n", + "#wget -q -nc https://e4ftl01.cr.usgs.gov/DP132/MEASURES/NASADEM_HGT.001/2000.02.11/NASADEM_HGT_n39w109.zip \n", + "#unzip -n NASADEM_HGT_n39w109.zip -d ${DATADIR}" ] }, { "cell_type": "markdown", - "id": "accessory-eclipse", + "id": "random-deployment", "metadata": {}, "source": [ "### Rasterio\n", @@ -99,13 +123,13 @@ { "cell_type": "code", "execution_count": null, - "id": "stable-bahrain", + "id": "occasional-transfer", "metadata": {}, "outputs": [], "source": [ - "\n", - "# NOTE: This reads just the metadata into memory, not the whole file\n", - "path = 'n39w109.hgt'\n", + "# Open a raster image in a zipped archive\n", + "# https://rasterio.readthedocs.io/en/latest/topics/datasets.html\n", + "path = 'zip:///tmp/tutorial-data/geospatial/NASADEM_HGT_n39w109.zip!n39w109.hgt'\n", "with rasterio.open(path) as src:\n", " print(src.profile)" ] @@ -113,7 +137,7 @@ { "cell_type": "code", "execution_count": null, - "id": "afraid-questionnaire", + "id": "casual-colombia", "metadata": {}, "outputs": [], "source": [ @@ -129,7 +153,7 @@ { "cell_type": "code", "execution_count": null, - "id": "involved-founder", + "id": "contemporary-japan", "metadata": {}, "outputs": [], "source": [ @@ -142,7 +166,7 @@ { "cell_type": "code", "execution_count": null, - "id": "falling-partner", + "id": "exclusive-roller", "metadata": {}, "outputs": [], "source": [ @@ -163,7 +187,7 @@ }, { "cell_type": "markdown", - "id": "operational-muslim", + "id": "final-momentum", "metadata": {}, "source": [ "### Rioxarray\n", @@ -176,7 +200,7 @@ { 
"cell_type": "code", "execution_count": null, - "id": "active-garage", + "id": "amber-prayer", "metadata": {}, "outputs": [], "source": [ @@ -186,7 +210,7 @@ }, { "cell_type": "markdown", - "id": "ethical-height", + "id": "given-pearl", "metadata": {}, "source": [ "```{note}\n", @@ -197,7 +221,7 @@ { "cell_type": "code", "execution_count": null, - "id": "hindu-mainland", + "id": "olympic-grammar", "metadata": {}, "outputs": [], "source": [ @@ -210,7 +234,7 @@ { "cell_type": "code", "execution_count": null, - "id": "located-times", + "id": "suburban-windsor", "metadata": {}, "outputs": [], "source": [ @@ -222,7 +246,7 @@ { "cell_type": "code", "execution_count": null, - "id": "theoretical-eligibility", + "id": "anonymous-hawaii", "metadata": {}, "outputs": [], "source": [ @@ -234,7 +258,7 @@ { "cell_type": "code", "execution_count": null, - "id": "surrounded-buffer", + "id": "sitting-diesel", "metadata": {}, "outputs": [], "source": [ @@ -248,7 +272,7 @@ { "cell_type": "code", "execution_count": null, - "id": "cutting-riding", + "id": "connected-necessity", "metadata": {}, "outputs": [], "source": [ @@ -260,7 +284,7 @@ { "cell_type": "code", "execution_count": null, - "id": "treated-hierarchy", + "id": "exclusive-mongolia", "metadata": {}, "outputs": [], "source": [ @@ -273,7 +297,7 @@ { "cell_type": "code", "execution_count": null, - "id": "aquatic-summary", + "id": "handy-accounting", "metadata": {}, "outputs": [], "source": [ @@ -285,7 +309,7 @@ { "cell_type": "code", "execution_count": null, - "id": "auburn-algeria", + "id": "unexpected-fetish", "metadata": {}, "outputs": [], "source": [ @@ -297,7 +321,7 @@ }, { "cell_type": "markdown", - "id": "nasty-deployment", + "id": "loved-bangladesh", "metadata": {}, "source": [ "## Comparing rasters\n", @@ -311,18 +335,18 @@ { "cell_type": "code", "execution_count": null, - "id": "round-easter", + "id": "preliminary-planning", "metadata": {}, "outputs": [], "source": [ - "# Can use AWS CLI to interact with this data\n", 
+ "# Can use AWS CLI to interact with this open data\n", "!aws --no-sign-request s3 ls s3://copernicus-dem-30m/Copernicus_DSM_COG_10_N39_00_W109_00_DEM/Copernicus_DSM_COG_10_N39_00_W109_00_DEM.tif" ] }, { "cell_type": "code", "execution_count": null, - "id": "assumed-birthday", + "id": "amended-sensitivity", "metadata": {}, "outputs": [], "source": [ @@ -342,7 +366,7 @@ { "cell_type": "code", "execution_count": null, - "id": "sharp-biology", + "id": "working-lounge", "metadata": {}, "outputs": [], "source": [ @@ -359,7 +383,7 @@ { "cell_type": "code", "execution_count": null, - "id": "external-stable", + "id": "distant-joint", "metadata": {}, "outputs": [], "source": [ @@ -370,20 +394,24 @@ { "cell_type": "code", "execution_count": null, - "id": "little-recognition", + "id": "accompanied-category", "metadata": {}, "outputs": [], "source": [ "# Ensure the grid of one raster exactly matches another (same projection, resolution, and extents)\n", - "# NOTE: these two raster happen to already be on an aligned grid\n", - "daR = daC.rio.reproject_match(da)\n", + "# NOTE: these two rasters happen to already be on an aligned grid\n", + "\n", + "# There are many options for how to resample a warped raster grid (nearest, bilinear, etc)\n", + "#print(list(rasterio.enums.Resampling))\n", + "\n", + "daR = daC.rio.reproject_match(da, resampling=rasterio.enums.Resampling.nearest)\n", "daR" ] }, { "cell_type": "code", "execution_count": null, - "id": "concerned-owner", + "id": "spectacular-logic", "metadata": {}, "outputs": [], "source": [ @@ -397,7 +425,7 @@ { "cell_type": "code", "execution_count": null, - "id": "exclusive-snake", + "id": "touched-effects", "metadata": {}, "outputs": [], "source": [ @@ -411,7 +439,7 @@ }, { "cell_type": "markdown", - "id": "impressed-testament", + "id": "classified-exclusion", "metadata": {}, "source": [ "```{warning}\n", @@ -427,7 +455,7 @@ { "cell_type": "code", "execution_count": null, - "id": "renewable-channels", + "id": 
"engaged-verification", "metadata": {}, "outputs": [], "source": [ @@ -439,7 +467,7 @@ { "cell_type": "code", "execution_count": null, - "id": "beginning-frank", + "id": "structural-shade", "metadata": {}, "outputs": [], "source": [ @@ -455,7 +483,7 @@ { "cell_type": "code", "execution_count": null, - "id": "pointed-flour", + "id": "weighted-pulse", "metadata": {}, "outputs": [], "source": [ @@ -465,7 +493,7 @@ }, { "cell_type": "markdown", - "id": "tough-essex", + "id": "empty-barrier", "metadata": {}, "source": [ "```{admonition} execercises\n", diff --git a/book/tutorials/lidar/ASO_data_tutorial.ipynb b/book/tutorials/lidar/ASO_data_tutorial.ipynb index 6757214..c849db5 100644 --- a/book/tutorials/lidar/ASO_data_tutorial.ipynb +++ b/book/tutorials/lidar/ASO_data_tutorial.ipynb @@ -82,15 +82,6 @@ "### Import the packages needed for this tutorial" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!pip install pycrs>=1 --no-deps" - ] - }, { "cell_type": "code", "execution_count": null, @@ -115,18 +106,7 @@ "from shapely.geometry import box\n", "\n", "# import packages for viewing the data\n", - "import matplotlib.pyplot as pyplot\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "#define paths\n", - "import os\n", - "CURDIR = os.path.dirname(os.path.realpath(\"__file__\"))" + "import matplotlib.pyplot as pyplot" ] }, { @@ -965,13 +945,6 @@ "\n", "But you can find ASO bare earth DTMs and other ASO data, including 3 m and 50 m snow depth, SWE across other sites and years [here](https://nsidc.org/data/aso/data-summaries)\n" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/book/tutorials/lidar/ICESat2_tutorial.ipynb b/book/tutorials/lidar/ICESat2_tutorial.ipynb index 0976278..de8dfe4 100644 --- a/book/tutorials/lidar/ICESat2_tutorial.ipynb +++ 
b/book/tutorials/lidar/ICESat2_tutorial.ipynb @@ -3,7 +3,7 @@ { "cell_type": "code", "execution_count": null, - "id": "systematic-habitat", + "id": "current-friendship", "metadata": {}, "outputs": [], "source": [ @@ -17,14 +17,13 @@ "from IPython.core.display import HTML \n", "import hvplot.xarray\n", "import pandas as pd\n", - "import rioxarray\n", - "import s3fs" + "import rioxarray" ] }, { "cell_type": "code", "execution_count": null, - "id": "featured-lebanon", + "id": "together-preparation", "metadata": {}, "outputs": [], "source": [ @@ -35,10 +34,10 @@ }, { "cell_type": "markdown", - "id": "headed-genre", + "id": "presidential-emerald", "metadata": {}, "source": [ - "# Introduction\n", + "# ICESat-2\n", "\n", "ICESat-2 is a laser altimeter designed to precisely measure the height of snow and ice surfaces using green lasers with small footprints. Although ICESat-2 doesn't measure surface heights with the same spatial density as airborne laser altimeters, its global spatial coverage makes it a tempting source of free data about snow surfaces. In this tutorial we will:\n", "\n", @@ -50,7 +49,7 @@ "\n", "4. 
Request custom processed height estimates from the SlideRule project.\n", "\n", - "## ICESat-2 measurements and coverage\n", + "## Measurements and coverage\n", "\n", "ICESat-2 measures surface heights with six laser beams, grouped into three pairs separated by 3 km, with a 90-m separation between the beams in each pair.\n", "\n", @@ -60,7 +59,7 @@ { "cell_type": "code", "execution_count": null, - "id": "utility-salad", + "id": "impressive-greeting", "metadata": {}, "outputs": [], "source": [ @@ -69,7 +68,7 @@ }, { "cell_type": "markdown", - "id": "finite-november", + "id": "critical-queens", "metadata": {}, "source": [ "ICESat-2 flies a repeat orbit with 1387 ground tracks every 91 days, but over Grand Mesa, the collection strategy (up until now) has designed to optimize spatial coverage, so the measurements are shifted to the left and right of the repeat tracks to help densify the dataset. We should expect to see tracks running (approximately) north-south over the Mesa, in tripplets of pairs that are scattered from east to west. Because clouds often block the laser, not every track will return usable data.\n" @@ -78,7 +77,7 @@ { "cell_type": "code", "execution_count": null, - "id": "bridal-christian", + "id": "annual-bottom", "metadata": {}, "outputs": [], "source": [ @@ -87,7 +86,7 @@ }, { "cell_type": "markdown", - "id": "given-slovak", + "id": "julian-enterprise", "metadata": {}, "source": [ "We describe ICESat-2's beam layout on the ground based on pairs (numbered 1, 2, and 3, from left to right) and the location of each beam in each pair (L, R). Thus GT2L is the left beam in the center pair. 
In each pair, one beam is always stronger than the other (to help penetrate thin clouds), but since the spacecraft sometimes reverses its orientation to keep the solar panels illuminated, the strong beam can be either left or right, depending on the phase of the mission.\n" @@ -95,7 +94,7 @@ }, { "cell_type": "markdown", - "id": "educational-density", + "id": "tired-france", "metadata": {}, "source": [ "## Basemap (Sentinel)\n", @@ -106,7 +105,7 @@ { "cell_type": "code", "execution_count": null, - "id": "listed-textbook", + "id": "indoor-interference", "metadata": {}, "outputs": [], "source": [ @@ -140,7 +139,7 @@ }, { "cell_type": "markdown", - "id": "undefined-investor", + "id": "liable-bradford", "metadata": {}, "source": [ "## Searching for ICESat-2 data using IcePyx\n", @@ -151,7 +150,7 @@ { "cell_type": "code", "execution_count": null, - "id": "oriental-agriculture", + "id": "complicated-reach", "metadata": {}, "outputs": [], "source": [ @@ -164,7 +163,7 @@ }, { "cell_type": "markdown", - "id": "arranged-subscriber", + "id": "abroad-bracket", "metadata": {}, "source": [ "To run this next section, you'll need to setup your netrc file to connect to nasa earthdata. During the hackweek we will use machine credentials, but afterwards, you may need to use your own credentials. The login procedure is in the next cell, commented out." 
@@ -173,7 +172,7 @@ { "cell_type": "code", "execution_count": null, - "id": "medical-young", + "id": "utility-growth", "metadata": {}, "outputs": [], "source": [ @@ -184,7 +183,7 @@ }, { "cell_type": "markdown", - "id": "urban-sculpture", + "id": "scientific-handle", "metadata": {}, "source": [ "Once we're logged in, the avail_granules() fetches a list of available ATL03 granules:" @@ -193,7 +192,7 @@ { "cell_type": "code", "execution_count": null, - "id": "exact-carnival", + "id": "growing-trick", "metadata": {}, "outputs": [], "source": [ @@ -202,7 +201,7 @@ }, { "cell_type": "markdown", - "id": "fundamental-clock", + "id": "cultural-order", "metadata": {}, "source": [ "The filename for each granule (which contains lots of handy information) is in the 'producer_granule_id' field: " @@ -211,7 +210,7 @@ { "cell_type": "code", "execution_count": null, - "id": "measured-bermuda", + "id": "divine-degree", "metadata": {}, "outputs": [], "source": [ @@ -220,7 +219,7 @@ }, { "cell_type": "markdown", - "id": "angry-effect", + "id": "charged-probe", "metadata": {}, "source": [ "The filename contains ATL03_YYYYMMDDHHMMSS_TTTTCCRR_rrr_vv.h5 where:\n", @@ -237,7 +236,7 @@ { "cell_type": "code", "execution_count": null, - "id": "happy-syndrome", + "id": "inside-delicious", "metadata": {}, "outputs": [], "source": [ @@ -254,7 +253,7 @@ }, { "cell_type": "markdown", - "id": "cooperative-device", + "id": "prepared-engineer", "metadata": {}, "source": [ "From this point, the very capable icepyx interface allows you to order either full data granules or subsets of granules from NSIDC. Further details are available from https://icepyx.readthedocs.io/en/latest/, and their 'examples' pages are quite helpful. Note that ATL03 photon data granules are somewhat cumbersome, so downloading them without subsetting will be time consuming, and requesting subsetting from NSIDC may take a while. 
\n", @@ -266,7 +265,7 @@ { "cell_type": "code", "execution_count": null, - "id": "located-banana", + "id": "guided-enough", "metadata": {}, "outputs": [], "source": [ @@ -330,7 +329,7 @@ { "cell_type": "code", "execution_count": null, - "id": "explicit-oregon", + "id": "rocky-showcase", "metadata": {}, "outputs": [], "source": [ @@ -341,7 +340,7 @@ { "cell_type": "code", "execution_count": null, - "id": "diverse-catering", + "id": "ambient-thumb", "metadata": {}, "outputs": [], "source": [ @@ -355,7 +354,7 @@ }, { "cell_type": "markdown", - "id": "accomplished-tourist", + "id": "affected-grass", "metadata": {}, "source": [ "What we see in this plot is Grand Mesa, with lines showing data from the center beams of several tracks passing across it. A few of these tracks have been repeated, but most are offset from the others. Looking at these, it should be clear that the quality of the data is not consistent from track to track. Some are nearly continuous, others have gaps, and other still have no data at all and are not plotted here. Remember, though, that what we've plotted here are just the center beams. 
There are a total of two more beam pairs, and a total of five more beams!\n", @@ -366,7 +365,7 @@ { "cell_type": "code", "execution_count": null, - "id": "higher-suicide", + "id": "composite-holly", "metadata": {}, "outputs": [], "source": [ @@ -379,7 +378,7 @@ { "cell_type": "code", "execution_count": null, - "id": "placed-crawford", + "id": "macro-shannon", "metadata": {}, "outputs": [], "source": [ @@ -400,7 +399,7 @@ }, { "cell_type": "markdown", - "id": "african-mambo", + "id": "alike-blake", "metadata": {}, "source": [ "Based on the axis limits I filled in, Track 295 has two repeats over the mesa that nearly coincide.\n", @@ -411,7 +410,7 @@ { "cell_type": "code", "execution_count": null, - "id": "modified-general", + "id": "animated-screening", "metadata": {}, "outputs": [], "source": [ @@ -421,7 +420,7 @@ { "cell_type": "code", "execution_count": null, - "id": "active-ireland", + "id": "extra-salem", "metadata": {}, "outputs": [], "source": [ @@ -451,7 +450,7 @@ }, { "cell_type": "markdown", - "id": "adult-oxygen", + "id": "received-strap", "metadata": {}, "source": [ "On the left we see a plot of all six beams crossing (or almost crossing) Grand Mesa, in April of 2020. If you zoom in on the plot, you can distinguish the beam pairs into separate beams. On the right, we see one of the central beams crossing the mesa from south to north. There is a broad band of noise photons that were close enough to the ground to be telemetered by the satellite, and a much narrower band (in red) of photons identified by the processing software as likely coming from the ground." @@ -459,7 +458,7 @@ }, { "cell_type": "markdown", - "id": "toxic-navigation", + "id": "piano-annex", "metadata": {}, "source": [ "These data give a maximum of detail about what the surface looks like to ICESat-2. 
to reduce this to elevation data, telling the surface height at specific locations, there are a few options:\n", @@ -473,7 +472,7 @@ }, { "cell_type": "markdown", - "id": "subject-scott", + "id": "automotive-alberta", "metadata": {}, "source": [ "## Ordering surface-height segments from SlideRule\n", @@ -484,7 +483,7 @@ }, { "cell_type": "markdown", - "id": "organic-thumb", + "id": "prime-vermont", "metadata": {}, "source": [ "You'll need to install the sliderule-python package, available from https://github.com/ICESat2-SlideRule/sliderule-python\n", @@ -494,7 +493,7 @@ { "cell_type": "code", "execution_count": null, - "id": "separated-thailand", + "id": "federal-aside", "metadata": {}, "outputs": [], "source": [ @@ -504,7 +503,7 @@ }, { "cell_type": "markdown", - "id": "practical-lender", + "id": "approved-edgar", "metadata": {}, "source": [ "We will submit a query to sliderule to process all of the data that CMR finds for our region, fitting 20-meter line-segments to all of the photons with medium-or-better signal confidence" @@ -513,7 +512,7 @@ { "cell_type": "code", "execution_count": null, - "id": "focused-grounds", + "id": "turned-google", "metadata": {}, "outputs": [], "source": [ @@ -550,7 +549,7 @@ }, { "cell_type": "markdown", - "id": "outstanding-warrant", + "id": "reported-hazard", "metadata": {}, "source": [ "SlideRule complains when it tries to calculate heights within our ROI for ground tracks that don't intersect the ROI. This happens quite a bit because the CMR service that IcePyx and SlideRule use to search for the data uses a generous buffer on each ICESat-2 track. It shouldn't bother us. 
In fact, we have quite a few tracks for our region.\n", @@ -561,7 +560,7 @@ { "cell_type": "code", "execution_count": null, - "id": "blocked-musical", + "id": "senior-shepherd", "metadata": {}, "outputs": [], "source": [ @@ -574,7 +573,7 @@ }, { "cell_type": "markdown", - "id": "logical-press", + "id": "technical-tracker", "metadata": {}, "source": [ "As we saw a few cells up, for track 295 cycles 7 and 8 are nearly exact repeats. Cycle 7 was April 2020, cycle 8 was July 2020. Could it be that we can measure snow depth in April by comparing the two? Let's plot spot 3 for both!" @@ -583,7 +582,7 @@ { "cell_type": "code", "execution_count": null, - "id": "amended-magic", + "id": "precise-column", "metadata": {}, "outputs": [], "source": [ @@ -601,7 +600,7 @@ }, { "cell_type": "markdown", - "id": "jewish-corner", + "id": "unusual-merchandise", "metadata": {}, "source": [ "To try to get at snow depth, we can look for bare-earth DTMs here:\n", @@ -612,7 +611,7 @@ { "cell_type": "code", "execution_count": null, - "id": "dying-truck", + "id": "persistent-click", "metadata": {}, "outputs": [], "source": [ @@ -639,7 +638,7 @@ { "cell_type": "code", "execution_count": null, - "id": "increasing-listing", + "id": "official-monitor", "metadata": {}, "outputs": [], "source": [ @@ -649,7 +648,7 @@ }, { "cell_type": "markdown", - "id": "instant-titanium", + "id": "statutory-emergency", "metadata": {}, "source": [ "To compare the DTM directly with the ICESat-2 data, we'll need to sample it at the ICESat-2 points. There are probably ways to do this directly in xarray, but I'm not an expert. 
Here we'll use a scipy interpolator:" @@ -658,7 +657,7 @@ { "cell_type": "code", "execution_count": null, - "id": "patent-decline", + "id": "fatal-tribute", "metadata": {}, "outputs": [], "source": [ @@ -670,7 +669,7 @@ { "cell_type": "code", "execution_count": null, - "id": "endangered-culture", + "id": "emerging-jamaica", "metadata": {}, "outputs": [], "source": [ @@ -687,7 +686,7 @@ { "cell_type": "code", "execution_count": null, - "id": "disciplinary-scottish", + "id": "personalized-filling", "metadata": {}, "outputs": [], "source": [ @@ -707,7 +706,7 @@ }, { "cell_type": "markdown", - "id": "varying-nurse", + "id": "effective-resource", "metadata": {}, "source": [ "The DTM is below the April ICESat-2 heights. That's probably not right, and it's because we don't have the vertical datums correct here (ICESat-2 WGS84, the DEM is NAD83). That's OK! Since we have multiple passes over the same DEM, we can use the DEM to correct for spatial offsets between the measurements. Let's use the DEM to correct for differences between the July and April data:" @@ -716,7 +715,7 @@ { "cell_type": "code", "execution_count": null, - "id": "realistic-senator", + "id": "aboriginal-possible", "metadata": {}, "outputs": [], "source": [ @@ -743,7 +742,7 @@ }, { "cell_type": "markdown", - "id": "immune-minister", + "id": "foreign-auction", "metadata": {}, "source": [ "This looks good, if a little noisy. We could get a better comparison by (1) using multiple ICESat-2 tracks to extract a mean snow-off difference between the DTM and ICESat-2, or (2). finding adjacent pairs of measurements between the two tracks, and comparing their heights directly. 
These are both good goals for projects!\n" @@ -751,7 +750,7 @@ }, { "cell_type": "markdown", - "id": "atomic-madness", + "id": "naval-discount", "metadata": {}, "source": [ "## Further reading:\n", @@ -768,7 +767,7 @@ { "cell_type": "code", "execution_count": null, - "id": "vertical-simon", + "id": "expanded-prince", "metadata": {}, "outputs": [], "source": [] diff --git a/book/tutorials/machine-learning/Machine_Learning_Tutorial.ipynb b/book/tutorials/machine-learning/Machine_Learning_Tutorial.ipynb index 7006f8c..9561dcc 100644 --- a/book/tutorials/machine-learning/Machine_Learning_Tutorial.ipynb +++ b/book/tutorials/machine-learning/Machine_Learning_Tutorial.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "requested-victor", + "id": "three-bottom", "metadata": {}, "source": [ "# Introduction to Machine Learning \n", @@ -12,7 +12,7 @@ }, { "cell_type": "markdown", - "id": "competent-honduras", + "id": "nervous-drove", "metadata": {}, "source": [ "## Learning Outcomes\n", @@ -75,7 +75,7 @@ }, { "cell_type": "markdown", - "id": "outer-gallery", + "id": "extended-measurement", "metadata": {}, "source": [ "## Load Dataset\n", @@ -88,7 +88,7 @@ { "cell_type": "code", "execution_count": null, - "id": "tight-blogger", + "id": "tutorial-missouri", "metadata": {}, "outputs": [], "source": [ @@ -101,7 +101,7 @@ { "cell_type": "code", "execution_count": null, - "id": "checked-passenger", + "id": "corrected-exhaust", "metadata": {}, "outputs": [], "source": [ @@ -112,7 +112,7 @@ { "cell_type": "code", "execution_count": null, - "id": "capital-shelf", + "id": "devoted-jimmy", "metadata": {}, "outputs": [], "source": [ @@ -123,7 +123,7 @@ }, { "cell_type": "markdown", - "id": "traditional-hobby", + "id": "academic-paragraph", "metadata": {}, "source": [ "The data used in this tutorial is already clean. The data cleaning was done in a separate notebook, and it is available for anyone interested." 
@@ -131,7 +131,7 @@ }, { "cell_type": "markdown", - "id": "cleared-bradley", + "id": "shaped-camping", "metadata": {}, "source": [ "## Train and Test Sets\n", @@ -146,7 +146,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fantastic-filter", + "id": "gentle-ethernet", "metadata": {}, "outputs": [], "source": [ @@ -156,7 +156,7 @@ }, { "cell_type": "markdown", - "id": "basic-kruger", + "id": "herbal-equivalent", "metadata": {}, "source": [ "### Inspect the Data\n", @@ -169,7 +169,7 @@ { "cell_type": "code", "execution_count": null, - "id": "danish-healthcare", + "id": "caroline-duplicate", "metadata": {}, "outputs": [], "source": [ @@ -178,7 +178,7 @@ }, { "cell_type": "markdown", - "id": "adjustable-korea", + "id": "unable-natural", "metadata": {}, "source": [ "Each panel of the scatterplot matrix is a scatterplot for a pair of variables whose identities are given by the corresponding row and column labels. None of the features have a linear relationship with $\\texttt{snow_depth}$. This may indicate that a linear model might not be the best option." 
@@ -186,7 +186,7 @@ }, { "cell_type": "markdown", - "id": "tested-magnet", + "id": "speaking-glossary", "metadata": {}, "source": [ "**Descriptive Statistics**\n", @@ -204,7 +204,7 @@ { "cell_type": "code", "execution_count": null, - "id": "smaller-interstate", + "id": "capable-controversy", "metadata": {}, "outputs": [], "source": [ @@ -213,7 +213,7 @@ }, { "cell_type": "markdown", - "id": "solid-apartment", + "id": "stable-tragedy", "metadata": {}, "source": [ "### Normalization\n", @@ -230,7 +230,7 @@ { "cell_type": "code", "execution_count": null, - "id": "universal-algebra", + "id": "small-dress", "metadata": {}, "outputs": [], "source": [ @@ -242,7 +242,7 @@ }, { "cell_type": "markdown", - "id": "stylish-physiology", + "id": "spread-ceramic", "metadata": {}, "source": [ "### Sepatare Features from Labels\n", @@ -253,7 +253,7 @@ { "cell_type": "code", "execution_count": null, - "id": "everyday-sustainability", + "id": "complicated-husband", "metadata": {}, "outputs": [], "source": [ @@ -263,7 +263,7 @@ }, { "cell_type": "markdown", - "id": "nuclear-deputy", + "id": "scheduled-configuration", "metadata": {}, "source": [ "## Why Estimate $f$?\n", @@ -362,7 +362,7 @@ }, { "cell_type": "markdown", - "id": "biological-antenna", + "id": "civilian-louis", "metadata": {}, "source": [ "## Modeling Setup\n", @@ -373,19 +373,7 @@ { "cell_type": "code", "execution_count": null, - "id": "technological-charm", - "metadata": {}, - "outputs": [], - "source": [ - "# NOTE: this part of the tutorial uses additional libraries not in the default snowex jupyterhub\n", - "# mamba is a python package management alternative to conda and pip https://github.com/mamba-org/mamba\n", - "!mamba install -y -q tensorflow" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "green-agreement", + "id": "iraqi-ceremony", "metadata": {}, "outputs": [], "source": [ @@ -395,7 +383,7 @@ { "cell_type": "code", "execution_count": null, - "id": "distinguished-consultancy", + "id": 
"substantial-memphis", "metadata": {}, "outputs": [], "source": [ @@ -406,7 +394,7 @@ }, { "cell_type": "markdown", - "id": "super-mixer", + "id": "numerous-state", "metadata": {}, "source": [ "The machine learning algorithm uses a linear regression model to fit the features to the outcome. It will initialize different weights depending on the seed. We define a random seed so that we get same result each time we do the regression." @@ -415,7 +403,7 @@ { "cell_type": "code", "execution_count": null, - "id": "received-redhead", + "id": "continuous-theology", "metadata": {}, "outputs": [], "source": [ @@ -429,7 +417,7 @@ }, { "cell_type": "markdown", - "id": "elect-inspector", + "id": "considerable-referral", "metadata": {}, "source": [ "* **Compile**" @@ -438,7 +426,7 @@ { "cell_type": "code", "execution_count": null, - "id": "several-category", + "id": "proprietary-machinery", "metadata": {}, "outputs": [], "source": [ @@ -448,7 +436,7 @@ }, { "cell_type": "markdown", - "id": "pressed-canyon", + "id": "amino-alberta", "metadata": {}, "source": [ "The mean squared error is minimized to find optimal parameters. A discussion of diffent optimization methods is provided in Appendix A." @@ -456,7 +444,7 @@ }, { "cell_type": "markdown", - "id": "frank-maintenance", + "id": "durable-software", "metadata": {}, "source": [ "* **Print Architecture**" @@ -465,7 +453,7 @@ { "cell_type": "code", "execution_count": null, - "id": "academic-train", + "id": "located-reset", "metadata": {}, "outputs": [], "source": [ @@ -474,7 +462,7 @@ }, { "cell_type": "markdown", - "id": "mexican-surge", + "id": "fiscal-thesis", "metadata": {}, "source": [ "Note since there are 4 features, we have 5 regression parameters." 
@@ -482,7 +470,7 @@ }, { "cell_type": "markdown", - "id": "systematic-greek", + "id": "coupled-ballot", "metadata": {}, "source": [ "* **Fit Model**\n", @@ -493,7 +481,7 @@ { "cell_type": "code", "execution_count": null, - "id": "supposed-watch", + "id": "wrong-living", "metadata": {}, "outputs": [], "source": [ @@ -505,7 +493,7 @@ { "cell_type": "code", "execution_count": null, - "id": "encouraging-laser", + "id": "supported-chicago", "metadata": {}, "outputs": [], "source": [ @@ -518,7 +506,7 @@ }, { "cell_type": "markdown", - "id": "incorrect-crime", + "id": "synthetic-theater", "metadata": {}, "source": [ "### Linear Regression Coefficient\n", @@ -529,7 +517,7 @@ { "cell_type": "code", "execution_count": null, - "id": "particular-compound", + "id": "suitable-ontario", "metadata": {}, "outputs": [], "source": [ @@ -538,7 +526,7 @@ }, { "cell_type": "markdown", - "id": "fitted-record", + "id": "marine-twelve", "metadata": {}, "source": [ "**model**: $\\texttt{snow_depth} = 0.18 - 0.31 \\texttt{amplitude} - 0.14 \\texttt{coherence} + 0.40\\texttt{phase} + 0.40\\texttt{inc_ang} $ " @@ -546,7 +534,7 @@ }, { "cell_type": "markdown", - "id": "infinite-aging", + "id": "loving-gazette", "metadata": {}, "source": [ "## Neural Networks\n", @@ -592,7 +580,7 @@ }, { "cell_type": "markdown", - "id": "numeric-porter", + "id": "dutch-citation", "metadata": {}, "source": [ "### Visualizing Activation Functions" @@ -601,7 +589,7 @@ { "cell_type": "code", "execution_count": null, - "id": "elementary-newsletter", + "id": "approved-linux", "metadata": {}, "outputs": [], "source": [ @@ -686,7 +674,7 @@ }, { "cell_type": "markdown", - "id": "english-richards", + "id": "fundamental-climate", "metadata": {}, "source": [ "### Families of Neural Networks\n", @@ -702,7 +690,7 @@ }, { "cell_type": "markdown", - "id": "smooth-blackjack", + "id": "silent-reset", "metadata": {}, "source": [ "### Feedforward Neural Network\n", @@ -715,7 +703,7 @@ { "cell_type": "code", "execution_count": null, 
- "id": "social-trigger", + "id": "proprietary-miracle", "metadata": {}, "outputs": [], "source": [ @@ -735,7 +723,7 @@ }, { "cell_type": "markdown", - "id": "numeric-consistency", + "id": "spiritual-campus", "metadata": {}, "source": [ "* **Compile**\n", @@ -746,7 +734,7 @@ { "cell_type": "code", "execution_count": null, - "id": "crazy-extraction", + "id": "prepared-seminar", "metadata": {}, "outputs": [], "source": [ @@ -756,7 +744,7 @@ }, { "cell_type": "markdown", - "id": "ruled-indonesia", + "id": "friendly-wings", "metadata": {}, "source": [ "* **Print Architecture**" @@ -765,7 +753,7 @@ { "cell_type": "code", "execution_count": null, - "id": "clear-night", + "id": "protecting-identification", "metadata": {}, "outputs": [], "source": [ @@ -774,7 +762,7 @@ }, { "cell_type": "markdown", - "id": "representative-finder", + "id": "aboriginal-moscow", "metadata": {}, "source": [ "Number of weights connecting the input and the first hidden layer = (1000 $\\times$ 4) + 1000(bias) = 5000 \n", @@ -800,7 +788,7 @@ }, { "cell_type": "markdown", - "id": "documented-compiler", + "id": "packed-arctic", "metadata": {}, "source": [ "* **Fit Model**" @@ -809,7 +797,7 @@ { "cell_type": "code", "execution_count": null, - "id": "approximate-costume", + "id": "coordinate-deficit", "metadata": {}, "outputs": [], "source": [ @@ -822,7 +810,7 @@ { "cell_type": "code", "execution_count": null, - "id": "clean-cathedral", + "id": "revised-tourism", "metadata": {}, "outputs": [], "source": [ @@ -836,7 +824,7 @@ }, { "cell_type": "markdown", - "id": "coated-dragon", + "id": "defined-papua", "metadata": {}, "source": [ "### Prediction\n", @@ -847,7 +835,7 @@ { "cell_type": "code", "execution_count": null, - "id": "stupid-reservation", + "id": "dominican-majority", "metadata": {}, "outputs": [], "source": [ @@ -875,7 +863,7 @@ }, { "cell_type": "markdown", - "id": "infinite-repair", + "id": "southeast-smooth", "metadata": {}, "source": [ "### Check Performance" @@ -884,7 +872,7 @@ { 
"cell_type": "code", "execution_count": null, - "id": "integrated-nitrogen", + "id": "following-theme", "metadata": {}, "outputs": [], "source": [ @@ -897,7 +885,7 @@ { "cell_type": "code", "execution_count": null, - "id": "extended-tower", + "id": "matched-bearing", "metadata": {}, "outputs": [], "source": [ @@ -914,7 +902,7 @@ }, { "cell_type": "markdown", - "id": "adverse-lightning", + "id": "acute-bahrain", "metadata": {}, "source": [ "### Visualize Performance" @@ -923,7 +911,7 @@ { "cell_type": "code", "execution_count": null, - "id": "attractive-entry", + "id": "arbitrary-angel", "metadata": {}, "outputs": [], "source": [ @@ -944,7 +932,7 @@ }, { "cell_type": "markdown", - "id": "loaded-wings", + "id": "hired-citizenship", "metadata": {}, "source": [ "### Visualize Error" @@ -953,7 +941,7 @@ { "cell_type": "code", "execution_count": null, - "id": "explicit-trace", + "id": "signed-yesterday", "metadata": {}, "outputs": [], "source": [ @@ -977,7 +965,7 @@ }, { "cell_type": "markdown", - "id": "stretch-diana", + "id": "mathematical-retail", "metadata": {}, "source": [ "### Save the Best Model" @@ -986,7 +974,7 @@ { "cell_type": "code", "execution_count": null, - "id": "tight-genealogy", + "id": "rolled-campaign", "metadata": {}, "outputs": [], "source": [ @@ -998,7 +986,7 @@ }, { "cell_type": "markdown", - "id": "restricted-plain", + "id": "focal-escape", "metadata": {}, "source": [ "### Main Challenges of Machine Learning\n", @@ -1009,7 +997,7 @@ }, { "cell_type": "markdown", - "id": "north-photographer", + "id": "considerable-paris", "metadata": {}, "source": [ "### Improving your Deep Learning Model\n", @@ -1022,7 +1010,7 @@ }, { "cell_type": "markdown", - "id": "experienced-memorial", + "id": "saved-actor", "metadata": {}, "source": [ "## Your Turn\n", @@ -1036,7 +1024,7 @@ }, { "cell_type": "markdown", - "id": "induced-setting", + "id": "experienced-upset", "metadata": {}, "source": [ "## Reference\n", @@ -1051,7 +1039,7 @@ }, { "cell_type": "markdown", - 
"id": "unable-serve", + "id": "rotary-millennium", "metadata": {}, "source": [ "## Appendix A\n", @@ -1116,7 +1104,7 @@ }, { "cell_type": "markdown", - "id": "extensive-powder", + "id": "hungry-bracket", "metadata": {}, "source": [ "## Appendix B\n", diff --git a/book/tutorials/microstructure/microstructure-tutorial.ipynb b/book/tutorials/microstructure/microstructure-tutorial.ipynb index 267126c..3594a32 100644 --- a/book/tutorials/microstructure/microstructure-tutorial.ipynb +++ b/book/tutorials/microstructure/microstructure-tutorial.ipynb @@ -414,8 +414,16 @@ "metadata": {}, "outputs": [], "source": [ - "# Pull in some tutorial datasets \n", - "!aws s3 sync --quiet s3://snowex-data/tutorial-data/microstructure/ /tmp/microstructure" + "%%bash \n", + "\n", + "# Retrieve a copy of data files used in this tutorial from Zenodo.org:\n", + "# Re-running this cell will not re-download things if they already exist\n", + "\n", + "mkdir -p /tmp/tutorial-data\n", + "cd /tmp/tutorial-data\n", + "wget -q -nc -O data.zip https://zenodo.org/record/5504396/files/microstructure.zip\n", + "unzip -q -n data.zip\n", + "rm data.zip" ] }, { @@ -424,7 +432,7 @@ "metadata": {}, "outputs": [], "source": [ - "p = Profile.load('/tmp/microstructure/SMP/SNEX20_SMP_S19M1174_2N13_20200206.PNT',)\n", + "p = Profile.load('/tmp/tutorial-data/microstructure/SMP/SNEX20_SMP_S19M1174_2N13_20200206.PNT',)\n", "plt.plot(p.samples.distance, p.samples.force)\n", "# Prettify our plot a bit\n", "plt.title(p.name)\n", @@ -517,7 +525,7 @@ "outputs": [], "source": [ "# read micro CT for 2N13\n", - "data_dir='/tmp/microstructure/microCT/txt/'\n", + "data_dir='/tmp/tutorial-data/microstructure/microCT/txt/'\n", "[SSA_CT,height_min,height_max]=read_CT_txt_files(data_dir)\n", "\n", "SSA_CT #check out the SSA values read in from MicroCT" @@ -591,13 +599,6 @@ "\n", "It might also be interesting to compare the data to hand hardness measured in the snowpit, and to traditional hand lens measurements. 
" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/book/tutorials/nsidc-access/nsidc-data-access.ipynb b/book/tutorials/nsidc-access/nsidc-data-access.ipynb index 913b0f2..ae8c435 100644 --- a/book/tutorials/nsidc-access/nsidc-data-access.ipynb +++ b/book/tutorials/nsidc-access/nsidc-data-access.ipynb @@ -596,13 +596,6 @@ "\n", "Additionally, the NASA Global Browse Imagery Service provides up to date, full resolution imagery for select NSIDC DAAC data sets as web services including WMTS, WMS, KML, and more. These layers can be accessed in GIS applications following guidance on the [GIBS documentation pages](https://wiki.earthdata.nasa.gov/display/GIBS/Geographic+Information+System+%28GIS%29+Usage). " ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/book/tutorials/sar/sentinel1.ipynb b/book/tutorials/sar/sentinel1.ipynb index 811b966..876493b 100644 --- a/book/tutorials/sar/sentinel1.ipynb +++ b/book/tutorials/sar/sentinel1.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "mobile-scout", + "id": "brutal-tutorial", "metadata": {}, "source": [ "# Sentinel-1\n", @@ -28,7 +28,7 @@ { "cell_type": "code", "execution_count": null, - "id": "convertible-roberts", + "id": "classified-namibia", "metadata": { "tags": [ "remove-input" @@ -46,7 +46,7 @@ }, { "cell_type": "markdown", - "id": "elegant-raleigh", + "id": "double-tucson", "metadata": {}, "source": [ "## Dive right in\n", @@ -59,7 +59,7 @@ { "cell_type": "code", "execution_count": null, - "id": "second-organizer", + "id": "honest-active", "metadata": {}, "outputs": [], "source": [ @@ -82,7 +82,7 @@ }, { "cell_type": "markdown", - "id": "parallel-monte", + "id": "smooth-disclosure", "metadata": {}, "source": [ "```{admonition} Interpretation\n", @@ -92,7 +92,7 @@ }, { "cell_type": "markdown", - "id": 
"former-intention", + "id": "inclusive-programmer", "metadata": {}, "source": [ "```{admonition} Exercise\n", @@ -106,7 +106,7 @@ }, { "cell_type": "markdown", - "id": "preceding-verification", + "id": "rotary-extent", "metadata": {}, "source": [ "## Quick facts\n", @@ -141,7 +141,7 @@ }, { "cell_type": "markdown", - "id": "mysterious-exhaust", + "id": "described-twenty", "metadata": {}, "source": [ "## Search and Discovery\n", @@ -166,7 +166,7 @@ }, { "cell_type": "markdown", - "id": "operating-elder", + "id": "pressed-strength", "metadata": {}, "source": [ "## Amplitude\n", @@ -179,7 +179,7 @@ { "cell_type": "code", "execution_count": null, - "id": "impossible-context", + "id": "forced-multimedia", "metadata": { "tags": [ "hide-input" @@ -214,7 +214,7 @@ { "cell_type": "code", "execution_count": null, - "id": "thorough-deadline", + "id": "twelve-property", "metadata": {}, "outputs": [], "source": [ @@ -226,7 +226,7 @@ { "cell_type": "code", "execution_count": null, - "id": "suspected-tampa", + "id": "fuzzy-johnson", "metadata": {}, "outputs": [], "source": [ @@ -245,7 +245,7 @@ { "cell_type": "code", "execution_count": null, - "id": "fallen-cleaners", + "id": "executive-saturday", "metadata": {}, "outputs": [], "source": [ @@ -262,7 +262,7 @@ }, { "cell_type": "markdown", - "id": "instructional-addiction", + "id": "insured-necklace", "metadata": {}, "source": [ "```{admonition} Interpretation\n", @@ -272,7 +272,7 @@ }, { "cell_type": "markdown", - "id": "lined-stranger", + "id": "aware-blackjack", "metadata": {}, "source": [ "## Phase\n", @@ -287,49 +287,52 @@ "\n", "Where $\\Delta\\Phi$ is measured change in phase, $\\lambda_i$ is the radar wavelength and $\\theta_i$ is the incidence angle. This approximation assumes dry, homogeneous snow with a depth of less than 3 meters. 
Note also that phase delays are also be caused by changes in atmospheric water vapor, ionospheric conditions, and tectonic displacements, so care must be taken to isolate phase changes arising from SWE changes. Isolating these signals is complicated and more studies like SnowEx are necessary to validate satellite-based SWE extractions with in-situ sensors.\n", "\n", - "The following cell gets you started with plotting phase data generated by ASF's on-demand InSAR processor. It takes about an hour for processing an interferogram, so we've done that ahead of time (see scripts in this repository: https://github.com/snowex-hackweek/hyp3SAR)." + "The following cell gets you started with plotting phase data generated by ASF's on-demand InSAR processor. It takes about an hour to process an interferogram, so we've done that ahead of time using the scripts in this repository (https://github.com/snowex-hackweek/hyp3SAR); the resulting data outputs are here: https://github.com/snowex-hackweek/tutorial-data." ] }, { "cell_type": "code", "execution_count": null, - "id": "written-preference", - "metadata": { - "tags": [ - "remove-input" - ] - }, + "id": "dirty-render", + "metadata": {}, "outputs": [], "source": [ - "if not os.path.exists('/tmp/tutorial-data'):\n", - " os.chdir('/tmp')\n", - " os.system('git clone --depth 1 https://github.com/snowex-hackweek/tutorial-data.git')" + "%%bash \n", + "\n", + "# Retrieve a copy of data files used in this tutorial from Zenodo.org:\n", + "# Re-running this cell will not re-download things if they already exist\n", + "\n", + "mkdir -p /tmp/tutorial-data\n", + "cd /tmp/tutorial-data\n", + "wget -q -nc -O data.zip https://zenodo.org/record/5504396/files/sar.zip\n", + "unzip -q -n data.zip\n", + "rm data.zip" ] }, { "cell_type": "code", "execution_count": null, - "id": "trying-interval", + "id": "suitable-referral", "metadata": {}, "outputs": [], "source": [ - "path = 
'/tmp/tutorial-data/sar/S1AA_20201030T131820_20201111T131820_VVP012_INT80_G_ueF_EBD2/S1AA_20201030T131820_20201111T131820_VVP012_INT80_G_ueF_EBD2_unw_phase.tif'\n", + "path = '/tmp/tutorial-data/sar/sentinel1/S1AA_20201030T131820_20201111T131820_VVP012_INT80_G_ueF_EBD2/S1AA_20201030T131820_20201111T131820_VVP012_INT80_G_ueF_EBD2_unw_phase.tif'\n", "da = rioxarray.open_rasterio(path, masked=True).squeeze('band')" ] }, { "cell_type": "code", "execution_count": null, - "id": "focused-allocation", + "id": "bound-resort", "metadata": {}, "outputs": [], "source": [ - "da.hvplot.image(x='x', y='y', aspect='equal', rasterize=True, cmap='bwr', title='2020/10/30_2020/11/11 Unwrapped Phase (radians)')" + "da.hvplot.image(x='x', y='y', aspect='equal', rasterize=True, cmap='plasma', title='2020/10/30_2020/11/11 Unwrapped Phase (radians)')" ] }, { "cell_type": "markdown", - "id": "bizarre-senate", + "id": "fantastic-bacon", "metadata": {}, "source": [ "```{admonition} Interpretation\n", @@ -340,7 +343,7 @@ }, { "cell_type": "markdown", - "id": "passive-tourism", + "id": "saved-seventh", "metadata": {}, "source": [ "```{admonition} Exercises\n", @@ -354,7 +357,7 @@ }, { "cell_type": "markdown", - "id": "pretty-isolation", + "id": "signal-letter", "metadata": {}, "source": [ "## Next steps\n", diff --git a/book/tutorials/sar/swesarr.ipynb b/book/tutorials/sar/swesarr.ipynb index 779d3f4..2949f40 100644 --- a/book/tutorials/sar/swesarr.ipynb +++ b/book/tutorials/sar/swesarr.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "restricted-produce", + "id": "separate-incentive", "metadata": {}, "source": [ "![NASA](http://www.nasa.gov/sites/all/themes/custom/nasatwo/images/nasa-logo.svg)\n", @@ -27,7 +27,7 @@ }, { "cell_type": "markdown", - "id": "maritime-affair", + "id": "adjusted-southeast", "metadata": {}, "source": [ "
\n", @@ -43,7 +43,7 @@ }, { "cell_type": "markdown", - "id": "green-metabolism", + "id": "hawaiian-activation", "metadata": {}, "source": [ "# SWESARR Tutorial" @@ -51,7 +51,7 @@ }, { "cell_type": "markdown", - "id": "recreational-gabriel", + "id": "destroyed-question", "metadata": {}, "source": [ "## Quick References\n", @@ -65,7 +65,7 @@ }, { "cell_type": "markdown", - "id": "anonymous-lithuania", + "id": "agreed-mixture", "metadata": {}, "source": [ "## What is SWESARR?" @@ -74,7 +74,7 @@ { "cell_type": "code", "execution_count": null, - "id": "supposed-appreciation", + "id": "pressed-mother", "metadata": {}, "outputs": [], "source": [ @@ -85,7 +85,7 @@ }, { "cell_type": "markdown", - "id": "outer-knock", + "id": "irish-choice", "metadata": {}, "source": [ "