Skip to content

Commit

Permalink
Merge pull request #587 from locationtech/spark-3.2
Browse files Browse the repository at this point in the history
Spark 3.2.1
  • Loading branch information
echeipesh committed Jan 19, 2023
2 parents 1e314ea + c5cf70c commit 8331358
Show file tree
Hide file tree
Showing 142 changed files with 1,753 additions and 1,008 deletions.
6 changes: 0 additions & 6 deletions .circleci/README.md

This file was deleted.

131 changes: 0 additions & 131 deletions .github/disabled-workflows/build-test.yml

This file was deleted.

3 changes: 3 additions & 0 deletions .github/image/.dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
*
!requirements-conda.txt
!fix-permissions
28 changes: 28 additions & 0 deletions .github/image/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
FROM adoptopenjdk/openjdk11:debian-slim

# Install conda from Anaconda's Debian package repository.
# See: https://docs.conda.io/projects/conda/en/latest/user-guide/install/rpm-debian.html
RUN \
    apt-get update && \
    # curl and ca-certificates are required by the key download below but are
    # not guaranteed to be present in a debian-slim base image.
    apt-get install -yq gpg curl ca-certificates && \
    curl -s https://repo.anaconda.com/pkgs/misc/gpgkeys/anaconda.asc | gpg --dearmor > conda.gpg && \
    install -o root -g root -m 644 conda.gpg /usr/share/keyrings/conda-archive-keyring.gpg && \
    rm conda.gpg && \
    # Verify the expected key fingerprint before trusting the repository.
    gpg --keyring /usr/share/keyrings/conda-archive-keyring.gpg --no-default-keyring --fingerprint 34161F5BF5EB1D4BFBBB8F0A8AEB4F8B29D82806 && \
    echo "deb [arch=amd64 signed-by=/usr/share/keyrings/conda-archive-keyring.gpg] https://repo.anaconda.com/pkgs/misc/debrepo/conda stable main" > /etc/apt/sources.list.d/conda.list && \
    apt-get update && \
    apt-get install -yq --no-install-recommends conda && \
    # Trim apt caches to keep the image layer small.
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

ENV CONDA_DIR=/opt/conda
ENV PATH=$CONDA_DIR/bin:$PATH

# Install the Python/GDAL toolchain pinned in requirements-conda.txt.
COPY requirements-conda.txt /tmp
RUN \
    conda install --quiet --yes --channel=conda-forge --file=/tmp/requirements-conda.txt && \
    # Make conda's native libraries (GDAL et al.) visible to the system linker.
    echo "$CONDA_DIR/lib" > /etc/ld.so.conf.d/conda.conf && \
    ldconfig && \
    conda clean --all --force-pkgs-dirs --yes --quiet

# Work-around for pyproj issue https://github.com/pyproj4/pyproj/issues/415
ENV PROJ_LIB=/opt/conda/share/proj

27 changes: 27 additions & 0 deletions .github/image/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
IMAGE_NAME=debian-openjdk-conda-gdal
# Short (7-char) hash of the current HEAD commit; default image tag.
SHA=$(shell git log -n1 --format=format:"%H" | cut -c 1-7)
VERSION?=$(SHA)
HOST=docker.io
REPO=$(HOST)/s22s
FULL_NAME=$(REPO)/$(IMAGE_NAME):$(VERSION)

# None of these targets produce a file of the same name; declare them phony so
# that a stray file (e.g. ./build) cannot shadow them and make them no-ops.
.PHONY: help all build login push run

.DEFAULT_GOAL := help
# Auto-documented Makefile pattern:
# http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
help: ## Show this help
	@echo "Usage: make [target]"
	@echo "Targets: "
	@grep -E '^[a-zA-Z0-9_%/-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\t\033[36m%-20s\033[0m %s\n", $$1, $$2}'

all: build push ## Build and then push image

build: ## Build the docker image
	docker build . -t ${FULL_NAME}

login: ## Login to the docker registry
	docker login

push: login ## Push docker image to registry
	docker push ${FULL_NAME}

run: build ## Build image and launch shell
	docker run --rm -it ${FULL_NAME} bash
5 changes: 5 additions & 0 deletions .github/image/requirements-conda.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Conda packages (conda-forge channel) baked into the CI base image.
# gdal is pinned; presumably it must match the GDAL version the JVM/raster
# bindings expect — TODO confirm against the build's native dependencies.
python==3.8
gdal==3.1.2
libspatialindex
rasterio[s3]
rtree
66 changes: 66 additions & 0 deletions .github/workflows/build-test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
name: Build and Test

on:
  pull_request:
    branches: ['**']
  push:
    branches: ['master', 'develop', 'release/*', 'spark-3.2']
    tags: [v*]
  release:
    types: [published]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      # Full history (fetch-depth: 0) so git-based versioning can see all tags.
      # checkout@v2 is deprecated and fails on current GitHub-hosted runners.
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: coursier/cache-action@v6
      - name: Setup JDK
        uses: actions/setup-java@v3
        with:
          distribution: temurin
          java-version: 8
          cache: sbt

      # Do just the compilation stage to minimize sbt memory footprint
      - name: Compile
        run: sbt -v -batch compile test:compile it:compile

      - name: Core tests
        run: sbt -batch core/test

      - name: Datasource tests
        run: sbt -batch datasource/test

      - name: Experimental tests
        run: sbt -batch experimental/test

      ## TODO: Update python build to be PEP 517 compatible
      # - name: Install Conda dependencies
      #   run: |
      #     # $CONDA_DIR is an environment variable pointing to the root of the miniconda directory
      #     $CONDA_DIR/bin/conda install -c conda-forge --yes --file pyrasterframes/src/main/python/requirements-condaforge.txt
      # - name: Create PyRasterFrames package
      #   run: sbt -v -batch pyrasterframes/package
      # - name: Python tests
      #   run: sbt -batch pyrasterframes/test

      # Debugging aid: gather JVM crash logs / core dumps when any step fails.
      - name: Collect artifacts
        if: ${{ failure() }}
        run: |
          mkdir -p /tmp/core_dumps
          ls -lh /tmp
          cp core.* *.hs /tmp/core_dumps/ 2> /dev/null || true
          cp ./core/*.log /tmp/core_dumps/ 2> /dev/null || true
          cp -r /tmp/hsperfdata* /tmp/*.hprof /tmp/core_dumps/ 2> /dev/null || true
          cp repo/core/core/* /tmp/core_dumps/ 2> /dev/null || true
      - name: Upload core dumps
        if: ${{ failure() }}
        # upload-artifact@v2 is deprecated; v4 requires unique artifact names
        # per run, which holds here (single upload in this workflow).
        uses: actions/upload-artifact@v4
        with:
          name: core-dumps
          path: /tmp/core_dumps
68 changes: 68 additions & 0 deletions .github/workflows/docs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
name: Compile documentation

on:
  workflow_dispatch:

  pull_request:
    branches: ['**docs*']
  push:
    branches: ['master', 'release/*']
  release:
    types: [published]

jobs:
  docs:
    runs-on: ubuntu-latest
    # Pre-baked image with JDK + conda + GDAL (built from .github/image).
    container:
      image: s22s/debian-openjdk-conda-gdal:6790f8d

    steps:
      # Full history (fetch-depth: 0) so git-based versioning can see all tags.
      # checkout@v2 is deprecated and fails on current GitHub-hosted runners.
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - uses: coursier/cache-action@v6
      - uses: olafurpg/setup-scala@v13
        with:
          java-version: adopt@1.11

      - name: Set up Python 3.8
        uses: actions/setup-python@v4
        with:
          python-version: 3.8

      - name: Install Conda dependencies
        run: |
          # $CONDA_DIR is an environment variable pointing to the root of the miniconda directory
          $CONDA_DIR/bin/conda install -c conda-forge --yes --file pyrasterframes/src/main/python/requirements-condaforge.txt
      - name: Build documentation
        run: sbt makeSite

      # Debugging aids: collect crash logs and intermediate docs when the build fails.
      - name: Collect artifacts
        if: ${{ failure() }}
        run: |
          mkdir -p /tmp/core_dumps
          cp core.* *.hs /tmp/core_dumps 2> /dev/null || true
          mkdir -p /tmp/markdown
          cp pyrasterframes/target/python/docs/*.md /tmp/markdown 2> /dev/null || true
      - name: Upload core dumps
        if: ${{ failure() }}
        # upload-artifact@v2 is deprecated; v4 requires unique artifact names,
        # which holds here (three distinct names in this workflow).
        uses: actions/upload-artifact@v4
        with:
          name: core-dumps
          path: /tmp/core_dumps

      - name: Upload markdown
        if: ${{ failure() }}
        uses: actions/upload-artifact@v4
        with:
          name: markdown
          path: /tmp/markdown

      # NOTE(review): uploading the built site only on failure looks intentional
      # (debugging aid), but if the site should also be published on success this
      # condition needs revisiting — TODO confirm with maintainers.
      - name: Upload rf-site
        if: ${{ failure() }}
        uses: actions/upload-artifact@v4
        with:
          name: rf-site
          path: docs/target/site
2 changes: 2 additions & 0 deletions .jvmopts
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-Xms2g
-Xmx4g
2 changes: 1 addition & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ lazy val core = project
ExclusionRule(organization = "com.github.mpilquist")
),
scaffeine,
scalatest,
sparktestingbase excludeAll ExclusionRule("org.scala-lang.modules", "scala-xml_2.12"),
`scala-logging`
),
libraryDependencies ++= {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,7 @@ class RasterRefIT extends TestEnvironment {
describe("practical subregion reads") {
it("should construct a natural color composite") {
import spark.implicits._
def scene(idx: Int) = URI.create(s"https://landsat-pds.s3.us-west-2.amazonaws.com" +
s"/c1/L8/176/039/LC08_L1TP_176039_20190703_20190718_01_T1/LC08_L1TP_176039_20190703_20190718_01_T1_B$idx.TIF")
def scene(idx: Int) = TestData.remoteCOGSingleBand(idx)

val redScene = RFRasterSource(scene(4))
// [west, south, east, north]
Expand Down
Loading

0 comments on commit 8331358

Please sign in to comment.