diff --git a/dev/release/binary/.dockerignore b/dev/release/binary/.dockerignore
deleted file mode 100644
index f2c46d8ce245..000000000000
--- a/dev/release/binary/.dockerignore
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-/tmp/
diff --git a/dev/release/binary/Dockerfile b/dev/release/binary/Dockerfile
deleted file mode 100644
index 26ef3f9e8c05..000000000000
--- a/dev/release/binary/Dockerfile
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-FROM ubuntu:18.04
-
-ENV DEBIAN_FRONTEND noninteractive
-
-ARG DEBUG
-
-RUN \
-  quiet=$([ "${DEBUG}" = "yes" ] || echo "-qq") && \
-  apt update ${quiet} && \
-  apt install -y -V ${quiet} \
-    apt-utils \
-    createrepo \
-    devscripts \
-    gpg \
-    locales \
-    openssh-server \
-    rake \
-    rpm \
-    ruby \
-    sudo && \
-  apt clean && \
-  rm -rf /var/lib/apt/lists/*
-
-RUN locale-gen en_US.UTF-8
-
-RUN mkdir -p /run/sshd
-RUN echo "StreamLocalBindUnlink yes" >> /etc/ssh/sshd_config
-
-ENV ARROW_USER arrow
-ENV ARROW_UID 10000
-
-RUN \
-  groupadd --gid ${ARROW_UID} ${ARROW_USER} && \
-  useradd --uid ${ARROW_UID} --gid ${ARROW_UID} --create-home ${ARROW_USER} && \
-  mkdir -p /home/arrow/.gnupg /home/arrow/.ssh && \
-  chown -R arrow: /home/arrow/.gnupg /home/arrow/.ssh && \
-  chmod -R og-rwx /home/arrow/.gnupg /home/arrow/.ssh && \
-  echo "${ARROW_USER} ALL=(ALL:ALL) NOPASSWD:ALL" | \
-    EDITOR=tee visudo -f /etc/sudoers.d/arrow
-
-COPY id_rsa.pub /home/arrow/.ssh/authorized_keys
-RUN \
-  chown -R arrow: /home/arrow/.ssh && \
-  chmod -R og-rwx /home/arrow/.ssh
-
-COPY runner.sh /home/arrow/runner.sh
-RUN \
-  chown -R arrow: /home/arrow/runner.sh && \
-  chmod +x /home/arrow/runner.sh
-
-EXPOSE 22
diff --git a/dev/release/binary/runner.sh b/dev/release/binary/runner.sh
deleted file mode 100755
index 5cf5033d68b5..000000000000
--- a/dev/release/binary/runner.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-set -u
-
-export LANG=C
-
-target_dir=/host/binary/tmp
-original_owner=$(stat --format=%u ${target_dir})
-original_group=$(stat --format=%g ${target_dir})
-
-sudo -H chown -R ${USER}: ${target_dir}
-restore_owner() {
-  sudo -H chown -R ${original_owner}:${original_group} ${target_dir}
-}
-trap restore_owner EXIT
-
-cd /host
-
-"$@"
diff --git a/dev/release/verify-apt.sh b/dev/release/verify-apt.sh
deleted file mode 100755
index e7b87a3a4da6..000000000000
--- a/dev/release/verify-apt.sh
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-set -exu
-
-if [ $# -lt 2 ]; then
-  echo "Usage: $0 VERSION rc"
-  echo "       $0 VERSION release"
-  echo "       $0 VERSION local"
-  echo " e.g.: $0 0.13.0 rc           # Verify 0.13.0 RC"
-  echo " e.g.: $0 0.13.0 release      # Verify 0.13.0"
-  echo " e.g.: $0 0.13.0-dev20210203 local # Verify 0.13.0-dev20210203 on local"
-  exit 1
-fi
-
-VERSION="$1"
-TYPE="$2"
-
-local_prefix="/arrow/dev/tasks/linux-packages"
-
-export DEBIAN_FRONTEND=noninteractive
-
-apt update
-apt install -y -V \
-  curl \
-  lsb-release
-
-code_name="$(lsb_release --codename --short)"
-distribution="$(lsb_release --id --short | tr 'A-Z' 'a-z')"
-artifactory_base_url="https://apache.jfrog.io/artifactory/arrow/${distribution}"
-if [ "${TYPE}" = "rc" ]; then
-  artifactory_base_url+="-rc"
-fi
-
-have_flight=yes
-have_plasma=yes
-workaround_missing_packages=()
-case "${distribution}-${code_name}" in
-  debian-*)
-    sed \
-      -i"" \
-      -e "s/ main$/ main contrib non-free/g" \
-      /etc/apt/sources.list
-    ;;
-esac
-if [ "$(arch)" = "aarch64" ]; then
-  have_plasma=no
-fi
-
-if [ "${TYPE}" = "local" ]; then
-  case "${VERSION}" in
-    *-dev*)
-      package_version="$(echo "${VERSION}" | sed -e 's/-dev\(.*\)$/~dev\1/g')"
-      ;;
-    *-rc*)
-      package_version="$(echo "${VERSION}" | sed -e 's/-rc.*$//g')"
-      ;;
-    *)
-      package_version="${VERSION}"
-      ;;
-  esac
-  package_version+="-1"
-  apt_source_path="${local_prefix}/apt/repositories"
-  apt_source_path+="/${distribution}/pool/${code_name}/main"
-  apt_source_path+="/a/apache-arrow-apt-source"
-  apt_source_path+="/apache-arrow-apt-source_${package_version}_all.deb"
-  apt install -y -V "${apt_source_path}"
-else
-  package_version="${VERSION}-1"
-  apt_source_base_name="apache-arrow-apt-source-latest-${code_name}.deb"
-  curl \
-    --output "${apt_source_base_name}" \
-    "${artifactory_base_url}/${apt_source_base_name}"
-  apt install -y -V "./${apt_source_base_name}"
-fi
-
-if [ "${TYPE}" = "local" ]; then
-  sed \
-    -i"" \
-    -e "s,^URIs: .*$,URIs: file://${local_prefix}/apt/repositories/${distribution},g" \
-    /etc/apt/sources.list.d/apache-arrow.sources
-  keys="${local_prefix}/KEYS"
-  if [ -f "${keys}" ]; then
-    gpg \
-      --no-default-keyring \
-      --keyring /usr/share/keyrings/apache-arrow-apt-source.gpg \
-      --import "${keys}"
-  fi
-else
-  if [ "${TYPE}" = "rc" ]; then
-    sed \
-      -i"" \
-      -e "s,^URIs: \\(.*\\)/,URIs: \\1-rc/,g" \
-      /etc/apt/sources.list.d/apache-arrow.sources
-  fi
-fi
-
-apt update
-
-apt install -y -V libarrow-glib-dev=${package_version}
-required_packages=()
-required_packages+=(cmake)
-required_packages+=(g++)
-required_packages+=(git)
-required_packages+=(${workaround_missing_packages[@]})
-apt install -y -V ${required_packages[@]}
-mkdir -p build
-cp -a /arrow/cpp/examples/minimal_build build
-pushd build/minimal_build
-cmake .
-make -j$(nproc)
-./arrow_example
-popd
-
-apt install -y -V libarrow-glib-dev=${package_version}
-apt install -y -V libarrow-glib-doc=${package_version}
-
-if [ "${have_flight}" = "yes" ]; then
-  apt install -y -V libarrow-flight-dev=${package_version}
-fi
-
-apt install -y -V libarrow-python-dev=${package_version}
-
-if [ "${have_plasma}" = "yes" ]; then
-  apt install -y -V libplasma-glib-dev=${package_version}
-  apt install -y -V libplasma-glib-doc=${package_version}
-  apt install -y -V plasma-store-server=${package_version}
-fi
-
-apt install -y -V libgandiva-glib-dev=${package_version}
-apt install -y -V libgandiva-glib-doc=${package_version}
-
-apt install -y -V libparquet-glib-dev=${package_version}
-apt install -y -V libparquet-glib-doc=${package_version}
diff --git a/dev/release/verify-release-candidate-wheels.bat b/dev/release/verify-release-candidate-wheels.bat
deleted file mode 100644
index 2b57113a1bf7..000000000000
--- a/dev/release/verify-release-candidate-wheels.bat
+++ /dev/null
@@ -1,107 +0,0 @@
-@rem Licensed to the Apache Software Foundation (ASF) under one
-@rem or more contributor license agreements. See the NOTICE file
-@rem distributed with this work for additional information
-@rem regarding copyright ownership. The ASF licenses this file
-@rem to you under the Apache License, Version 2.0 (the
-@rem "License"); you may not use this file except in compliance
-@rem with the License. You may obtain a copy of the License at
-@rem
-@rem   http://www.apache.org/licenses/LICENSE-2.0
-@rem
-@rem Unless required by applicable law or agreed to in writing,
-@rem software distributed under the License is distributed on an
-@rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-@rem KIND, either express or implied. See the License for the
-@rem specific language governing permissions and limitations
-@rem under the License.
-
-@rem This script downloads and installs all Windows wheels for a release
-@rem candidate into temporary conda environments and makes sure that imports
-@rem work
-
-@rem To run the script:
-@rem verify-release-candidate-wheels.bat VERSION RC_NUM
-
-@echo on
-
-set _CURRENT_DIR=%CD%
-set _VERIFICATION_DIR=C:\tmp\arrow-verify-release-wheels
-
-if not exist "C:\tmp\" mkdir C:\tmp
-if exist %_VERIFICATION_DIR% rd %_VERIFICATION_DIR% /s /q
-if not exist %_VERIFICATION_DIR% mkdir %_VERIFICATION_DIR%
-
-cd %_VERIFICATION_DIR%
-
-@rem clone Arrow repository to obtain test requirements
-set GIT_ENV_PATH=%_VERIFICATION_DIR%\_git
-call conda create -p %GIT_ENV_PATH% ^
-     --no-shortcuts -f -q -y git ^
-     || EXIT /B 1
-call activate %GIT_ENV_PATH%
-
-git clone https://github.com/apache/arrow.git || EXIT /B 1
-pushd arrow
-git submodule update --init
-popd
-
-call deactivate
-
-set ARROW_TEST_DATA=%cd%\arrow\testing\data
-
-CALL :verify_wheel 3.6 %1 %2 m
-if errorlevel 1 GOTO error
-
-CALL :verify_wheel 3.7 %1 %2 m
-if errorlevel 1 GOTO error
-
-CALL :verify_wheel 3.8 %1 %2
-if errorlevel 1 GOTO error
-
-:done
-cd %_CURRENT_DIR%
-
-EXIT /B %ERRORLEVEL%
-
-:error
-call deactivate
-cd %_CURRENT_DIR%
-
-EXIT /B 1
-
-@rem a batch function to verify a single wheel
-:verify_wheel
-
-set PY_VERSION=%1
-set ARROW_VERSION=%2
-set RC_NUMBER=%3
-set ABI_TAG=%4
-set PY_VERSION_NO_PERIOD=%PY_VERSION:.=%
-
-set CONDA_ENV_PATH=%_VERIFICATION_DIR%\_verify-wheel-%PY_VERSION%
-call conda create -p %CONDA_ENV_PATH% ^
-     --no-shortcuts -f -q -y python=%PY_VERSION% ^
-     || EXIT /B 1
-call activate %CONDA_ENV_PATH%
-
-set WHEEL_FILENAME=pyarrow-%ARROW_VERSION%-cp%PY_VERSION_NO_PERIOD%-cp%PY_VERSION_NO_PERIOD%%ABI_TAG%-win_amd64.whl
-
-@rem Requires GNU Wget for Windows
-wget --no-check-certificate -O %WHEEL_FILENAME% https://bintray.com/apache/arrow/download_file?file_path=python-rc%%2F%ARROW_VERSION%-rc%RC_NUMBER%%%2F%WHEEL_FILENAME% || EXIT /B 1
-
-pip install %WHEEL_FILENAME% || EXIT /B 1
-
-pip install -r arrow/python/requirements-test.txt || EXIT /B 1
-
-py.test %CONDA_ENV_PATH%\Lib\site-packages\pyarrow --pdb -v || EXIT /B 1
-
-python -c "import pyarrow" || EXIT /B 1
-python -c "import pyarrow.parquet" || EXIT /B 1
-python -c "import pyarrow.flight" || EXIT /B 1
-python -c "import pyarrow.dataset" || EXIT /B 1
-
-:done
-
-call deactivate
-
-EXIT /B 0
diff --git a/dev/release/verify-release-candidate.bat b/dev/release/verify-release-candidate.bat
deleted file mode 100644
index bef78fc920cc..000000000000
--- a/dev/release/verify-release-candidate.bat
+++ /dev/null
@@ -1,130 +0,0 @@
-@rem Licensed to the Apache Software Foundation (ASF) under one
-@rem or more contributor license agreements. See the NOTICE file
-@rem distributed with this work for additional information
-@rem regarding copyright ownership. The ASF licenses this file
-@rem to you under the Apache License, Version 2.0 (the
-@rem "License"); you may not use this file except in compliance
-@rem with the License. You may obtain a copy of the License at
-@rem
-@rem   http://www.apache.org/licenses/LICENSE-2.0
-@rem
-@rem Unless required by applicable law or agreed to in writing,
-@rem software distributed under the License is distributed on an
-@rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-@rem KIND, either express or implied. See the License for the
-@rem specific language governing permissions and limitations
-@rem under the License.
-
-@rem To run the script:
-@rem verify-release-candidate.bat VERSION RC_NUM
-
-@echo on
-
-if not exist "C:\tmp\" mkdir C:\tmp
-if exist "C:\tmp\arrow-verify-release" rd C:\tmp\arrow-verify-release /s /q
-if not exist "C:\tmp\arrow-verify-release" mkdir C:\tmp\arrow-verify-release
-
-set _VERIFICATION_DIR=C:\tmp\arrow-verify-release
-set _VERIFICATION_DIR_UNIX=C:/tmp/arrow-verify-release
-set _VERIFICATION_CONDA_ENV=%_VERIFICATION_DIR%\conda-env
-set _DIST_URL=https://dist.apache.org/repos/dist/dev/arrow
-set _TARBALL=apache-arrow-%1.tar.gz
-set ARROW_SOURCE=%_VERIFICATION_DIR%\apache-arrow-%1
-set INSTALL_DIR=%_VERIFICATION_DIR%\install
-
-@rem Requires GNU Wget for Windows
-wget --no-check-certificate -O %_TARBALL% %_DIST_URL%/apache-arrow-%1-rc%2/%_TARBALL% || exit /B 1
-
-tar xf %_TARBALL% -C %_VERIFICATION_DIR_UNIX%
-
-set PYTHON=3.6
-
-@rem Using call with conda.bat seems necessary to avoid terminating the batch
-@rem script execution
-call conda create --no-shortcuts -c conda-forge -f -q -y -p %_VERIFICATION_CONDA_ENV% ^
-     --file=ci\conda_env_cpp.yml ^
-     --file=ci\conda_env_python.yml ^
-     git ^
-     python=%PYTHON% ^
-     || exit /B 1
-
-call activate %_VERIFICATION_CONDA_ENV% || exit /B 1
-
-set GENERATOR=Visual Studio 15 2017 Win64
-set CONFIGURATION=release
-
-pushd %ARROW_SOURCE%
-
-set ARROW_HOME=%INSTALL_DIR%
-set PARQUET_HOME=%INSTALL_DIR%
-set PATH=%INSTALL_DIR%\bin;%PATH%
-
-@rem Build and test Arrow C++ libraries
-mkdir %ARROW_SOURCE%\cpp\build
-pushd %ARROW_SOURCE%\cpp\build
-
-@rem This is the path for Visual Studio Community 2017
-call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\Common7\Tools\VsDevCmd.bat" -arch=amd64
-
-@rem NOTE(wesm): not using Ninja for now to be able to more easily control the
-@rem generator used
-
-cmake -G "%GENERATOR%" ^
-      -DARROW_BOOST_USE_SHARED=ON ^
-      -DARROW_BUILD_STATIC=OFF ^
-      -DARROW_BUILD_TESTS=ON ^
-      -DARROW_CXXFLAGS="/MP" ^
-      -DARROW_DATASET=ON ^
-      -DARROW_FLIGHT=ON ^
-      -DARROW_MIMALLOC=ON ^
-      -DARROW_PARQUET=ON ^
-      -DARROW_PYTHON=ON ^
-      -DARROW_WITH_BROTLI=ON ^
-      -DARROW_WITH_BZ2=ON ^
-      -DARROW_WITH_LZ4=ON ^
-      -DARROW_WITH_SNAPPY=ON ^
-      -DARROW_WITH_ZLIB=ON ^
-      -DARROW_WITH_ZSTD=ON ^
-      -DCMAKE_BUILD_TYPE=%CONFIGURATION% ^
-      -DCMAKE_INSTALL_PREFIX=%ARROW_HOME% ^
-      -DCMAKE_UNITY_BUILD=ON ^
-      -DGTest_SOURCE=BUNDLED ^
-      .. || exit /B
-
-cmake --build . --target INSTALL --config Release || exit /B 1
-
-@rem NOTE(wesm): Building googletest is flaky for me with ninja. Building it
-@rem first fixes the problem
-
-@rem ninja googletest_ep || exit /B 1
-@rem ninja install || exit /B 1
-
-@rem Get testing datasets for Parquet unit tests
-git clone https://github.com/apache/parquet-testing.git %_VERIFICATION_DIR%\parquet-testing
-set PARQUET_TEST_DATA=%_VERIFICATION_DIR%\parquet-testing\data
-
-git clone https://github.com/apache/arrow-testing.git %_VERIFICATION_DIR%\arrow-testing
-set ARROW_TEST_DATA=%_VERIFICATION_DIR%\arrow-testing\data
-
-@rem Needed so python-test.exe works
-set PYTHONPATH_ORIGINAL=%PYTHONPATH%
-set PYTHONPATH=%CONDA_PREFIX%\Lib;%CONDA_PREFIX%\Lib\site-packages;%CONDA_PREFIX%\DLLs;%CONDA_PREFIX%;%PYTHONPATH%
-ctest -VV || exit /B 1
-set PYTHONPATH=%PYTHONPATH_ORIGINAL%
-popd
-
-@rem Build and import pyarrow
-pushd %ARROW_SOURCE%\python
-
-pip install -r requirements-test.txt || exit /B 1
-
-set PYARROW_CMAKE_GENERATOR=%GENERATOR%
-set PYARROW_WITH_FLIGHT=1
-set PYARROW_WITH_PARQUET=1
-set PYARROW_WITH_DATASET=1
-python setup.py build_ext --inplace --bundle-arrow-cpp bdist_wheel || exit /B 1
-pytest pyarrow -v -s --enable-parquet || exit /B 1
-
-popd
-
-call deactivate
diff --git a/dev/release/verify-release-candidate.sh b/dev/release/verify-release-candidate.sh
deleted file mode 100755
index 3fc926f4e82e..000000000000
--- a/dev/release/verify-release-candidate.sh
+++ /dev/null
@@ -1,808 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-# Requirements
-# - Ruby >= 2.3
-# - Maven >= 3.3.9
-# - JDK >=7
-# - gcc >= 4.8
-# - Node.js >= 11.12 (best way is to use nvm)
-# - Go >= 1.11
-#
-# If using a non-system Boost, set BOOST_ROOT and add Boost libraries to
-# LD_LIBRARY_PATH.
-#
-# To reuse build artifacts between runs set ARROW_TMPDIR environment variable to
-# a directory where the temporary files should be placed to, note that this
-# directory is not cleaned up automatically.
-
-case $# in
-  3) ARTIFACT="$1"
-     VERSION="$2"
-     RC_NUMBER="$3"
-     case $ARTIFACT in
-       source|binaries|wheels) ;;
-       *) echo "Invalid argument: '${ARTIFACT}', valid options are \
-'source', 'binaries', or 'wheels'"
-          exit 1
-          ;;
-     esac
-     ;;
-  *) echo "Usage: $0 source|binaries X.Y.Z RC_NUMBER"
-     exit 1
-     ;;
-esac
-
-set -e
-set -x
-set -o pipefail
-
-SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]:-$0}")" && pwd)"
-ARROW_DIR="$(dirname $(dirname ${SOURCE_DIR}))"
-
-detect_cuda() {
-  if ! (which nvcc && which nvidia-smi) > /dev/null; then
-    return 1
-  fi
-
-  local n_gpus=$(nvidia-smi --list-gpus | wc -l)
-  return $((${n_gpus} < 1))
-}
-
-# Build options for the C++ library
-
-if [ -z "${ARROW_CUDA:-}" ] && detect_cuda; then
-  ARROW_CUDA=ON
-fi
-: ${ARROW_CUDA:=OFF}
-: ${ARROW_FLIGHT:=ON}
-: ${ARROW_GANDIVA:=ON}
-
-ARROW_DIST_URL='https://dist.apache.org/repos/dist/dev/arrow'
-
-download_dist_file() {
-  curl \
-    --silent \
-    --show-error \
-    --fail \
-    --location \
-    --remote-name $ARROW_DIST_URL/$1
-}
-
-download_rc_file() {
-  download_dist_file apache-arrow-${VERSION}-rc${RC_NUMBER}/$1
-}
-
-import_gpg_keys() {
-  download_dist_file KEYS
-  gpg --import KEYS
-}
-
-fetch_archive() {
-  local dist_name=$1
-  download_rc_file ${dist_name}.tar.gz
-  download_rc_file ${dist_name}.tar.gz.asc
-  download_rc_file ${dist_name}.tar.gz.sha256
-  download_rc_file ${dist_name}.tar.gz.sha512
-  gpg --verify ${dist_name}.tar.gz.asc ${dist_name}.tar.gz
-  shasum -a 256 -c ${dist_name}.tar.gz.sha256
-  shasum -a 512 -c ${dist_name}.tar.gz.sha512
-}
-
-verify_dir_artifact_signatures() {
-  # verify the signature and the checksums of each artifact
-  find $1 -name '*.asc' | while read sigfile; do
-    artifact=${sigfile/.asc/}
-    gpg --verify $sigfile $artifact || exit 1
-
-    # go into the directory because the checksum files contain only the
-    # basename of the artifact
-    pushd $(dirname $artifact)
-    base_artifact=$(basename $artifact)
-    if [ -f $base_artifact.sha256 ]; then
-      shasum -a 256 -c $base_artifact.sha256 || exit 1
-    fi
-    shasum -a 512 -c $base_artifact.sha512 || exit 1
-    popd
-  done
-}
-
-test_binary() {
-  local download_dir=binaries
-  mkdir -p ${download_dir}
-
-  python $SOURCE_DIR/download_rc_binaries.py $VERSION $RC_NUMBER \
-         --dest=${download_dir}
-
-  verify_dir_artifact_signatures ${download_dir}
-}
-
-test_apt() {
-  for target in "debian:buster" \
-                "arm64v8/debian:buster" \
-                "ubuntu:bionic" \
-                "arm64v8/ubuntu:bionic" \
-                "ubuntu:focal" \
-                "arm64v8/ubuntu:focal" \
-                "ubuntu:groovy" \
-                "arm64v8/ubuntu:groovy"; do \
-    case "${target}" in
-      arm64v8/*)
-        if [ "$(arch)" = "aarch64" -o -e /usr/bin/qemu-aarch64-static ]; then
-          : # OK
-        else
-          continue
-        fi
-        ;;
-    esac
-    if ! docker run --rm -v "${SOURCE_DIR}"/../..:/arrow:delegated \
-           "${target}" \
-           /arrow/dev/release/verify-apt.sh \
-           "${VERSION}" \
-           "rc" \
-           "${BINTRAY_REPOSITORY}"; then
-      echo "Failed to verify the APT repository for ${target}"
-      exit 1
-    fi
-  done
-}
-
-test_yum() {
-  for target in "centos:7" \
-                "centos:8" \
-                "arm64v8/centos:8"; do
-    case "${target}" in
-      arm64v8/*)
-        if [ "$(arch)" = "aarch64" -o -e /usr/bin/qemu-aarch64-static ]; then
-          : # OK
-        else
-          continue
-        fi
-        ;;
-    esac
-    if ! docker run --rm -v "${SOURCE_DIR}"/../..:/arrow:delegated \
-           "${target}" \
-           /arrow/dev/release/verify-yum.sh \
-           "${VERSION}" \
-           "rc" \
-           "${BINTRAY_REPOSITORY}"; then
-      echo "Failed to verify the Yum repository for ${target}"
-      exit 1
-    fi
-  done
-}
-
-
-setup_tempdir() {
-  cleanup() {
-    if [ "${TEST_SUCCESS}" = "yes" ]; then
-      rm -fr "${ARROW_TMPDIR}"
-    else
-      echo "Failed to verify release candidate. See ${ARROW_TMPDIR} for details."
-    fi
-  }
-
-  if [ -z "${ARROW_TMPDIR}" ]; then
-    # clean up automatically if ARROW_TMPDIR is not defined
-    ARROW_TMPDIR=$(mktemp -d -t "$1.XXXXX")
-    trap cleanup EXIT
-  else
-    # don't clean up automatically
-    mkdir -p "${ARROW_TMPDIR}"
-  fi
-}
-
-setup_miniconda() {
-  # Setup short-lived miniconda for Python and integration tests
-  if [ "$(uname)" == "Darwin" ]; then
-    if [ "$(uname -m)" == "arm64" ]; then
-      MINICONDA_URL=https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-MacOSX-arm64.sh
-    else
-      MINICONDA_URL=https://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
-    fi
-  else
-    MINICONDA_URL=https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh
-  fi
-
-  MINICONDA=$PWD/test-miniconda
-
-  if [ ! -d "${MINICONDA}" ]; then
-    # Setup miniconda only if the directory doesn't exist yet
-    wget -O miniconda.sh $MINICONDA_URL
-    bash miniconda.sh -b -p $MINICONDA
-    rm -f miniconda.sh
-  fi
-  echo "Installed miniconda at ${MINICONDA}"
-
-  . $MINICONDA/etc/profile.d/conda.sh
-
-  conda create -n arrow-test -y -q -c conda-forge \
-    python=3.8 \
-    nomkl \
-    numpy \
-    pandas \
-    cython
-  conda activate arrow-test
-  echo "Using conda environment ${CONDA_PREFIX}"
-}
-
-# Build and test Java (Requires newer Maven -- I used 3.3.9)
-
-test_package_java() {
-  pushd java
-
-  mvn test
-  mvn package
-
-  popd
-}
-
-# Build and test C++
-
-test_and_install_cpp() {
-  mkdir -p cpp/build
-  pushd cpp/build
-
-  ARROW_CMAKE_OPTIONS="
-${ARROW_CMAKE_OPTIONS:-}
--DCMAKE_INSTALL_PREFIX=$ARROW_HOME
--DCMAKE_INSTALL_LIBDIR=lib
--DARROW_FLIGHT=${ARROW_FLIGHT}
--DARROW_PLASMA=ON
--DARROW_ORC=ON
--DARROW_PYTHON=ON
--DARROW_GANDIVA=${ARROW_GANDIVA}
--DARROW_PARQUET=ON
--DARROW_DATASET=ON
--DPARQUET_REQUIRE_ENCRYPTION=ON
--DARROW_VERBOSE_THIRDPARTY_BUILD=ON
--DARROW_WITH_BZ2=ON
--DARROW_WITH_ZLIB=ON
--DARROW_WITH_ZSTD=ON
--DARROW_WITH_LZ4=ON
--DARROW_WITH_SNAPPY=ON
--DARROW_WITH_BROTLI=ON
--DARROW_BOOST_USE_SHARED=ON
--DCMAKE_BUILD_TYPE=release
--DARROW_BUILD_TESTS=ON
--DARROW_BUILD_INTEGRATION=ON
--DARROW_CUDA=${ARROW_CUDA}
--DARROW_DEPENDENCY_SOURCE=AUTO
-"
-  cmake $ARROW_CMAKE_OPTIONS ..
-
-  make -j$NPROC install
-
-  # TODO: ARROW-5036: plasma-serialization_tests broken
-  # TODO: ARROW-5054: libgtest.so link failure in flight-server-test
-  LD_LIBRARY_PATH=$PWD/release:$LD_LIBRARY_PATH ctest \
-    --exclude-regex "plasma-serialization_tests" \
-    -j$NPROC \
-    --output-on-failure \
-    -L unittest
-  popd
-}
-
-test_csharp() {
-  pushd csharp
-
-  local csharp_bin=${PWD}/bin
-  mkdir -p ${csharp_bin}
-
-  if which dotnet > /dev/null 2>&1; then
-    if ! which sourcelink > /dev/null 2>&1; then
-      local dotnet_tools_dir=$HOME/.dotnet/tools
-      if [ -d "${dotnet_tools_dir}" ]; then
-        PATH="${dotnet_tools_dir}:$PATH"
-      fi
-    fi
-  else
-    local dotnet_version=3.1.405
-    local dotnet_platform=
-    case "$(uname)" in
-      Linux)
-        dotnet_platform=linux
-        ;;
-      Darwin)
-        dotnet_platform=macos
-        ;;
-    esac
-    local dotnet_download_thank_you_url=https://dotnet.microsoft.com/download/thank-you/dotnet-sdk-${dotnet_version}-${dotnet_platform}-x64-binaries
-    local dotnet_download_url=$( \
-      curl --location ${dotnet_download_thank_you_url} | \
-        grep 'window\.open' | \
-        grep -E -o '[^"]+' | \
-        sed -n 2p)
-    curl ${dotnet_download_url} | \
-      tar xzf - -C ${csharp_bin}
-    PATH=${csharp_bin}:${PATH}
-  fi
-
-  dotnet test
-  mv dummy.git ../.git
-  dotnet pack -c Release
-  mv ../.git dummy.git
-
-  if ! which sourcelink > /dev/null 2>&1; then
-    dotnet tool install --tool-path ${csharp_bin} sourcelink
-    PATH=${csharp_bin}:${PATH}
-    if ! sourcelink --help > /dev/null 2>&1; then
-      export DOTNET_ROOT=${csharp_bin}
-    fi
-  fi
-
-  sourcelink test artifacts/Apache.Arrow/Release/netstandard1.3/Apache.Arrow.pdb
-  sourcelink test artifacts/Apache.Arrow/Release/netcoreapp2.1/Apache.Arrow.pdb
-
-  popd
-}
-
-# Build and test Python
-
-test_python() {
-  pushd python
-
-  pip install -r requirements-build.txt -r requirements-test.txt
-
-  export PYARROW_WITH_DATASET=1
-  export PYARROW_WITH_PARQUET=1
-  export PYARROW_WITH_PLASMA=1
-  if [ "${ARROW_CUDA}" = "ON" ]; then
-    export PYARROW_WITH_CUDA=1
-  fi
-  if [ "${ARROW_FLIGHT}" = "ON" ]; then
-    export PYARROW_WITH_FLIGHT=1
-  fi
-  if [ "${ARROW_GANDIVA}" = "ON" ]; then
-    export PYARROW_WITH_GANDIVA=1
-  fi
-
-  python setup.py build_ext --inplace
-  pytest pyarrow -v --pdb
-
-  popd
-}
-
-test_glib() {
-  pushd c_glib
-
-  pip install meson
-
-  meson build --prefix=$ARROW_HOME --libdir=lib
-  ninja -C build
-  ninja -C build install
-
-  export GI_TYPELIB_PATH=$ARROW_HOME/lib/girepository-1.0:$GI_TYPELIB_PATH
-
-  if ! bundle --version; then
-    gem install --no-document bundler
-  fi
-
-  bundle install --path vendor/bundle
-  bundle exec ruby test/run-test.rb
-
-  popd
-}
-
-test_js() {
-  pushd js
-
-  if [ "${INSTALL_NODE}" -gt 0 ]; then
-    export NVM_DIR="`pwd`/.nvm"
-    mkdir -p $NVM_DIR
-    curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | \
-      PROFILE=/dev/null bash
-    [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
-
-    nvm install --lts
-    npm install -g yarn
-  fi
-
-  yarn --frozen-lockfile
-  yarn run-s clean:all lint build
-  yarn test
-  popd
-}
-
-test_ruby() {
-  pushd ruby
-
-  local modules="red-arrow red-plasma red-gandiva red-parquet"
-  if [ "${ARROW_CUDA}" = "ON" ]; then
-    modules="${modules} red-arrow-cuda"
-  fi
-
-  for module in ${modules}; do
-    pushd ${module}
-    bundle install --path vendor/bundle
-    bundle exec ruby test/run-test.rb
-    popd
-  done
-
-  popd
-}
-
-test_go() {
-  local VERSION=1.14.1
-  local ARCH=amd64
-
-  if [ "$(uname)" == "Darwin" ]; then
-    local OS=darwin
-  else
-    local OS=linux
-  fi
-
-  local GO_ARCHIVE=go$VERSION.$OS-$ARCH.tar.gz
-  wget https://dl.google.com/go/$GO_ARCHIVE
-
-  mkdir -p local-go
-  tar -xzf $GO_ARCHIVE -C local-go
-  rm -f $GO_ARCHIVE
-
-  export GOROOT=`pwd`/local-go/go
-  export GOPATH=`pwd`/local-go/gopath
-  export PATH=$GOROOT/bin:$GOPATH/bin:$PATH
-
-  pushd go/arrow
-
-  go get -v ./...
-  go test ./...
-  go clean -modcache
-
-  popd
-}
-
-test_rust() {
-  # install rust toolchain in a similar fashion like test-miniconda
-  export RUSTUP_HOME=$PWD/test-rustup
-  export CARGO_HOME=$PWD/test-rustup
-
-  curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path
-
-  export PATH=$RUSTUP_HOME/bin:$PATH
-  source $RUSTUP_HOME/env
-
-  # build and test rust
-  pushd rust
-
-  # raises on any formatting errors
-  rustup component add rustfmt --toolchain stable
-  cargo +stable fmt --all -- --check
-
-  # we are targeting Rust nightly for releases
-  rustup default nightly
-
-  # use local modules because we don't publish modules to crates.io yet
-  sed \
-    -i.bak \
-    -E \
-    -e 's/^arrow = "([^"]*)"/arrow = { version = "\1", path = "..\/arrow" }/g' \
-    -e 's/^parquet = "([^"]*)"/parquet = { version = "\1", path = "..\/parquet" }/g' \
-    */Cargo.toml
-
-  # raises on any warnings
-  RUSTFLAGS="-D warnings" cargo build
-  cargo test
-
-  popd
-}
-
-# Run integration tests
-test_integration() {
-  JAVA_DIR=$PWD/java
-  CPP_BUILD_DIR=$PWD/cpp/build
-
-  export ARROW_JAVA_INTEGRATION_JAR=$JAVA_DIR/tools/target/arrow-tools-$VERSION-jar-with-dependencies.jar
-  export ARROW_CPP_EXE_PATH=$CPP_BUILD_DIR/release
-
-  pip install -e dev/archery
-
-  INTEGRATION_TEST_ARGS=""
-
-  if [ "${ARROW_FLIGHT}" = "ON" ]; then
-    INTEGRATION_TEST_ARGS="${INTEGRATION_TEST_ARGS} --run-flight"
-  fi
-
-  # Flight integration test executable have runtime dependency on
-  # release/libgtest.so
-  LD_LIBRARY_PATH=$ARROW_CPP_EXE_PATH:$LD_LIBRARY_PATH \
-    archery integration \
-    --with-cpp=${TEST_INTEGRATION_CPP} \
-    --with-java=${TEST_INTEGRATION_JAVA} \
-    --with-js=${TEST_INTEGRATION_JS} \
-    --with-go=${TEST_INTEGRATION_GO} \
-    $INTEGRATION_TEST_ARGS
-}
-
-clone_testing_repositories() {
-  # Clone testing repositories if not cloned already
-  if [ ! -d "arrow-testing" ]; then
-    git clone https://github.com/apache/arrow-testing.git
-  fi
-  if [ ! -d "parquet-testing" ]; then
-    git clone https://github.com/apache/parquet-testing.git
-  fi
-  export ARROW_TEST_DATA=$PWD/arrow-testing/data
-  export PARQUET_TEST_DATA=$PWD/parquet-testing/data
-}
-
-test_source_distribution() {
-  export ARROW_HOME=$ARROW_TMPDIR/install
-  export PARQUET_HOME=$ARROW_TMPDIR/install
-  export LD_LIBRARY_PATH=$ARROW_HOME/lib:${LD_LIBRARY_PATH:-}
-  export PKG_CONFIG_PATH=$ARROW_HOME/lib/pkgconfig:${PKG_CONFIG_PATH:-}
-
-  if [ "$(uname)" == "Darwin" ]; then
-    NPROC=$(sysctl -n hw.ncpu)
-  else
-    NPROC=$(nproc)
-  fi
-
-  clone_testing_repositories
-
-  if [ ${TEST_JAVA} -gt 0 ]; then
-    test_package_java
-  fi
-  if [ ${TEST_CPP} -gt 0 ]; then
-    test_and_install_cpp
-  fi
-  if [ ${TEST_CSHARP} -gt 0 ]; then
-    test_csharp
-  fi
-  if [ ${TEST_PYTHON} -gt 0 ]; then
-    test_python
-  fi
-  if [ ${TEST_GLIB} -gt 0 ]; then
-    test_glib
-  fi
-  if [ ${TEST_RUBY} -gt 0 ]; then
-    test_ruby
-  fi
-  if [ ${TEST_JS} -gt 0 ]; then
-    test_js
-  fi
-  if [ ${TEST_GO} -gt 0 ]; then
-    test_go
-  fi
-  if [ ${TEST_RUST} -gt 0 ]; then
-    test_rust
-  fi
-  if [ ${TEST_INTEGRATION} -gt 0 ]; then
-    test_integration
-  fi
-}
-
-test_binary_distribution() {
-  : ${BINTRAY_REPOSITORY:=apache/arrow}
-
-  if [ ${TEST_BINARY} -gt 0 ]; then
-    test_binary
-  fi
-  if [ ${TEST_APT} -gt 0 ]; then
-    test_apt
-  fi
-  if [ ${TEST_YUM} -gt 0 ]; then
-    test_yum
-  fi
-}
-
-check_python_imports() {
-  python << IMPORT_TESTS
-import platform
-
-import pyarrow
-import pyarrow.parquet
-import pyarrow.plasma
-import pyarrow.fs
-import pyarrow._hdfs
-import pyarrow.dataset
-import pyarrow.flight
-
-if platform.system() == "Darwin":
-    macos_version = tuple(map(int, platform.mac_ver()[0].split('.')))
-    check_s3fs = macos_version >= (10, 13)
-else:
-    check_s3fs = True
-
-if check_s3fs:
-    import pyarrow._s3fs
-IMPORT_TESTS
-}
-
-test_linux_wheels() {
-  local py_arches="3.6m 3.7m 3.8 3.9"
-  local manylinuxes="2010 2014"
-
-  for py_arch in ${py_arches}; do
-    local env=_verify_wheel-${py_arch}
-    conda create -yq -n ${env} python=${py_arch//[mu]/}
-    conda activate ${env}
-    pip install -U pip
-
-    for ml_spec in ${manylinuxes}; do
-      # check the mandatory and optional imports
-      pip install python-rc/${VERSION}-rc${RC_NUMBER}/pyarrow-${VERSION}-cp${py_arch//[mu.]/}-cp${py_arch//./}-manylinux${ml_spec}_x86_64.whl
-      check_python_imports
-
-      # install test requirements and execute the tests
-      pip install -r ${ARROW_DIR}/python/requirements-test.txt
-      python -c 'import pyarrow; pyarrow.create_library_symlinks()'
-      pytest --pyargs pyarrow
-    done
-
-    conda deactivate
-  done
-}
-
-test_macos_wheels() {
-  local py_arches="3.6m 3.7m 3.8 3.9"
-
-  for py_arch in ${py_arches}; do
-    local env=_verify_wheel-${py_arch}
-    conda create -yq -n ${env} python=${py_arch//m/}
-    conda activate ${env}
-    pip install -U pip
-
-    # check the mandatory and optional imports
-    pip install --find-links python-rc/${VERSION}-rc${RC_NUMBER} pyarrow==${VERSION}
-    check_python_imports
-
-    # install test requirements and execute the tests
-    pip install -r ${ARROW_DIR}/python/requirements-test.txt
-    python -c 'import pyarrow; pyarrow.create_library_symlinks()'
-    pytest --pyargs pyarrow
-
-    conda deactivate
-  done
-}
-
-test_wheels() {
-  clone_testing_repositories
-
-  local download_dir=binaries
-  mkdir -p ${download_dir}
-
-  if [ "$(uname)" == "Darwin" ]; then
-    local filter_regex=.*macosx.*
-  else
-    local filter_regex=.*manylinux.*
-  fi
-
-  python $SOURCE_DIR/download_rc_binaries.py $VERSION $RC_NUMBER \
-         --regex=${filter_regex} \
-         --dest=${download_dir}
-
-  verify_dir_artifact_signatures ${download_dir}
-
-  pushd ${download_dir}
-
-  if [ "$(uname)" == "Darwin" ]; then
-    test_macos_wheels
-  else
-    test_linux_wheels
-  fi
-
-  popd
-}
-
-# By default test all functionalities.
-# To deactivate one test, deactivate the test and all of its dependents
-# To explicitly select one test, set TEST_DEFAULT=0 TEST_X=1
-
-# Install NodeJS locally for running the JavaScript tests rather than using the
-# system Node installation, which may be too old.
-: ${INSTALL_NODE:=1}
-
-if [ "${ARTIFACT}" == "source" ]; then
-  : ${TEST_SOURCE:=1}
-elif [ "${ARTIFACT}" == "wheels" ]; then
-  TEST_WHEELS=1
-else
-  TEST_BINARY_DISTRIBUTIONS=1
-fi
-: ${TEST_SOURCE:=0}
-: ${TEST_WHEELS:=0}
-: ${TEST_BINARY_DISTRIBUTIONS:=0}
-
-: ${TEST_DEFAULT:=1}
-: ${TEST_JAVA:=${TEST_DEFAULT}}
-: ${TEST_CPP:=${TEST_DEFAULT}}
-: ${TEST_CSHARP:=${TEST_DEFAULT}}
-: ${TEST_GLIB:=${TEST_DEFAULT}}
-: ${TEST_RUBY:=${TEST_DEFAULT}}
-: ${TEST_PYTHON:=${TEST_DEFAULT}}
-: ${TEST_JS:=${TEST_DEFAULT}}
-: ${TEST_GO:=${TEST_DEFAULT}}
-: ${TEST_RUST:=${TEST_DEFAULT}}
-: ${TEST_INTEGRATION:=${TEST_DEFAULT}}
-if [ ${TEST_BINARY_DISTRIBUTIONS} -gt 0 ]; then
-  TEST_BINARY_DISTRIBUTIONS_DEFAULT=${TEST_DEFAULT}
-else
-  TEST_BINARY_DISTRIBUTIONS_DEFAULT=0
-fi
-: ${TEST_BINARY:=${TEST_BINARY_DISTRIBUTIONS_DEFAULT}}
-: ${TEST_APT:=${TEST_BINARY_DISTRIBUTIONS_DEFAULT}}
-: ${TEST_YUM:=${TEST_BINARY_DISTRIBUTIONS_DEFAULT}}
-
-# For selective Integration testing, set TEST_DEFAULT=0 TEST_INTEGRATION_X=1 TEST_INTEGRATION_Y=1
-: ${TEST_INTEGRATION_CPP:=${TEST_INTEGRATION}}
-: ${TEST_INTEGRATION_JAVA:=${TEST_INTEGRATION}}
-: ${TEST_INTEGRATION_JS:=${TEST_INTEGRATION}}
-: ${TEST_INTEGRATION_GO:=${TEST_INTEGRATION}}
-
-# Automatically test if its activated by a dependent
-TEST_GLIB=$((${TEST_GLIB} + ${TEST_RUBY}))
-TEST_CPP=$((${TEST_CPP} + ${TEST_GLIB} + ${TEST_PYTHON} + ${TEST_INTEGRATION_CPP}))
-TEST_JAVA=$((${TEST_JAVA} + ${TEST_INTEGRATION_JAVA}))
-TEST_JS=$((${TEST_JS} + ${TEST_INTEGRATION_JS}))
-TEST_GO=$((${TEST_GO} + ${TEST_INTEGRATION_GO}))
-TEST_INTEGRATION=$((${TEST_INTEGRATION} + ${TEST_INTEGRATION_CPP} + ${TEST_INTEGRATION_JAVA} + ${TEST_INTEGRATION_JS} + ${TEST_INTEGRATION_GO}))
-
-NEED_MINICONDA=$((${TEST_CPP} + ${TEST_WHEELS} + ${TEST_BINARY} + ${TEST_INTEGRATION}))
-
-: ${TEST_ARCHIVE:=apache-arrow-${VERSION}.tar.gz}
-case "${TEST_ARCHIVE}" in
-  /*)
-    ;;
-  *)
-    TEST_ARCHIVE=${PWD}/${TEST_ARCHIVE}
-    ;;
-esac
-
-TEST_SUCCESS=no
-
-setup_tempdir "arrow-${VERSION}"
-echo "Working in sandbox ${ARROW_TMPDIR}"
-cd ${ARROW_TMPDIR}
-
-if [ ${NEED_MINICONDA} -gt 0 ]; then
-  setup_miniconda
-fi
-
-if [ "${ARTIFACT}" == "source" ]; then
-  dist_name="apache-arrow-${VERSION}"
-  if [ ${TEST_SOURCE} -gt 0 ]; then
-    import_gpg_keys
-    if [ ! -d "${dist_name}" ]; then
-      fetch_archive ${dist_name}
-      tar xf ${dist_name}.tar.gz
-    fi
-  else
-    mkdir -p ${dist_name}
-    if [ ! -f ${TEST_ARCHIVE} ]; then
-      echo "${TEST_ARCHIVE} not found"
-      exit 1
-    fi
-    tar xf ${TEST_ARCHIVE} -C ${dist_name} --strip-components=1
-  fi
-  pushd ${dist_name}
-  test_source_distribution
-  popd
-elif [ "${ARTIFACT}" == "wheels" ]; then
-  import_gpg_keys
-  test_wheels
-else
-  import_gpg_keys
-  test_binary_distribution
-fi
-
-TEST_SUCCESS=yes
-echo 'Release candidate looks good!'
-exit 0
diff --git a/dev/release/verify-yum.sh b/dev/release/verify-yum.sh
deleted file mode 100755
index b9c46c43898c..000000000000
--- a/dev/release/verify-yum.sh
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-set -exu
-
-if [ $# -lt 2 ]; then
-  echo "Usage: $0 VERSION rc"
-  echo "       $0 VERSION release"
-  echo "       $0 VERSION local"
-  echo " e.g.: $0 0.13.0 rc           # Verify 0.13.0 RC"
-  echo " e.g.: $0 0.13.0 release      # Verify 0.13.0"
-  echo " e.g.: $0 0.13.0-dev20210203 local # Verify 0.13.0-dev20210203 on local"
-  exit 1
-fi
-
-VERSION="$1"
-TYPE="$2"
-
-local_prefix="/arrow/dev/tasks/linux-packages"
-
-artifactory_base_url="https://apache.jfrog.io/artifactory/arrow/centos"
-if [ "${TYPE}" = "rc" ]; then
-  artifactory_base_url+="-rc"
-fi
-
-distribution=$(. /etc/os-release && echo "${ID}")
-distribution_version=$(. /etc/os-release && echo "${VERSION_ID}")
-
-cmake_package=cmake
-cmake_command=cmake
-have_flight=yes
-have_gandiva=yes
-have_glib=yes
-have_parquet=yes
-install_command="dnf install -y --enablerepo=powertools"
-case "${distribution}-${distribution_version}" in
-  centos-7)
-    cmake_package=cmake3
-    cmake_command=cmake3
-    have_flight=no
-    have_gandiva=no
-    install_command="yum install -y"
-    ;;
-esac
-if [ "$(arch)" = "aarch64" ]; then
-  have_gandiva=no
-fi
-
-if [ "${TYPE}" = "local" ]; then
-  case "${VERSION}" in
-    *-dev*)
-      package_version="$(echo "${VERSION}" | sed -e 's/-dev\(.*\)$/-0.dev\1/g')"
-      ;;
-    *-rc*)
-      package_version="$(echo "${VERSION}" | sed -e 's/-rc.*$//g')"
-      package_version+="-1"
-      ;;
-    *)
-      package_version="${VERSION}-1"
-      ;;
-  esac
-  package_version+=".el${distribution_version}"
-  release_path="${local_prefix}/yum/repositories"
-  release_path+="/centos/${distribution_version}/$(arch)/Packages"
-  release_path+="/apache-arrow-release-${package_version}.noarch.rpm"
-  ${install_command} "${release_path}"
-else
-  package_version="${VERSION}"
-  ${install_command} \
-    ${artifactory_base_url}/${distribution_version}/apache-arrow-release-latest.rpm
-fi
-
-if [ "${TYPE}" = "local" ]; then
-  sed \
-    -i"" \
-    -e "s,baseurl=https://apache\.jfrog\.io/artifactory/arrow/,baseurl=file://${local_prefix}/yum/repositories/,g" \
-    /etc/yum.repos.d/Apache-Arrow.repo
-  keys="${local_prefix}/KEYS"
-  if [ -f "${keys}" ]; then
-    cp "${keys}" /etc/pki/rpm-gpg/RPM-GPG-KEY-Apache-Arrow
-  fi
-else
-  if [ "${TYPE}" = "rc" ]; then
-    sed \
-      -i"" \
-      -e "s,/centos/,/centos-rc/,g" \
-      /etc/yum.repos.d/Apache-Arrow.repo
-  fi
-fi
-
-${install_command} --enablerepo=epel arrow-devel-${package_version}
-${install_command} \
-  ${cmake_package} \
-  gcc-c++ \
-  git \
-  make
-mkdir -p build
-cp -a /arrow/cpp/examples/minimal_build build
-pushd build/minimal_build
-${cmake_command} .
-make -j$(nproc)
-./arrow_example
-popd
-
-if [ "${have_glib}" = "yes" ]; then
-  ${install_command} --enablerepo=epel arrow-glib-devel-${package_version}
-  ${install_command} --enablerepo=epel arrow-glib-doc-${package_version}
-fi
-${install_command} --enablerepo=epel arrow-python-devel-${package_version}
-
-if [ "${have_glib}" = "yes" ]; then
-  ${install_command} --enablerepo=epel plasma-glib-devel-${package_version}
-  ${install_command} --enablerepo=epel plasma-glib-doc-${package_version}
-else
-  ${install_command} --enablerepo=epel plasma-devel-${package_version}
-fi
-
-if [ "${have_flight}" = "yes" ]; then
-  ${install_command} --enablerepo=epel arrow-flight-devel-${package_version}
-fi
-
-if [ "${have_gandiva}" = "yes" ]; then
-  if [ "${have_glib}" = "yes" ]; then
-    ${install_command} --enablerepo=epel gandiva-glib-devel-${package_version}
-    ${install_command} --enablerepo=epel gandiva-glib-doc-${package_version}
-  else
-    ${install_command} --enablerepo=epel gandiva-devel-${package_version}
-  fi
-fi
-
-if [ "${have_parquet}" = "yes" ]; then
-  if [ "${have_glib}" = "yes" ]; then
-    ${install_command} --enablerepo=epel parquet-glib-devel-${package_version}
-    ${install_command} --enablerepo=epel parquet-glib-doc-${package_version}
-  else
-    ${install_command} --enablerepo=epel parquet-devel-${package_version}
-  fi
-fi