diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..6d7c802d2 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +legate/_version.py export-subst diff --git a/.gitignore b/.gitignore index de90697c9..cba72db85 100644 --- a/.gitignore +++ b/.gitignore @@ -21,10 +21,16 @@ *.json *.dylib legate/core/install_info.py +/dist /build /legion /install* +/_skbuild config.mk /docs/legate/core/build /docs/legate/core/source/api/generated *.egg-info +.cache +.vscode +_cmake_test_compile +!cmake/versions.json diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 000000000..9d7f8c323 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,71 @@ +#============================================================================= +# Copyright 2022 NVIDIA Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +cmake_minimum_required(VERSION 3.22.1 FATAL_ERROR) + +if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.13) + cmake_policy(SET CMP0077 NEW) + set(CMAKE_POLICY_DEFAULT_CMP0077 NEW) +endif() + +if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.16) + cmake_policy(SET CMP0096 NEW) + set(CMAKE_POLICY_DEFAULT_CMP0096 NEW) +endif() + +if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.21) + cmake_policy(SET CMP0126 NEW) + set(CMAKE_POLICY_DEFAULT_CMP0126 NEW) +endif() + +############################################################################## +# - Download and initialize RAPIDS CMake helpers ----------------------------- + +if(NOT EXISTS ${CMAKE_BINARY_DIR}/RAPIDS.cmake) + file(DOWNLOAD https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-22.08/RAPIDS.cmake + ${CMAKE_BINARY_DIR}/RAPIDS.cmake) +endif() +include(${CMAKE_BINARY_DIR}/RAPIDS.cmake) +include(rapids-cmake) +include(rapids-cpm) +include(rapids-cuda) +include(rapids-export) +include(rapids-find) + +set(legate_core_version 22.07.00) + +# For now we want the optimization flags to match on both normal make and cmake +# builds so we override the cmake defaults here for release, this changes +# -O3 to -O2 and removes -DNDEBUG +set(CMAKE_CXX_FLAGS_RELEASE "-O2") +set(CMAKE_CUDA_FLAGS_RELEASE "-O2") +set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "-O2 -g") +set(CMAKE_CUDA_FLAGS_RELWITHDEBINFO "-O2 -g") + +if(NOT SKBUILD) + project(legate_core VERSION ${legate_core_version} LANGUAGES C CXX) + include(${CMAKE_CURRENT_SOURCE_DIR}/legate_core_cpp.cmake) +else() + project( + legate_core_python + VERSION ${legate_core_version} + LANGUAGES # TODO: Building Python extension modules via the python_extension_module requires the C + # language to be enabled here. The test project that is built in scikit-build to verify + # various linking options for the python library is hardcoded to build with C, so until + # that is fixed we need to keep C. 
+ C CXX) + include(${CMAKE_CURRENT_SOURCE_DIR}/legate_core_python.cmake) +endif() diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 000000000..b4e5f32f3 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,2 @@ +include versioneer.py +include legate/_version.py diff --git a/bind.sh b/bind.sh index 5ba329324..764fcb9b1 100755 --- a/bind.sh +++ b/bind.sh @@ -104,4 +104,4 @@ if [[ -n "${NICS+x}" ]]; then ;; esac fi -numactl "$@" +exec numactl "$@" diff --git a/cmake/Modules/cpm_helpers.cmake b/cmake/Modules/cpm_helpers.cmake new file mode 100644 index 000000000..995fe4e50 --- /dev/null +++ b/cmake/Modules/cpm_helpers.cmake @@ -0,0 +1,53 @@ +#============================================================================= +# Copyright 2022 NVIDIA Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +function(get_cpm_git_args _out_var) + + set(oneValueArgs TAG BRANCH REPOSITORY) + cmake_parse_arguments(GIT "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + set(repo_tag "") + set(gh_tag_prefix "") + # Default to specifying `GIT_REPOSITORY` and `GIT_TAG` + set(cpm_git_args GIT_REPOSITORY ${GIT_REPOSITORY}) + + if(GIT_BRANCH) + set(gh_tag_prefix "heads") + set(repo_tag "${GIT_BRANCH}") + list(APPEND cpm_git_args GIT_TAG ${GIT_BRANCH}) + elseif(GIT_TAG) + set(gh_tag_prefix "tags") + set(repo_tag "${GIT_TAG}") + list(APPEND cpm_git_args GIT_TAG ${GIT_TAG}) + endif() + + # Remove `.git` suffix from repo URL + if(GIT_REPOSITORY MATCHES "^(.*)(\.git)$") + set(GIT_REPOSITORY "${CMAKE_MATCH_1}") + endif() + if(GIT_REPOSITORY MATCHES "github\.com") + # If retrieving from github use `.zip` URL to download faster + set(cpm_git_args URL "${GIT_REPOSITORY}/archive/refs/${gh_tag_prefix}/${repo_tag}.zip") + elseif(GIT_REPOSITORY MATCHES "gitlab\.com") + # GitLab archive URIs replace slashes with dashes + string(REPLACE "/" "-" archive_tag "${repo_tag}") + # If retrieving from gitlab use `.zip` URL to download faster + set(cpm_git_args URL "${GIT_REPOSITORY}/-/archive/${repo_tag}/legion-${archive_tag}.zip") + endif() + + set(${_out_var} ${cpm_git_args} PARENT_SCOPE) + +endfunction() diff --git a/cmake/Modules/cuda_arch_helpers.cmake b/cmake/Modules/cuda_arch_helpers.cmake new file mode 100644 index 000000000..c70235f68 --- /dev/null +++ b/cmake/Modules/cuda_arch_helpers.cmake @@ -0,0 +1,88 @@ +#============================================================================= +# Copyright 2022 NVIDIA Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +function(set_cuda_arch_from_names) + set(cuda_archs "") + # translate legacy arch names into numbers + if(CMAKE_CUDA_ARCHITECTURES MATCHES "fermi") + list(APPEND cuda_archs 20) + endif() + if(CMAKE_CUDA_ARCHITECTURES MATCHES "kepler") + list(APPEND cuda_archs 30) + endif() + if(CMAKE_CUDA_ARCHITECTURES MATCHES "k20") + list(APPEND cuda_archs 35) + endif() + if(CMAKE_CUDA_ARCHITECTURES MATCHES "k80") + list(APPEND cuda_archs 37) + endif() + if(CMAKE_CUDA_ARCHITECTURES MATCHES "maxwell") + list(APPEND cuda_archs 52) + endif() + if(CMAKE_CUDA_ARCHITECTURES MATCHES "pascal") + list(APPEND cuda_archs 60) + endif() + if(CMAKE_CUDA_ARCHITECTURES MATCHES "volta") + list(APPEND cuda_archs 70) + endif() + if(CMAKE_CUDA_ARCHITECTURES MATCHES "turing") + list(APPEND cuda_archs 75) + endif() + if(CMAKE_CUDA_ARCHITECTURES MATCHES "ampere") + list(APPEND cuda_archs 80) + endif() + + if(cuda_archs) + list(LENGTH cuda_archs num_archs) + if(num_archs GREATER 1) + # A CMake architecture list entry of "80" means to build both compute and sm. + # What we want is for the newest arch only to build that way, while the rest + # build only for sm. + list(POP_BACK cuda_archs latest_arch) + list(TRANSFORM cuda_archs APPEND "-real") + list(APPEND cuda_archs ${latest_arch}) + else() + list(TRANSFORM cuda_archs APPEND "-real") + endif() + set(CMAKE_CUDA_ARCHITECTURES ${cuda_archs} PARENT_SCOPE) + endif() +endfunction() + +function(add_cuda_architecture_defines defs) + message(VERBOSE "legate.core: CMAKE_CUDA_ARCHITECTURES=${CMAKE_CUDA_ARCHITECTURES}") + + set(_defs ${${defs}}) + + macro(add_def_if_arch_enabled arch def) + if("${arch}" IN_LIST CMAKE_CUDA_ARCHITECTURES OR + ("${arch}-real" IN_LIST CMAKE_CUDA_ARCHITECTURES) OR + ("${arch}-virtual" IN_LIST CMAKE_CUDA_ARCHITECTURES)) + list(APPEND _defs ${def}) + endif() + endmacro() + + add_def_if_arch_enabled("20" "FERMI_ARCH") + add_def_if_arch_enabled("30" "KEPLER_ARCH") + add_def_if_arch_enabled("35" "K20_ARCH") + add_def_if_arch_enabled("37" "K80_ARCH") + add_def_if_arch_enabled("52" "MAXWELL_ARCH") + add_def_if_arch_enabled("60" "PASCAL_ARCH") + add_def_if_arch_enabled("70" "VOLTA_ARCH") + add_def_if_arch_enabled("75" "TURING_ARCH") + add_def_if_arch_enabled("80" "AMPERE_ARCH") + + set(${defs} ${_defs} PARENT_SCOPE) +endfunction() diff --git a/cmake/Modules/legate_core_options.cmake b/cmake/Modules/legate_core_options.cmake new file mode 100644 index 000000000..1a969b1b2 --- /dev/null +++ b/cmake/Modules/legate_core_options.cmake @@ -0,0 +1,121 @@ +#============================================================================= +# Copyright 2022 NVIDIA Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#============================================================================= + +option(BUILD_SHARED_LIBS "Build legate.core shared libraries" ON) + +function(set_or_default var_name var_env) + list(LENGTH ARGN num_extra_args) + if(num_extra_args GREATER 0) + list(GET ARGN 0 var_default) + endif() + if(DEFINED ${var_name}) + message(VERBOSE "legate.core: ${var_name}=${${var_name}}") + elseif(DEFINED ENV{${var_env}}) + set(${var_name} $ENV{${var_env}} PARENT_SCOPE) + message(VERBOSE "legate.core: ${var_name}=$ENV{${var_env}} (from envvar '${var_env}')") + elseif(DEFINED var_default) + set(${var_name} ${var_default} PARENT_SCOPE) + message(VERBOSE "legate.core: ${var_name}=${var_default} (from default value)") + else() + message(VERBOSE "legate.core: not setting ${var_name}") + endif() +endfunction() + +# Initialize these vars from the CLI, then fallback to an envvar or a default value. +set_or_default(Legion_SPY USE_SPY OFF) +set_or_default(Legion_USE_LLVM USE_LLVM OFF) +set_or_default(Legion_USE_CUDA USE_CUDA OFF) +set_or_default(Legion_USE_HDF5 USE_HDF OFF) +set_or_default(Legion_USE_GASNet USE_GASNET OFF) +set_or_default(Legion_USE_OpenMP USE_OPENMP OFF) +set_or_default(Legion_BOUNDS_CHECKS CHECK_BOUNDS OFF) + +option(Legion_SPY "Enable detailed logging for Legion Spy" OFF) +option(Legion_USE_LLVM "Use LLVM JIT operations" OFF) +option(Legion_USE_HDF5 "Enable support for HDF5" OFF) +option(Legion_USE_CUDA "Enable Legion support for the CUDA runtime" OFF) +option(Legion_USE_GASNet "Enable the distributed GASNet backend" OFF) +option(Legion_USE_OpenMP "Use OpenMP" OFF) +option(Legion_USE_Python "Use Python" OFF) +option(Legion_BOUNDS_CHECKS "Enable bounds checking in Legion accessors" OFF) + +if(Legion_USE_GASNet) + set_or_default(GASNet_ROOT_DIR GASNET) + set_or_default(GASNet_CONDUIT CONDUIT "udp") + + if(Legion_USE_GASNet AND (NOT GASNet_ROOT_DIR)) + option(Legion_EMBED_GASNet "Embed a custom GASNet build into Legion" ON) + endif() +endif() + +set_or_default(Legion_MAX_DIM LEGION_MAX_DIM 4) + +# Check the max dimensions +if((Legion_MAX_DIM LESS 1) OR (Legion_MAX_DIM GREATER 9)) + message(FATAL_ERROR "The maximum number of Legate dimensions must be between 1 and 9 inclusive") +endif() + +set_or_default(Legion_MAX_FIELDS LEGION_MAX_FIELDS 256) + +# Check that max fields is between 32 and 4096 and is a power of 2 +if(NOT Legion_MAX_FIELDS MATCHES "^(32|64|128|256|512|1024|2048|4096)$") + message(FATAL_ERROR "The maximum number of Legate fields must be a power of 2 between 32 and 4096 inclusive") +endif() + + +option(legate_core_STATIC_CUDA_RUNTIME "Statically link the cuda runtime library" OFF) +option(legate_core_EXCLUDE_LEGION_FROM_ALL "Exclude Legion targets from legate.core's 'all' target" OFF) + +set_or_default(NCCL_DIR NCCL_PATH) +set_or_default(Thrust_DIR THRUST_PATH) +set_or_default(CUDA_TOOLKIT_ROOT_DIR CUDA) +set_or_default(CMAKE_CUDA_ARCHITECTURES GPU_ARCH NATIVE) +set_or_default(Legion_HIJACK_CUDART USE_CUDART_HIJACK OFF) + +include(CMakeDependentOption) +cmake_dependent_option(Legion_HIJACK_CUDART + "Allow Legion to hijack and rewrite application calls into the CUDA runtime" + ON + "Legion_USE_CUDA;Legion_HIJACK_CUDART" + OFF) +# This needs to be added as an option to force values to be visible in Legion build +option(Legion_HIJACK_CUDART "Replace default CUDA runtime with the Realm version" OFF) + +if(Legion_HIJACK_CUDART) + message(WARNING [=[ +##################################################################### +Warning: Realm's CUDA runtime hijack is incompatible with 
NCCL. +Please note that your code will crash catastrophically as soon as it +calls into NCCL either directly or through some other Legate library. +##################################################################### +]=]) +endif() + +if(BUILD_SHARED_LIBS) + if(Legion_HIJACK_CUDART) + # Statically link CUDA if HIJACK_CUDART is set + set(Legion_CUDA_DYNAMIC_LOAD OFF) + set(CUDA_USE_STATIC_CUDA_RUNTIME ON) + elseif(NOT DEFINED Legion_CUDA_DYNAMIC_LOAD) + # If HIJACK_CUDART isn't set and BUILD_SHARED_LIBS is true, default Legion_CUDA_DYNAMIC_LOAD to true + set(Legion_CUDA_DYNAMIC_LOAD ON) + set(CUDA_USE_STATIC_CUDA_RUNTIME OFF) + endif() +elseif(NOT DEFINED Legion_CUDA_DYNAMIC_LOAD) + # If BUILD_SHARED_LIBS is false, default Legion_CUDA_DYNAMIC_LOAD to false also + set(Legion_CUDA_DYNAMIC_LOAD OFF) + set(CUDA_USE_STATIC_CUDA_RUNTIME ON) +endif() diff --git a/cmake/Modules/set_cpu_arch_flags.cmake b/cmake/Modules/set_cpu_arch_flags.cmake new file mode 100644 index 000000000..ff3e35ca3 --- /dev/null +++ b/cmake/Modules/set_cpu_arch_flags.cmake @@ -0,0 +1,84 @@ +#============================================================================= +# Copyright 2022 NVIDIA Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +#------------------------------------------------------------------------------# +# Architecture +#------------------------------------------------------------------------------# +if(BUILD_MARCH AND BUILD_MCPU) + message(FATAL_ERROR "BUILD_MARCH and BUILD_MCPU are incompatible") +endif() + +function(set_cpu_arch_flags out_var) + # Try -march first. On platforms that don't support it, GCC will issue a hard + # error, so we'll know not to use it. Default is "native", but explicitly + # setting BUILD_MARCH="" disables use of the flag + if(BUILD_MARCH) + set(INTERNAL_BUILD_MARCH ${BUILD_MARCH}) + elseif(NOT DEFINED BUILD_MARCH) + set(INTERNAL_BUILD_MARCH "native") + endif() + + set(flags "") + + include(CheckCXXCompilerFlag) + if(INTERNAL_BUILD_MARCH) + check_cxx_compiler_flag("-march=${INTERNAL_BUILD_MARCH}" COMPILER_SUPPORTS_MARCH) + if(COMPILER_SUPPORTS_MARCH) + list(APPEND flags "-march=${INTERNAL_BUILD_MARCH}") + elseif(BUILD_MARCH) + message(FATAL_ERROR "The flag -march=${INTERNAL_BUILD_MARCH} is not supported by the compiler") + else() + unset(INTERNAL_BUILD_MARCH) + endif() + endif() + + # Try -mcpu. We do this second because it is deprecated on x86, but + # GCC won't issue a hard error, so we can't tell if it worked or not. 
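+  # (For example, on AArch64 and POWER `-mcpu=native` targets the host CPU, while x86
+  # GCC treats -mcpu as a deprecated alias for -mtune and only emits a warning, so the
+  # check_cxx_compiler_flag test below cannot detect that the flag was effectively ignored.)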
+ if (NOT INTERNAL_BUILD_MARCH AND NOT DEFINED BUILD_MARCH) + if(BUILD_MCPU) + set(INTERNAL_BUILD_MCPU ${BUILD_MCPU}) + else() + set(INTERNAL_BUILD_MCPU "native") + endif() + + check_cxx_compiler_flag("-mcpu=${INTERNAL_BUILD_MCPU}" COMPILER_SUPPORTS_MCPU) + if(COMPILER_SUPPORTS_MCPU) + list(APPEND flags "-mcpu=${INTERNAL_BUILD_MCPU}") + elseif(BUILD_MCPU) + message(FATAL_ERROR "The flag -mcpu=${INTERNAL_BUILD_MCPU} is not supported by the compiler") + else() + unset(INTERNAL_BUILD_MCPU) + endif() + endif() + + # Add flags for Power architectures + check_cxx_compiler_flag("-maltivec -Werror" COMPILER_SUPPORTS_MALTIVEC) + if(COMPILER_SUPPORTS_MALTIVEC) + list(APPEND flags "-maltivec") + endif() + check_cxx_compiler_flag("-mabi=altivec -Werror" COMPILER_SUPPORTS_MABI_ALTIVEC) + if(COMPILER_SUPPORTS_MABI_ALTIVEC) + list(APPEND flags "-mabi=altivec") + endif() + check_cxx_compiler_flag("-mvsx -Werror" COMPILER_SUPPORTS_MVSX) + if(COMPILER_SUPPORTS_MVSX) + list(APPEND flags "-mvsx") + endif() + + set(${out_var} "${flags}" PARENT_SCOPE) +endfunction() + +set_cpu_arch_flags(arch_flags) diff --git a/cmake/thirdparty/get_legion.cmake b/cmake/thirdparty/get_legion.cmake new file mode 100644 index 000000000..6e51bbf6d --- /dev/null +++ b/cmake/thirdparty/get_legion.cmake @@ -0,0 +1,128 @@ +#============================================================================= +# Copyright 2022 NVIDIA Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +function(find_or_configure_legion) + set(oneValueArgs VERSION REPOSITORY BRANCH EXCLUDE_FROM_ALL) + cmake_parse_arguments(PKG "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + + include("${rapids-cmake-dir}/export/detail/parse_version.cmake") + rapids_export_parse_version(${PKG_VERSION} Legion PKG_VERSION) + + set(Legion_CUDA_ARCH "") + if(Legion_USE_CUDA) + set(Legion_CUDA_ARCH ${CMAKE_CUDA_ARCHITECTURES}) + list(TRANSFORM Legion_CUDA_ARCH REPLACE "-real" "") + list(TRANSFORM Legion_CUDA_ARCH REPLACE "-virtual" "") + list(JOIN Legion_CUDA_ARCH "," Legion_CUDA_ARCH) + endif() + + # Detect the presence of LIBRARY_PATH envvar so we can set + # `CMAKE_LIBRARY_PATH` for Legion's FindCUDA.cmake calls. + set(_lib_path "${CMAKE_LIBRARY_PATH}") + if(DEFINED ENV{LIBRARY_PATH}) + list(APPEND _lib_path "$ENV{LIBRARY_PATH}") + endif() + + set(FIND_PKG_ARGS + GLOBAL_TARGETS Legion::Realm + Legion::Regent + Legion::Legion + Legion::RealmRuntime + Legion::LegionRuntime + BUILD_EXPORT_SET legate-core-exports + INSTALL_EXPORT_SET legate-core-exports) + + # First try to find Legion via find_package() + # so the `Legion_USE_*` variables are visible + # Use QUIET find by default. + set(_find_mode QUIET) + # If Legion_DIR/Legion_ROOT are defined as something other than empty or NOTFOUND + # use a REQUIRED find so that the build does not silently download Legion. 
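+  # (Illustrative only: a pre-built Legion can be supplied at configure time, e.g.
+  #   cmake -S . -B build -D Legion_ROOT=/path/to/legion-install
+  # in which case the REQUIRED find below fails the configure step loudly rather than
+  # silently falling back to the CPM download.)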
+ if(Legion_DIR OR Legion_ROOT) + set(_find_mode REQUIRED) + endif() + rapids_find_package(Legion ${PKG_VERSION} EXACT CONFIG ${_find_mode} ${FIND_PKG_ARGS}) + + if(Legion_FOUND) + message(STATUS "CPM: using local package Legion@${PKG_VERSION}") + else() + include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules/cpm_helpers.cmake) + get_cpm_git_args(legion_cpm_git_args REPOSITORY ${PKG_REPOSITORY} BRANCH ${PKG_BRANCH}) + if(NOT DEFINED Legion_PYTHON_EXTRA_INSTALL_ARGS) + set(Legion_PYTHON_EXTRA_INSTALL_ARGS "--single-version-externally-managed --root=/") + endif() + # Workaround until https://gitlab.com/StanfordLegion/legion/-/merge_requests/523 is merged + if(NOT DEFINED Legion_CMAKE_INSTALL_PREFIX) + set(Legion_CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}") + endif() + # Because legion sets these as cache variables, we need to force set this as a cache variable here + # to ensure that Legion doesn't override this in the CMakeCache.txt and create an unexpected state. + # This only applies to set() but does not apply to option() variables. + # See discussion of FetchContent subtleties: + # Only use these FORCE calls if using a Legion subbuild. + # https://discourse.cmake.org/t/fetchcontent-cache-variables/1538/8 + set(Legion_MAX_DIM ${Legion_MAX_DIM} CACHE STRING "The max number of dimensions for Legion" FORCE) + set(Legion_MAX_FIELDS ${Legion_MAX_FIELDS} CACHE STRING "The max number of fields for Legion" FORCE) + rapids_cpm_find(Legion ${PKG_VERSION} ${FIND_PKG_ARGS} + CPM_ARGS + ${legion_cpm_git_args} + FIND_PACKAGE_ARGUMENTS EXACT + EXCLUDE_FROM_ALL ${PKG_EXCLUDE_FROM_ALL} + OPTIONS "CMAKE_CXX_STANDARD 17" + "CMAKE_LIBRARY_PATH ${_lib_path}" + "CMAKE_INSTALL_PREFIX ${Legion_CMAKE_INSTALL_PREFIX}" + "Legion_VERSION ${PKG_VERSION}" + "Legion_BUILD_BINDINGS ON" + "Legion_BUILD_APPS OFF" + "Legion_BUILD_TESTS OFF" + "Legion_BUILD_TUTORIAL OFF" + "Legion_REDOP_HALF ON" + "Legion_REDOP_COMPLEX ON" + "Legion_GPU_REDUCTIONS OFF" + ) + endif() + + set(Legion_USE_CUDA ${Legion_USE_CUDA} PARENT_SCOPE) + set(Legion_USE_OpenMP ${Legion_USE_OpenMP} PARENT_SCOPE) + set(Legion_USE_Python ${Legion_USE_Python} PARENT_SCOPE) + if("${Legion_NETWORKS}" MATCHES ".*gasnet(1|ex).*") + set(Legion_USE_GASNet ON PARENT_SCOPE) + endif() + + message(VERBOSE "Legion_USE_CUDA=${Legion_USE_CUDA}") + message(VERBOSE "Legion_USE_OpenMP=${Legion_USE_OpenMP}") + message(VERBOSE "Legion_USE_Python=${Legion_USE_Python}") + message(VERBOSE "Legion_USE_GASNet=${Legion_USE_GASNet}") + +endfunction() + +if(NOT DEFINED legate_core_LEGION_BRANCH) + set(legate_core_LEGION_BRANCH control_replication) +endif() + +if(NOT DEFINED legate_core_LEGION_REPOSITORY) + set(legate_core_LEGION_REPOSITORY https://gitlab.com/StanfordLegion/legion.git) +endif() + +if(NOT DEFINED legate_core_LEGION_VERSION) + set(legate_core_LEGION_VERSION "${legate_core_VERSION_MAJOR}.${legate_core_VERSION_MINOR}.0") +endif() + +find_or_configure_legion(VERSION ${legate_core_LEGION_VERSION} + REPOSITORY ${legate_core_LEGION_REPOSITORY} + BRANCH ${legate_core_LEGION_BRANCH} + EXCLUDE_FROM_ALL ${legate_core_EXCLUDE_LEGION_FROM_ALL} +) diff --git a/cmake/thirdparty/get_nccl.cmake b/cmake/thirdparty/get_nccl.cmake new file mode 100644 index 000000000..1aee52b6f --- /dev/null +++ b/cmake/thirdparty/get_nccl.cmake @@ -0,0 +1,34 @@ +#============================================================================= +# Copyright 2022 NVIDIA Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +function(find_or_configure_nccl) + + if(TARGET NCCL::NCCL) + return() + endif() + + rapids_find_generate_module(NCCL + HEADER_NAMES nccl.h + LIBRARY_NAMES nccl + ) + + # Currently NCCL has no CMake build-system so we require + # it built and installed on the machine already + rapids_find_package(NCCL REQUIRED) + +endfunction() + +find_or_configure_nccl() diff --git a/cmake/thirdparty/get_thrust.cmake b/cmake/thirdparty/get_thrust.cmake new file mode 100644 index 000000000..84784a1ce --- /dev/null +++ b/cmake/thirdparty/get_thrust.cmake @@ -0,0 +1,26 @@ +#============================================================================= +# Copyright 2022 NVIDIA Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +# Use CPM to find or clone thrust +function(find_or_configure_thrust) + include(${rapids-cmake-dir}/cpm/thrust.cmake) + + rapids_cpm_thrust(NAMESPACE legate + BUILD_EXPORT_SET legate-core-exports + INSTALL_EXPORT_SET legate-core-exports) +endfunction() + +find_or_configure_thrust() diff --git a/cmake/versions.json b/cmake/versions.json new file mode 100644 index 000000000..02d16cf16 --- /dev/null +++ b/cmake/versions.json @@ -0,0 +1,9 @@ +{ + "packages" : { + "Thrust" : { + "version" : "1.15.0.0", + "git_url" : "https://github.com/NVIDIA/thrust.git", + "git_tag" : "1.15.0" + } + } +} diff --git a/conda/conda-build/build.sh b/conda/conda-build/build.sh index 182170f5b..769826d8e 100644 --- a/conda/conda-build/build.sh +++ b/conda/conda-build/build.sh @@ -1,46 +1,54 @@ -# Do not compile with NDEBUG until Legion handles it without warnings -export CPPFLAGS="$CPPFLAGS -UNDEBUG" - -install_args=() - -# We rely on an environment variable to determine if we need to build cpu-only bits -if [ -z "$CPU_ONLY" ]; then - # cuda, relying on the stub library provided by the toolkit - install_args+=("--cuda" "--with-cuda" "$BUILD_PREFIX") - - # nccl, relying on the conda nccl package - install_args+=("--with-nccl" "$PREFIX") - - # targetted architecture to compile cubin support for - install_args+=("--arch" "70,75,80") -fi - -#CPU targeting -install_args+=("--march" "haswell") +#!/bin/bash -#openMP support -install_args+=("--openmp") +set -x; -# Target directory -install_args+=("--install-dir" "$PREFIX") +# Rewrite conda's -DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY to +# -DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=BOTH +CMAKE_ARGS="$(echo "$CMAKE_ARGS" | sed -r "s@_INCLUDE=ONLY@_INCLUDE=BOTH@g")" -# Verbose mode 
-install_args+=("-v") +# Add our options to conda's CMAKE_ARGS +CMAKE_ARGS+=" +--log-level=VERBOSE +-DBUILD_MARCH=haswell +-DLegion_USE_OpenMP=ON +-DLegion_USE_Python=ON +-DLegion_BUILD_BINDINGS=ON" -# Move the stub library into the lib package to make the install think it's pointing at a live installation +# We rely on an environment variable to determine if we need to build cpu-only bits if [ -z "$CPU_ONLY" ]; then - cp $PREFIX/lib/stubs/libcuda.so $PREFIX/lib/libcuda.so - ln -s $PREFIX/lib $PREFIX/lib64 + CMAKE_ARGS+=" +-DLegion_USE_CUDA=ON +-DCUDA_TOOLKIT_ROOT_DIR=$PREFIX +-DCMAKE_LIBRARY_PATH=$PREFIX/lib/stubs +-DCMAKE_CUDA_ARCHITECTURES:LIST=60-real;70-real;75-real;80-real;86 +" fi -echo "Install command: $PYTHON install.py ${install_args[@]}" -$PYTHON install.py "${install_args[@]}" - -# Remove the stub library and linking -if [ -z "$CPU_ONLY" ]; then - rm $PREFIX/lib/libcuda.so - rm $PREFIX/lib64 -fi +# Do not compile with NDEBUG until Legion handles it without warnings +export CFLAGS="-UNDEBUG" +export CXXFLAGS="-UNDEBUG" +export CPPFLAGS="-UNDEBUG" +export CUDAFLAGS="-UNDEBUG" +export CMAKE_GENERATOR=Ninja + +cmake -S . -B build ${CMAKE_ARGS} +cmake --build build -j$CPU_COUNT +cmake --install build + +CMAKE_ARGS=" +-DFIND_LEGATE_CORE_CPP=ON +-Dlegate_core_ROOT=$PREFIX +" + +SKBUILD_BUILD_OPTIONS=-j$CPU_COUNT \ +$PYTHON -m pip install \ + --root / \ + --no-deps \ + --prefix "$PREFIX" \ + --no-build-isolation \ + --cache-dir "$PIP_CACHE_DIR" \ + --disable-pip-version-check \ + . -vv # Legion leaves an egg-info file which will confuse conda trying to pick up the information # Remove it so the legate-core is the only egg-info file added diff --git a/conda/conda-build/conda_build_config.yaml b/conda/conda-build/conda_build_config.yaml index 8a4d9462f..e970f469e 100644 --- a/conda/conda-build/conda_build_config.yaml +++ b/conda/conda-build/conda_build_config.yaml @@ -1,7 +1,17 @@ gpu_enabled: - true - false + python: - 3.8 - 3.9 - 3.10 + +numpy_version: + - ">=1.22" + +cmake_version: + - ">=3.20.1,!=3.23.0" + +pyarrow_version: + - ">=8.0.0" diff --git a/conda/conda-build/meta.yaml b/conda/conda-build/meta.yaml index da9618c57..59e8b2f07 100644 --- a/conda/conda-build/meta.yaml +++ b/conda/conda-build/meta.yaml @@ -65,36 +65,48 @@ build: # once an nccl package compatible with cuda-* packages is introduced, this can be removed # ignore_run_exports: # - cudatoolkit + ignore_run_exports_from: + - cuda-nvcc {% endif %} requirements: build: + - make + - ninja + - cmake {{ cmake_version }} - {{ compiler('c') }} =11.2 - {{ compiler('cxx') }} =11.2 - - make -{% if gpu_enabled_bool %} - - cuda-nvcc ={{ cuda_version }} -{% endif %} host: - zlib - python + - llvm-openmp + - scikit-build + - numpy {{ numpy_version }} {% if gpu_enabled_bool %} - nccl + - cudatoolkit ={{ cuda_version }} + - cuda-nvcc ={{ cuda_version }} - cuda-nvtx ={{ cuda_version }} + - cuda-cccl ={{ cuda_version }} + - cuda-cudart ={{ cuda_version }} - cuda-driver-dev ={{ cuda_version }} - cuda-cudart-dev ={{ cuda_version }} {% endif %} run: - cffi - - pyarrow >=5.0.0 + - llvm-openmp + - numpy {{ numpy_version }} + - pyarrow {{ pyarrow_version }} {% if gpu_enabled_bool %} - cuda-cudart >={{ cuda_version }} - nccl - - __cuda >=11.4 {% endif %} run_constrained: - __glibc >=2.17 # [linux] +{% if gpu_enabled_bool %} + - __cuda >={{ cuda_version }} +{% endif %} test: imports: diff --git a/conda/environment-test-3.10.yml b/conda/environment-test-3.10.yml index 7ff2fdcef..71bc39a89 100644 --- a/conda/environment-test-3.10.yml +++ 
b/conda/environment-test-3.10.yml @@ -1,14 +1,20 @@ -name: cunumeric-test +name: legate-core-test channels: - conda-forge - - defaults dependencies: - python=3.10 # build - - cutensor>=1.3.3 + - git - nccl + - make + - zlib + - cmake + - ninja + - c-compiler + - cxx-compiler - setuptools>=60 + - scikit-build>=0.13.1 # runtime - cffi @@ -17,6 +23,7 @@ dependencies: - pyarrow>=5 - scipy - typing_extensions + - llvm-openmp # tests - clang>=8 diff --git a/conda/environment-test-3.8.yml b/conda/environment-test-3.8.yml index b4874cab6..9221c3fe9 100644 --- a/conda/environment-test-3.8.yml +++ b/conda/environment-test-3.8.yml @@ -1,14 +1,20 @@ -name: cunumeric-test +name: legate-core-test channels: - conda-forge - - defaults dependencies: - python=3.8 # build - - cutensor>=1.3.3 + - git - nccl + - make + - zlib + - cmake + - ninja + - c-compiler + - cxx-compiler - setuptools>=60 + - scikit-build>=0.13.1 # runtime - cffi @@ -17,6 +23,7 @@ dependencies: - pyarrow>=5 - scipy - typing_extensions + - llvm-openmp # tests - clang>=8 diff --git a/conda/environment-test-3.9.yml b/conda/environment-test-3.9.yml index 86f7b72fe..783136972 100644 --- a/conda/environment-test-3.9.yml +++ b/conda/environment-test-3.9.yml @@ -1,14 +1,20 @@ -name: cunumeric-test +name: legate-core-test channels: - conda-forge - - defaults dependencies: - python=3.9 # build - - cutensor>=1.3.3 + - git - nccl + - make + - zlib + - cmake + - ninja + - c-compiler + - cxx-compiler - setuptools>=60 + - scikit-build>=0.13.1 # runtime - cffi @@ -17,6 +23,7 @@ dependencies: - pyarrow>=5 - scipy - typing_extensions + - llvm-openmp # tests - clang>=8 diff --git a/install.py b/install.py index 6d252b97e..504437aea 100755 --- a/install.py +++ b/install.py @@ -16,7 +16,6 @@ # import argparse -import json import multiprocessing import os import platform @@ -24,22 +23,11 @@ import shutil import subprocess import sys -import tempfile -import time from distutils import sysconfig -import setuptools - # Flush output on newlines sys.stdout.reconfigure(line_buffering=True) -os_name = platform.system() - -# Work around breaking change in setuptools 60 -setup_py_flags = [] -if int(setuptools.__version__.split(".")[0]) >= 60: - setup_py_flags = ["--single-version-externally-managed", "--root=/"] - class BooleanFlag(argparse.Action): def __init__( @@ -81,22 +69,10 @@ def __call__(self, parser, namespace, values, option_string): setattr(namespace, self.dest, not option_string.startswith("--no")) -required_thrust_version = "cuda-11.2" - -# Global variable for verbose installation -verbose_global = False - - -def verbose_check_call(*args, **kwargs): - if verbose_global: - print('Executing: "', " ".join(*args), '" with ', kwargs) - subprocess.check_call(*args, **kwargs) - - -def verbose_check_output(*args, **kwargs): - if verbose_global: - print('Executing: "', " ".join(*args), '" with ', kwargs) - return subprocess.check_output(*args, **kwargs) +def execute_command(args, verbose, **kwargs): + if verbose: + print('Executing: "', " ".join(args), '" with ', kwargs) + subprocess.check_call(args, **kwargs) def find_active_python_version_and_path(): @@ -121,7 +97,8 @@ def find_active_python_version_and_path(): ] # ensure that static libraries are replaced with the dynamic version paths = [ - os.path.splitext(p)[0] + (".dylib" if os_name == "Darwin" else ".so") + os.path.splitext(p)[0] + + (".dylib" if platform.system() == "Darwin" else ".so") for p in paths ] paths = [p for p in paths if os.path.isfile(p)] @@ -130,440 +107,6 @@ def 
find_active_python_version_and_path(): return version, paths[0] -def git_clone(repo_dir, url, branch=None, tag=None, commit=None): - assert branch is not None or tag is not None or commit is not None - if branch is not None: - verbose_check_call( - ["git", "clone", "--recursive", "-b", branch, url, repo_dir] - ) - elif commit is not None: - verbose_check_call(["git", "clone", "--recursive", url, repo_dir]) - verbose_check_call(["git", "checkout", commit], cwd=repo_dir) - verbose_check_call( - ["git", "submodule", "update", "--init"], cwd=repo_dir - ) - git_reset(repo_dir, commit) - else: - verbose_check_call( - [ - "git", - "clone", - "--recursive", - "--single-branch", - "-b", - tag, - url, - repo_dir, - ] - ) - verbose_check_call(["git", "checkout", "-b", "master"], cwd=repo_dir) - - -def git_reset(repo_dir, refspec): - verbose_check_call(["git", "reset", "--hard", refspec], cwd=repo_dir) - - -def git_update(repo_dir, branch=None, tag=None, commit=None): - if branch is not None: - verbose_check_call(["git", "fetch"], cwd=repo_dir) - verbose_check_call(["git", "checkout", branch], cwd=repo_dir) - verbose_check_call(["git", "pull", "--ff-only"], cwd=repo_dir) - else: - verbose_check_call(["git", "fetch"], cwd=repo_dir) - verbose_check_call(["git", "checkout", commit or tag], cwd=repo_dir) - - -def load_json_config(filename): - try: - with open(filename, "r") as f: - return json.load(f) - except IOError: - return None - - -def dump_json_config(filename, value): - with open(filename, "w") as f: - return json.dump(value, f) - - -def symlink(from_path, to_path): - if not os.path.lexists(to_path): - os.symlink(from_path, to_path) - - -def install_gasnet(gasnet_dir, conduit, thread_count): - print("Legate is installing GASNet into a local directory...") - temp_dir = tempfile.mkdtemp() - git_clone( - temp_dir, - url="https://github.com/StanfordLegion/gasnet.git", - branch="master", - ) - # Update the configuration file with the prefix for our output - # Then we can invoke make - verbose_check_call( - [ - "make", - "-j", - str(thread_count), - "CONDUIT=" + str(conduit), - "GASNET_INSTALL_DIR=" + str(gasnet_dir), - ], - cwd=temp_dir, - ) - shutil.rmtree(temp_dir) - - -def install_legion(legion_src_dir, branch, commit=None): - print("Legate is installing Legion into a local directory...") - # For now all we have to do is clone legion since we build it with Legate - git_clone( - legion_src_dir, - url="https://gitlab.com/StanfordLegion/legion.git", - branch=branch, - commit=commit, - ) - - -def install_thrust(thrust_dir): - print("Legate is installing Thrust into a local directory...") - git_clone( - thrust_dir, - url="https://github.com/thrust/thrust.git", - tag=required_thrust_version, - ) - - -def update_legion(legion_src_dir, branch, commit=None): - # Make sure we are on the right branch for single/multi-node - git_update(legion_src_dir, branch=branch, commit=commit) - - -def build_legion( - legion_src_dir, - install_dir, - cmake, - cmake_exe, - cuda_dir, - debug, - debug_release, - check_bounds, - cuda, - arch, - openmp, - march, - llvm, - hdf, - spy, - gasnet, - gasnet_dir, - conduit, - pyversion, - pylib_name, - maxdim, - maxfields, - clean_first, - extra_flags, - thread_count, - verbose, -): - no_hijack = True - - if cuda and os.environ.get("USE_CUDART_HIJACK", "0") == "1": - print( - """ -##################################################################### -Warning: Realm's CUDA runtime hijack is incompatible with NCCL. 
-Please note that your code will crash catastrophically as soon as it -calls into NCCL either directly or through some other Legate library. -##################################################################### - """ - ) - time.sleep(10) - no_hijack = False - - if cmake: - build_dir = os.path.join(legion_src_dir, "build") - try: - shutil.rmtree(build_dir) - except FileNotFoundError: - pass - if not os.path.exists(build_dir): - os.mkdir(build_dir) - flags = ( - [ - "-DCMAKE_BUILD_TYPE=%s" - % ( - "Debug" - if debug - else "RelWithDebInfo" - if debug_release - else "Release" - ), - "-DLegion_MAX_DIM=%s" % (str(maxdim)), - "-DLegion_MAX_FIELDS=%s" % (str(maxfields)), - "-DLegion_USE_CUDA=%s" % ("ON" if cuda else "OFF"), - "-DLegion_GPU_ARCH=%s" % arch, - "-DLegion_USE_OpenMP=%s" % ("ON" if openmp else "OFF"), - "-DBUILD_MARCH=%s" % march, - "-DLegion_USE_LLVM=%s" % ("ON" if llvm else "OFF"), - "-DLegion_USE_GASNet=%s" % ("ON" if gasnet else "OFF"), - "-DLegion_USE_HDF5=%s" % ("ON" if hdf else "OFF"), - "-DCMAKE_INSTALL_PREFIX=%s" % (os.path.realpath(install_dir)), - "-DLegion_USE_Python=On", - "-DLegion_Python_Version=%s" % pyversion, - "-DLegion_REDOP_COMPLEX=On", - "-DLegion_REDOP_HALF=On", - "-DBUILD_SHARED_LIBS=ON", - "-DLegion_BUILD_BINDINGS=On", - ] - + extra_flags - + (["-DLegion_BOUNDS_CHECKS=On"] if check_bounds else []) - + (["-DLegion_HIJACK_CUDART=Off"] if no_hijack else []) - + ( - ["-DGASNet_ROOT_DIR=%s" % gasnet_dir] - if gasnet_dir is not None - else [] - ) - + ( - ["-DGASNet_CONDUIT=%s" % conduit] - if conduit is not None - else [] - ) - + ( - ["-DCUDA_TOOLKIT_ROOT_DIR=%s" % cuda_dir] - if cuda_dir is not None - else [] - ) - + ( - ["-DCMAKE_CXX_COMPILER=%s" % os.environ["CXX"]] - if "CXX" in os.environ - else [] - ) - + ( - ["-DCMAKE_CXX_FLAGS=%s" % os.environ["CC_FLAGS"]] - if "CC_FLAGS" in os.environ - else [] - ) - ) - make_flags = ["VERBOSE=1"] if verbose else [] - make_flags += ["-C", os.path.realpath(build_dir)] - if spy: - raise NotImplementedError("Need support for Legion Spy with cmake") - try: - subprocess.check_output([cmake_exe, "--version"]) - except OSError: - print( - "Error: CMake is not installed or otherwise not executable. " - "Please check" - ) - print( - "your CMake installation and try again. 
You can use the " - "--with-cmake flag" - ) - print("to specify the CMake executable if it is not on PATH.") - print() - print("Attempted to execute: %s" % cmake_exe) - sys.exit(1) - verbose_check_call( - [cmake_exe] + flags + [legion_src_dir], cwd=build_dir - ) - verbose_check_call( - ["make"] + make_flags + ["-j", str(thread_count), "install"], - cwd=build_dir, - ) - # TODO: install legion spy and legion prof - else: - version = pyversion.split(".") - flags = ( - [ - "LG_RT_DIR=%s" % (os.path.join(legion_src_dir, "runtime")), - "DEBUG=%s" % (1 if debug else 0), - "DEBUG_RELEASE=%s" % (1 if debug_release else 0), - "MAX_DIM=%s" % (str(maxdim)), - "MAX_FIELDS=%s" % (str(maxfields)), - "USE_CUDA=%s" % (1 if cuda else 0), - "GPU_ARCH=%s" % arch, - "USE_OPENMP=%s" % (1 if openmp else 0), - "MARCH=%s" % march, - "USE_LLVM=%s" % (1 if llvm else 0), - "USE_GASNET=%s" % (1 if gasnet else 0), - "USE_HDF=%s" % (1 if hdf else 0), - "PREFIX=%s" % (os.path.realpath(install_dir)), - "PYTHON_VERSION_MAJOR=%s" % version[0], - "PYTHON_VERSION_MINOR=%s" % version[1], - "PYTHON_LIB=%s" % pylib_name, - "FORCE_PYTHON=1", - "USE_COMPLEX=1", - "USE_HALF=1", - "USE_SPY=%s" % (1 if spy else 0), - "REALM_USE_CUDART_HIJACK=%s" % (1 if not no_hijack else 0), - ] - + extra_flags - + (["BOUNDS_CHECKS=1"] if check_bounds else []) - + (["GASNET=%s" % gasnet_dir] if gasnet_dir is not None else []) - + (["CONDUIT=%s" % conduit] if conduit is not None else []) - + (["CUDA=%s" % cuda_dir] if cuda_dir is not None else []) - ) - - legion_python_dir = os.path.join(legion_src_dir, "bindings", "python") - if clean_first: - verbose_check_call( - ["make"] + flags + ["clean"], cwd=legion_python_dir - ) - # Explicitly ask for C++17, otherwise the Legion build will use C++11. - env = dict(os.environ.items()) - env["CXXFLAGS"] = "-std=c++17 " + env.get("CXXFLAGS", "") - verbose_check_call( - ["make"] + flags + ["-j", str(thread_count), "install"], - cwd=legion_python_dir, - env=env, - ) - verbose_check_call( - [ - sys.executable, - "setup.py", - "install", - "--prefix", - str(os.path.realpath(install_dir)), - ] - + setup_py_flags, - cwd=legion_python_dir, - ) - src = os.path.join(legion_src_dir, "runtime", "legion", "legion_c_util.h") - dst = os.path.join(install_dir, "include", "legion", "legion_c_util.h") - if not os.path.exists(dst) or os.path.getmtime(dst) < os.path.getmtime( - src - ): - verbose_check_call(["cp", src, dst]) - verbose_check_call( - [ - "cp", - "legion_spy.py", - os.path.join(install_dir, "share", "legate", "legion_spy.py"), - ], - cwd=os.path.join(legion_src_dir, "tools"), - ) - verbose_check_call( - [ - "cp", - "legion_prof.py", - os.path.join(install_dir, "share", "legate", "legion_prof.py"), - ], - cwd=os.path.join(legion_src_dir, "tools"), - ) - verbose_check_call( - [ - "cp", - "legion_serializer.py", - os.path.join( - install_dir, "share", "legate", "legion_serializer.py" - ), - ], - cwd=os.path.join(legion_src_dir, "tools"), - ) - verbose_check_call( - [ - "cp", - "legion_prof_copy.html.template", - os.path.join( - install_dir, - "share", - "legate", - "legion_prof_copy.html.template", - ), - ], - cwd=os.path.join(legion_src_dir, "tools"), - ) - verbose_check_call( - [ - "cp", - "-r", - "legion_prof_files", - os.path.join(install_dir, "share", "legate", "legion_prof_files"), - ], - cwd=os.path.join(legion_src_dir, "tools"), - ) - - -def build_legate_core( - install_dir, - legate_core_dir, - cmake, - cmake_exe, - cuda_dir, - nccl_dir, - debug, - debug_release, - cuda, - arch, - openmp, - march, - spy, - 
gasnet, - clean_first, - thread_count, - verbose, - unknown, -): - src_dir = os.path.join(legate_core_dir, "src") - if cmake: - print("Warning: CMake is currently not supported for Legate build.") - print("Using GNU Make for now.") - - make_flags = [ - "LEGATE_DIR=%s" % install_dir, - "DEBUG=%s" % (1 if debug else 0), - "DEBUG_RELEASE=%s" % (1 if debug_release else 0), - "USE_CUDA=%s" % (1 if cuda else 0), - "USE_OPENMP=%s" % (1 if openmp else 0), - "MARCH=%s" % march, - "GPU_ARCH=%s" % arch, - "PREFIX=%s" % str(install_dir), - "USE_GASNET=%s" % (1 if gasnet else 0), - "NCCL_DIR=%s" % nccl_dir, - ] + (["CUDA=%s" % cuda_dir] if cuda_dir is not None else []) - if clean_first: - verbose_check_call(["make"] + make_flags + ["clean"], cwd=src_dir) - verbose_check_call( - ["make"] + make_flags + ["-j", str(thread_count), "install"], - cwd=src_dir, - ) - # Fill in config.mk.in and copy it to the target destination - with open(os.path.join(src_dir, "config.mk.in")) as f: - content = f.read() - content = content.format( - debug=repr(1 if debug else 0), - debug_release=repr(1 if debug_release else 0), - cuda=repr(1 if cuda else 0), - arch=(arch if arch is not None else ""), - cuda_dir=(cuda_dir if cuda_dir is not None else ""), - openmp=repr(1 if openmp else 0), - march=march, - gasnet=repr(1 if gasnet else 0), - ) - with open(os.path.join(src_dir, "config.mk"), "wb") as f: - f.write(content.encode("utf-8")) - cmd = ["cp", "config.mk", os.path.join(install_dir, "share", "legate")] - verbose_check_call(cmd, cwd=src_dir) - # Then run setup.py - cmd = [ - sys.executable, - "setup.py", - "install", - "--recurse", - ] + setup_py_flags - if unknown is not None: - try: - prefix_loc = unknown.index("--prefix") - cmd.extend(unknown[prefix_loc : prefix_loc + 2]) - except ValueError: - cmd += ["--prefix", str(install_dir)] - else: - cmd += ["--prefix", str(install_dir)] - verbose_check_call(cmd, cwd=legate_core_dir) - - def install( gasnet, cuda, @@ -575,8 +118,8 @@ def install( spy, conduit, nccl_dir, - cmake, cmake_exe, + cmake_generator, install_dir, gasnet_dir, pylib_name, @@ -588,19 +131,20 @@ def install( check_bounds, clean_first, extra_flags, + editable, + build_isolation, thread_count, verbose, thrust_dir, + legion_dir, + legion_url, legion_branch, unknown, ): - global verbose_global - verbose_global = verbose - legate_core_dir = os.path.dirname(os.path.realpath(__file__)) + join = os.path.join - cmake_config = os.path.join(legate_core_dir, ".cmake.json") - dump_json_config(cmake_config, cmake) + legate_core_dir = os.path.dirname(os.path.realpath(__file__)) if pylib_name is None: pyversion, pylib_name = find_active_python_version_and_path() @@ -612,221 +156,131 @@ def install( pyversion = match.group(1) print("Using python lib and version: {}, {}".format(pylib_name, pyversion)) - install_dir_config = os.path.join(legate_core_dir, ".install-dir.json") - if install_dir is None: - install_dir = load_json_config(install_dir_config) - if install_dir is None: - install_dir = os.path.join(legate_core_dir, "install") - install_dir = os.path.realpath(install_dir) - dump_json_config(install_dir_config, install_dir) - os.makedirs(os.path.join(install_dir, "share", "legate"), exist_ok=True) + if install_dir is not None: + install_dir = os.path.realpath(install_dir) if thread_count is None: thread_count = multiprocessing.cpu_count() - # Save the maxdim config - maxdim_config = os.path.join(legate_core_dir, ".maxdim.json") - # Check the max dimensions - if maxdim < 1 or maxdim > 9: - raise Exception( - "The maximum 
number of Legate dimensions must be between 1 and 9 " - "inclusive" - ) - dump_json_config(maxdim_config, str(maxdim)) + if thrust_dir is not None: + thrust_dir = os.path.realpath(thrust_dir) - # Save the maxfields config - maxfields_config = os.path.join(legate_core_dir, ".maxfields.json") - # Check that max fields is between 32 and 4096 and is a power of 2 - if maxfields not in [32, 64, 128, 256, 512, 1024, 2048, 4096]: - raise Exception( - "The maximum number of Legate fields must be a power of 2 between " - "32 and 4096 inclusive" - ) - dump_json_config(maxfields_config, str(maxfields)) + build_dir = join(legate_core_dir, "_skbuild") - # If the user asked for a conduit and we don't have gasnet then install it - if gasnet: - conduit_config = os.path.join(legate_core_dir, ".conduit.json") - if conduit is None: - conduit = load_json_config(conduit_config) - if conduit is None: - raise Exception( - "The first time you use GASNet you need to tell us " - 'which conduit to use with the "--conduit" flag' - ) - dump_json_config(conduit_config, conduit) - gasnet_config = os.path.join( - legate_core_dir, ".gasnet" + str(conduit) + ".json" + if clean_first: + shutil.rmtree(build_dir, ignore_errors=True) + shutil.rmtree(join(legate_core_dir, "dist"), ignore_errors=True) + shutil.rmtree(join(legate_core_dir, "build"), ignore_errors=True) + shutil.rmtree( + join(legate_core_dir, "legate.core.egg-info"), + ignore_errors=True, ) - if gasnet_dir is None: - gasnet_dir = load_json_config(gasnet_config) - if gasnet_dir is None: - gasnet_dir = os.path.join(install_dir, "gasnet") - if not os.path.exists(gasnet_dir): - install_gasnet(gasnet_dir, conduit, thread_count) - dump_json_config(gasnet_config, gasnet_dir) - - # If the user asked for CUDA, make sure we know where the install - # directory is - if cuda: - cuda_config = os.path.join(legate_core_dir, ".cuda.json") - if cuda_dir is None: - cuda_dir = load_json_config(cuda_config) - if cuda_dir is None: - raise Exception( - "The first time you use CUDA you need to tell Legate " - 'where CUDA is installed with the "--with-cuda" flag.' - ) - dump_json_config(cuda_config, cuda_dir) - - arch_config = os.path.join(legate_core_dir, ".arch.json") - if arch is None: - arch = load_json_config(arch_config) - if arch is None: - try: - import pynvml - - pynvml.nvmlInit() - major, minor = pynvml.nvmlDeviceGetCudaComputeCapability( - pynvml.nvmlDeviceGetHandleByIndex(0) - ) - arch = f"{major}{minor}" - pynvml.nvmlShutdown() - except Exception as exc: - raise Exception( - "Could not auto-detect CUDA GPU architecture, please " - "specify the target architecture using --arch" - ) from exc - dump_json_config(arch_config, arch) - - nccl_config = os.path.join(legate_core_dir, ".nccl.json") - if nccl_dir is None: - nccl_dir = load_json_config(nccl_config) - if nccl_dir is None: - raise Exception( - "The first time you use CUDA you need to tell Legate " - 'where NCCL is installed with the "--with-nccl" flag.' - ) - dump_json_config(nccl_config, nccl_dir) - - # install a stable version of Thrust - thrust_config = os.path.join(legate_core_dir, ".thrust.json") - if thrust_dir is None: - thrust_dir = load_json_config(thrust_config) - if thrust_dir is None: - thrust_dir = os.path.join(install_dir, "thrust") - thrust_dir = os.path.realpath(thrust_dir) - if not os.path.exists(thrust_dir): - install_thrust(thrust_dir) - # Simply put Thrust into the environment. 
- os.environ["CXXFLAGS"] = ( - "-I" + thrust_dir + " " + os.environ.get("CXXFLAGS", "") - ) - dump_json_config(thrust_config, thrust_dir) - - # Build Legion from scratch. - legion_src_dir = os.path.join(legate_core_dir, "legion") - if not os.path.exists(legion_src_dir): - install_legion(legion_src_dir, branch=legion_branch) - elif clean_first: - update_legion(legion_src_dir, branch=legion_branch) - build_legion( - legion_src_dir, - install_dir, - cmake, - cmake_exe, - cuda_dir, - debug, - debug_release, - check_bounds, - cuda, - arch, - openmp, - march, - llvm, - hdf, - spy, - gasnet, - gasnet_dir, - conduit, - pyversion, - pylib_name, - maxdim, - maxfields, - clean_first, - extra_flags, - thread_count, - verbose, - ) - build_legate_core( - install_dir, - legate_core_dir, - cmake, - cmake_exe, - cuda_dir, - nccl_dir, - debug, - debug_release, - cuda, - arch, - openmp, - march, - spy, - gasnet, - clean_first, - thread_count, - verbose, - unknown, - ) - # Copy any executables that we need for legate functionality - verbose_check_call( - ["cp", "legate.py", os.path.join(install_dir, "bin", "legate")], - cwd=legate_core_dir, - ) - verbose_check_call( - [ - "cp", - "scripts/lgpatch.py", - os.path.join(install_dir, "bin", "lgpatch"), - ], - cwd=legate_core_dir, - ) - verbose_check_call( - ["cp", "bind.sh", os.path.join(install_dir, "bin", "bind.sh")], - cwd=legate_core_dir, - ) - if cuda: - # Copy CUDA configuration that the launcher needs to find CUDA path - verbose_check_call( - [ - "cp", - ".cuda.json", - os.path.join(install_dir, "share", "legate", ".cuda.json"), - ], - cwd=legate_core_dir, - ) + # Configure and build legate.core via setup.py + pip_install_cmd = [sys.executable, "-m", "pip", "install"] - # Record the path to NCCL that was used in this build - libs_path = os.path.join(install_dir, "share", ".legate-libs.json") + if unknown is not None: try: - with open(libs_path, "r") as f: - libs_config = json.load(f) - except (FileNotFoundError, IOError, json.JSONDecodeError): - libs_config = {} - libs_config["nccl"] = nccl_dir - with open(libs_path, "w") as f: - json.dump(libs_config, f) + prefix_loc = unknown.index("--prefix") + pip_install_cmd += ["--root", "/"] + pip_install_cmd.extend(unknown[prefix_loc : prefix_loc + 2]) + except ValueError: + if install_dir is not None: + pip_install_cmd += [ + "--root", + "/", + "--prefix", + str(install_dir), + ] + else: + if install_dir is not None: + pip_install_cmd += ["--root", "/", "--prefix", str(install_dir)] - # Copy thrust configuration - verbose_check_call( - [ - "cp", - thrust_config, - os.path.join(install_dir, "share", "legate"), - ], - cwd=legate_core_dir, - ) + if editable: + pip_install_cmd += ["--no-deps", "--no-build-isolation", "--editable"] + else: + if not build_isolation: + pip_install_cmd += ["--no-deps", "--no-build-isolation"] + pip_install_cmd += ["--upgrade"] + + pip_install_cmd += ["."] + if verbose: + pip_install_cmd += ["-vv"] + + cmake_flags = [] + + if cmake_generator: + cmake_flags += [f"-G{cmake_generator}"] + + if debug or verbose: + cmake_flags += ["--log-level=%s" % ("DEBUG" if debug else "VERBOSE")] + + cmake_flags += f"""\ +-DCMAKE_BUILD_TYPE={( + "Debug" if debug else "RelWithDebInfo" if debug_release else "Release" +)} +-DBUILD_SHARED_LIBS=ON +-DBUILD_MARCH={march} +-DCMAKE_CUDA_ARCHITECTURES={arch} +-DLegion_MAX_DIM={str(maxdim)} +-DLegion_MAX_FIELDS={str(maxfields)} +-DLegion_SPY={("ON" if spy else "OFF")} +-DLegion_BOUNDS_CHECKS={("ON" if check_bounds else "OFF")} +-DLegion_USE_CUDA={("ON" if cuda else 
"OFF")} +-DLegion_USE_OpenMP={("ON" if openmp else "OFF")} +-DLegion_USE_LLVM={("ON" if llvm else "OFF")} +-DLegion_USE_GASNet={("ON" if gasnet else "OFF")} +-DLegion_USE_HDF5={("ON" if hdf else "OFF")} +-DLegion_USE_Python=ON +-DLegion_Python_Version={pyversion} +-DLegion_REDOP_COMPLEX=ON +-DLegion_REDOP_HALF=ON +-DLegion_BUILD_BINDINGS=ON +""".splitlines() + + if nccl_dir: + cmake_flags += ["-DNCCL_DIR=%s" % nccl_dir] + if gasnet_dir: + cmake_flags += ["-DGASNet_ROOT_DIR=%s" % gasnet_dir] + if conduit: + cmake_flags += ["-DGASNet_CONDUIT=%s" % conduit] + if cuda_dir: + cmake_flags += ["-DCUDA_TOOLKIT_ROOT_DIR=%s" % cuda_dir] + if thrust_dir: + cmake_flags += ["-DThrust_ROOT=%s" % thrust_dir] + if legion_dir: + cmake_flags += ["-DLegion_ROOT=%s" % legion_dir] + if legion_url: + cmake_flags += ["-Dlegate_core_LEGION_REPOSITORY=%s" % legion_url] + if legion_branch: + cmake_flags += ["-Dlegate_core_LEGION_BRANCH=%s" % legion_branch] + # Workaround until this PR is merged: + # https://gitlab.com/StanfordLegion/legion/-/merge_requests/523 + if install_dir is not None: + cmake_flags += ["-DLegion_CMAKE_INSTALL_PREFIX=%s" % install_dir] + + cmake_flags += extra_flags + cmd_env = dict(os.environ.items()) + cmd_env.update( + { + "SKBUILD_BUILD_OPTIONS": f"-j{str(thread_count)}", + "SKBUILD_CONFIGURE_OPTIONS": "\n".join(cmake_flags), + } + ) + + # execute python -m pip install . + execute_command(pip_install_cmd, verbose, cwd=legate_core_dir, env=cmd_env) + + # Install Legion if `legion_dir` a Legion build dir + if legion_dir is not None and os.path.exists( + join(legion_dir, "CMakeCache.txt") + ): + print(f"installing legion from '{legion_dir}'") + install_args = [cmake_exe, "--install", legion_dir] + if install_dir is not None: + install_args += ["--prefix", install_dir] + # Install Legion if legion_dir is a path to its build dir + execute_command(install_args, verbose) def driver(): @@ -836,6 +290,7 @@ def driver(): dest="install_dir", metavar="DIR", required=False, + default=None, help="Path to install all Legate-related software", ) parser.add_argument( @@ -913,7 +368,7 @@ def driver(): dest="arch", action="store", required=False, - default=None, + default="NATIVE", help="Specify the target GPU architecture.", ) parser.add_argument( @@ -986,12 +441,6 @@ def driver(): "install script." ), ) - parser.add_argument( - "--cmake", - action=BooleanFlag, - default=os.environ.get("USE_CMAKE", "0") == "1", - help="Build Legate with CMake instead of GNU Make.", - ) parser.add_argument( "--with-cmake", dest="cmake_exe", @@ -1000,6 +449,14 @@ def driver(): default="cmake", help="Path to CMake executable (if not on PATH).", ) + parser.add_argument( + "--cmake-generator", + dest="cmake_generator", + required=False, + default="Ninja", + choices=["Ninja", "Unix Makefiles"], + help="The CMake makefiles generator", + ) parser.add_argument( "--clean", dest="clean_first", @@ -1013,7 +470,7 @@ def driver(): action="append", required=False, default=[], - help="Extra flags for make command.", + help="Extra CMake flags.", ) parser.add_argument( "-j", @@ -1022,6 +479,25 @@ def driver(): type=int, help="Number of threads used to compile.", ) + parser.add_argument( + "--editable", + dest="editable", + action="store_true", + required=False, + default=False, + help="Perform an editable install. 
Disables --build-isolation if set " + "(passing --no-deps --no-build-isolation to pip).", + ) + parser.add_argument( + "--build-isolation", + dest="build_isolation", + action=BooleanFlag, + required=False, + default=True, + help="Enable isolation when building a modern source distribution. " + "Build dependencies specified by PEP 518 must be already " + "installed if this option is used.", + ) parser.add_argument( "-v", "--verbose", @@ -1037,9 +513,23 @@ def driver(): required=False, default=os.environ.get("THRUST_PATH"), help="Path to Thrust installation directory. The required version of " - "Thrust is " + required_thrust_version + " or compatible. If not " + "Thrust is cuda-11.2 or compatible. If not " "provided, Thrust will be installed automatically.", ) + parser.add_argument( + "--legion-dir", + dest="legion_dir", + required=False, + default=None, + help="Path to an existing Legion build directory.", + ) + parser.add_argument( + "--legion-url", + dest="legion_url", + required=False, + default="https://gitlab.com/StanfordLegion/legion.git", + help="Legion git URL to build Legate with.", + ) parser.add_argument( "--legion-branch", dest="legion_branch", @@ -1048,6 +538,23 @@ def driver(): help="Legion branch to build Legate with.", ) args, unknown = parser.parse_known_args() + + try: + subprocess.check_output([args.cmake_exe, "--version"]) + except OSError: + print( + "Error: CMake is not installed or otherwise not executable. " + "Please check" + ) + print( + "your CMake installation and try again. You can use the " + "--with-cmake flag" + ) + print("to specify the CMake executable if it is not on PATH.") + print() + print("Attempted to execute: %s" % args.cmake_exe) + sys.exit(1) + install(unknown=unknown, **vars(args)) diff --git a/legate/__init__.py b/legate/__init__.py index 1d0e157bb..d6b6ecd21 100644 --- a/legate/__init__.py +++ b/legate/__init__.py @@ -13,7 +13,6 @@ # limitations under the License. # from __future__ import annotations +from . 
import _version -from pkgutil import extend_path - -__path__ = extend_path(__path__, __name__) +__version__ = _version.get_versions()["version"] # type: ignore[no-untyped-call] diff --git a/legate.py b/legate/__main__.py similarity index 72% rename from legate.py rename to legate/__main__.py index 584208227..7a0ece96f 100755 --- a/legate.py +++ b/legate/__main__.py @@ -16,7 +16,6 @@ # import argparse -import json import os import platform import shlex @@ -35,14 +34,6 @@ raise Exception("Legate does not work on %s" % platform.system()) -def load_json_config(filename): - try: - with open(filename, "r") as f: - return json.load(f) - except IOError: - return None - - def read_c_define(header_path, def_name): try: with open(header_path, "r") as f: @@ -58,16 +49,24 @@ def read_c_define(header_path, def_name): return None -def read_conduit(legate_dir): - realm_defines = os.path.join(legate_dir, "include", "realm_defines.h") +def read_conduit(realm_defines_h): for conduit in ["ibv", "ucx", "aries", "mpi", "udp"]: - if read_c_define(realm_defines, f"GASNET_CONDUIT_{conduit.upper()}"): + if read_c_define(realm_defines_h, f"GASNET_CONDUIT_{conduit.upper()}"): return conduit raise Exception("Could not detect a supported GASNet conduit") -def find_python_module(legate_dir): - lib_dir = os.path.join(legate_dir, "lib") +def read_cmake_var(pattern, filepath): + return ( + subprocess.check_output(["grep", "--color=never", pattern, filepath]) + .decode("UTF-8") + .strip() + .split("=")[1] + ) + + +def get_python_site_packages_path(legion_dir): + lib_dir = os.path.join(legion_dir, "lib") python_lib = None for f in os.listdir(lib_dir): if f.startswith("python") and not os.path.isfile(f): @@ -87,6 +86,191 @@ def find_python_module(legate_dir): return python_lib +def get_legate_build_dir(legate_dir): + join = os.path.join + exists = os.path.exists + # If using a local non-scikit-build CMake build dir, read + # Legion_BINARY_DIR and Legion_SOURCE_DIR from CMakeCache.txt + if exists(legate_build_dir := join(legate_dir, "build")) and exists( + join(legate_build_dir, "CMakeCache.txt") + ): + pass + elif exists(_skbuild_dir := join(legate_dir, "_skbuild")): + for f in os.listdir(_skbuild_dir): + # If using a local scikit-build dir at _skbuild//cmake-build, + # read Legion_BINARY_DIR and Legion_SOURCE_DIR from CMakeCache.txt + if exists( + legate_build_dir := join(_skbuild_dir, f, "cmake-build") + ) and exists(join(legate_build_dir, "CMakeCache.txt")): + cmake_cache_txt = join(legate_build_dir, "CMakeCache.txt") + try: + # Test whether FIND_LEGATE_CORE_CPP is set to ON. If it + # isn't, then we built legate_core C++ as a side-effect of + # building legate_core_python. + read_cmake_var( + "FIND_LEGATE_CORE_CPP:BOOL=OFF", cmake_cache_txt + ) + except Exception: + # If FIND_LEGATE_CORE_CPP is set to ON, check to see if + # legate_core_DIR is a valid path. If it is, check whether + # legate_core_DIR is a path to a legate_core build dir i.e. + # `-D legate_core_ROOT=/legate.core/build` + legate_core_dir = read_cmake_var( + "legate_core_DIR:PATH=", cmake_cache_txt + ) + # If legate_core_dir doesn't have a CMakeCache.txt, CMake's + # find_package found a system legate_core installation. + # Return the installation paths. 
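The launcher's probing of a local `_skbuild` tree can be summarized as follows. This is a minimal standalone sketch, not the shipped helper; the checkout path and platform tag in the example are hypothetical (scikit-build typically names the directory after the platform and Python version).

```
import os
from typing import Optional

def find_skbuild_cmake_build(legate_dir: str) -> Optional[str]:
    """Return the first _skbuild/<platform-tag>/cmake-build directory that
    contains a CMakeCache.txt, or None if no local scikit-build tree exists."""
    skbuild_root = os.path.join(legate_dir, "_skbuild")
    if not os.path.isdir(skbuild_root):
        return None
    for entry in sorted(os.listdir(skbuild_root)):
        # e.g. "linux-x86_64-3.10" (the exact tag depends on the system)
        candidate = os.path.join(skbuild_root, entry, "cmake-build")
        if os.path.exists(os.path.join(candidate, "CMakeCache.txt")):
            return candidate
    return None

# Hypothetical usage against a source checkout:
print(find_skbuild_cmake_build(os.path.expanduser("~/legate.core")))
```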
+ if os.path.exists( + cmake_cache_txt := join( + legate_core_dir, "CMakeCache.txt" + ) + ): + return read_cmake_var( + "legate_core_BINARY_DIR:STATIC=", cmake_cache_txt + ) + return None + return legate_build_dir + legate_build_dir = None + else: + legate_build_dir = None + return legate_build_dir + + +def get_legate_paths(): + import legate + + join = os.path.join + dirname = os.path.dirname + + legate_dir = dirname(legate.__path__[0]) + legate_build_dir = get_legate_build_dir(legate_dir) + + if legate_build_dir is None: + return { + "legate_dir": legate_dir, + "legate_build_dir": legate_build_dir, + "bind_sh_path": join(dirname(dirname(sys.argv[0])), "bind.sh"), + "legate_lib_path": join(dirname(dirname(sys.argv[0])), "lib"), + } + + cmake_cache_txt = join(legate_build_dir, "CMakeCache.txt") + legate_source_dir = read_cmake_var( + "legate_core_SOURCE_DIR:STATIC=", cmake_cache_txt + ) + legate_binary_dir = read_cmake_var( + "legate_core_BINARY_DIR:STATIC=", cmake_cache_txt + ) + + return { + "legate_dir": legate_dir, + "legate_build_dir": legate_build_dir, + "bind_sh_path": join(legate_source_dir, "bind.sh"), + "legate_lib_path": join(legate_binary_dir, "lib"), + } + + +def get_legion_paths(legate_dir, legate_build_dir=None): + + # + # Construct and return paths needed to launch `legion_python`,accounting + # for multiple ways Legion and legate_core may be configured or installed. + # + # 1. Legion was found in a standard system location (/usr, $CONDA_PREFIX) + # 2. Legion was built as a side-effect of building legate_core: + # ``` + # SKBUILD_CONFIGURE_OPTIONS="" python -m pip install . + # ``` + # 3. Legion was built in a separate directory independent of legate_core + # and the path to its build directory was given when configuring + # legate_core: + # ``` + # SKBUILD_CONFIGURE_OPTIONS="-D Legion_ROOT=/legion/build" \ + # python -m pip install . + # ``` + # + # Additionally, legate_core has multiple run modes: + # + # 1. As an installed Python module (`python -m pip install .`) + # 2. As an "editable" install (`python -m pip install --editable .`) + # + # When determining locations of Legion and legate_core paths, prioritize + # local builds over global installations. This allows devs to work in the + # source tree and re-run without overwriting existing installations. + # + + join = os.path.join + dirname = os.path.dirname + + def installed_legion_paths(legion_dir, legion_module=None): + return { + "legion_bin_path": join(legion_dir, "bin"), + "legion_lib_path": join(legion_dir, "lib"), + "realm_defines_h": join(legion_dir, "include", "realm_defines.h"), + "legion_defines_h": join( + legion_dir, "include", "legion_defines.h" + ), + "legion_spy_py": join(legion_dir, "bin", "legion_spy.py"), + "legion_prof_py": join(legion_dir, "bin", "legion_prof.py"), + "legion_python": join(legion_dir, "bin", "legion_python"), + "legion_module": legion_module, + } + + if legate_build_dir is None: + legate_build_dir = get_legate_build_dir(legate_dir) + + # If no local build dir found, assume legate installed into the python env + if legate_build_dir is None: + return installed_legion_paths(dirname(dirname(sys.argv[0]))) + + # If a legate build dir was found, read `Legion_SOURCE_DIR` and + # `Legion_BINARY_DIR` from CMakeCache.txt and return paths into the source + # and/or build dirs. This allows devs to quickly rebuild inplace and use + # the most up-to-date versions without needing to install Legion and/or + # legate_core globally. 
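The grep-based `read_cmake_var` helper above extracts values such as `Legion_SOURCE_DIR:STATIC=...` from a CMakeCache.txt. A rough pure-Python equivalent, shown only for illustration (the file path in the usage comment is hypothetical), makes the mechanism explicit:

```
import re
from typing import Optional

def read_cmake_cache_var(cache_path: str, name: str) -> Optional[str]:
    """Return the value recorded for `name` in a CMakeCache.txt entry of the
    form NAME:TYPE=VALUE, or None if the entry is absent."""
    pattern = re.compile(r"^%s(?::[A-Z]+)?=(.*)$" % re.escape(name))
    with open(cache_path) as f:
        for line in f:
            m = pattern.match(line.strip())
            if m:
                return m.group(1)
    return None

# Hypothetical usage against a local Legion build directory:
# read_cmake_cache_var("/path/to/legion/build/CMakeCache.txt", "Legion_SOURCE_DIR")
```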
+ + cmake_cache_txt = join(legate_build_dir, "CMakeCache.txt") + + try: + # Test whether Legion_DIR is set. If it isn't, then we built Legion as + # a side-effect of building legate_core + read_cmake_var("Legion_DIR:PATH=Legion_DIR-NOTFOUND", cmake_cache_txt) + except Exception: + # If Legion_DIR is a valid path, check whether it's a system install + # or a path to a Legion build dir, i.e. `-D Legion_ROOT=/legion/build` + legion_dir = read_cmake_var("Legion_DIR:PATH=", cmake_cache_txt) + # If legion_dir doesn't have a CMakeCache.txt, CMake's find_package + # found a system Legion installation. Return the installation paths. + if not os.path.exists( + cmake_cache_txt := join(legion_dir, "CMakeCache.txt") + ): + return installed_legion_paths(dirname(dirname(sys.argv[0]))) + + legion_source_dir = read_cmake_var( + "Legion_SOURCE_DIR:STATIC=", cmake_cache_txt + ) + legion_binary_dir = read_cmake_var( + "Legion_BINARY_DIR:STATIC=", cmake_cache_txt + ) + + return { + "legion_bin_path": join(legion_binary_dir, "bin"), + "legion_lib_path": join(legion_binary_dir, "lib"), + "realm_defines_h": join( + legion_binary_dir, "runtime", "realm_defines.h" + ), + "legion_defines_h": join( + legion_binary_dir, "runtime", "legion_defines.h" + ), + "legion_spy_py": join(legion_source_dir, "tools", "legion_spy.py"), + "legion_prof_py": join(legion_source_dir, "tools", "legion_prof.py"), + "legion_python": join(legion_binary_dir, "bin", "legion_python"), + "legion_module": join( + legion_source_dir, "bindings", "python", "build", "lib" + ), + } + + def run_legate( ranks, ranks_per_node, @@ -131,51 +315,52 @@ def run_legate( nic_bind, launcher_extra, ): + if verbose: + print("legate: ", str(sys.argv[0])) # Build the environment for the subprocess invocation - legate_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + legate_paths = get_legate_paths() + legate_dir = legate_paths["legate_dir"] + bind_sh_path = legate_paths["bind_sh_path"] + legate_lib_path = legate_paths["legate_lib_path"] + legate_build_dir = legate_paths["legate_build_dir"] + if verbose: + print("legate_dir: ", legate_dir) + print("bind_sh_path: ", bind_sh_path) + print("legate_lib_path: ", legate_lib_path) + print("legate_build_dir:", legate_build_dir) + + legion_paths = get_legion_paths(legate_dir, legate_build_dir) + legion_lib_path = legion_paths["legion_lib_path"] + realm_defines_h = legion_paths["realm_defines_h"] + legion_defines_h = legion_paths["legion_defines_h"] + legion_spy_py = legion_paths["legion_spy_py"] + legion_prof_py = legion_paths["legion_prof_py"] + legion_python = legion_paths["legion_python"] + legion_module = legion_paths["legion_module"] + if verbose: + print("legion_lib_path: ", legion_lib_path) + print("realm_defines_h: ", realm_defines_h) + print("legion_defines_h:", legion_defines_h) + print("legion_spy_py: ", legion_spy_py) + print("legion_prof_py: ", legion_prof_py) + print("legion_python: ", legion_python) + print("legion_module: ", legion_module) + cmd_env = dict(os.environ.items()) - env_json = load_json_config( - os.path.join(legate_dir, "share", ".legate-env.json") - ) - if env_json is not None: - append_vars = env_json.get("APPEND_VARS", []) - append_vars_tuplified = [tuple(var) for var in append_vars] - for (k, v) in append_vars_tuplified: - if k not in cmd_env: - cmd_env[k] = v - else: - cmd_env[k] = cmd_env[k] + os.pathsep + v - vars = env_json.get("VARS", []) - vars_tuplified = [tuple(var) for var in vars] - for (k, v) in vars_tuplified: - cmd_env[k] = v - libs_json = load_json_config( - 
os.path.join(legate_dir, "share", ".legate-libs.json") - ) - if libs_json is not None: - for lib_dir in libs_json.values(): - if LIB_PATH not in cmd_env: - cmd_env[LIB_PATH] = os.path.join(lib_dir, "lib") - else: - cmd_env[LIB_PATH] = ( - os.path.join(lib_dir, "lib") - + os.pathsep - + cmd_env[LIB_PATH] - ) # We never want to save python byte code for legate cmd_env["PYTHONDONTWRITEBYTECODE"] = "1" # Set the path to the Legate module as an environment variable # The current directory should be added to PYTHONPATH as well - if "PYTHONPATH" in cmd_env: - cmd_env["PYTHONPATH"] += os.pathsep + "" - else: - cmd_env["PYTHONPATH"] = "" - cmd_env["PYTHONPATH"] += os.pathsep + find_python_module(legate_dir) + if legion_module is not None: + cmd_env["PYTHONPATH"] = os.pathsep.join( + ([cmd_env["PYTHONPATH"]] if ("PYTHONPATH" in cmd_env) else []) + + [legion_module] + ) + # Make sure the version of Python used by Realm is the same as what the # user is using currently. curr_pyhome = os.path.dirname(os.path.dirname(sys.executable)) - realm_defines = os.path.join(legate_dir, "include", "realm_defines.h") - realm_pylib = read_c_define(realm_defines, "REALM_PYTHON_LIB") + realm_pylib = read_c_define(realm_defines_h, "REALM_PYTHON_LIB") realm_pyhome = os.path.dirname(os.path.dirname(realm_pylib.strip()[1:-1])) if curr_pyhome != realm_pyhome: print( @@ -192,60 +377,54 @@ def run_legate( # Set some environment variables depending on our configuration that we # will check in the Legate binary to ensure that it is properly configured # Always make sure we include the Legion library - if LIB_PATH not in cmd_env: - cmd_env[LIB_PATH] = os.path.join(legate_dir, "lib") - else: - cmd_env[LIB_PATH] = ( - os.path.join(legate_dir, "lib") + os.pathsep + cmd_env[LIB_PATH] - ) - cuda_config = os.path.join(legate_dir, "share", "legate", ".cuda.json") - cuda_dir = load_json_config(cuda_config) - if gpus > 0 and cuda_dir is None: - raise ValueError( - "Requested execution with GPUs but " - + "Legate was not built with GPU support" - ) + cmd_env[LIB_PATH] = os.pathsep.join( + [legion_lib_path, legate_lib_path] + + ([cmd_env[LIB_PATH]] if (LIB_PATH in cmd_env) else []) + ) + if gpus > 0: assert "LEGATE_NEED_CUDA" not in cmd_env cmd_env["LEGATE_NEED_CUDA"] = str(1) - cmd_env[LIB_PATH] += os.pathsep + os.path.join(cuda_dir, "lib") - cmd_env[LIB_PATH] += os.pathsep + os.path.join(cuda_dir, "lib64") + if openmp > 0: assert "LEGATE_NEED_OPENMP" not in cmd_env cmd_env["LEGATE_NEED_OPENMP"] = str(1) + if ranks > 1: assert "LEGATE_NEED_GASNET" not in cmd_env cmd_env["LEGATE_NEED_GASNET"] = str(1) + if progress: assert "LEGATE_SHOW_PROGREES" not in cmd_env cmd_env["LEGATE_SHOW_PROGRESS"] = str(1) + if no_tensor_cores: assert "LEGATE_DISABLE_TENSOR_CORES" not in cmd_env cmd_env["LEGATE_DISABLE_TENSOR_CORES"] = str(1) + if mem_usage: assert "LEGATE_SHOW_USAGE" not in cmd_env cmd_env["LEGATE_SHOW_USAGE"] = str(1) + # Configure certain limits - defines_path = os.path.join( - os.path.join(legate_dir, "include"), "legion_defines.h" - ) - if "LEGATE_MAX_DIM" not in os.environ: - cmd_env["LEGATE_MAX_DIM"] = read_c_define( - defines_path, "LEGION_MAX_DIM" - ) - assert cmd_env["LEGATE_MAX_DIM"] is not None - if "LEGATE_MAX_FIELDS" not in os.environ: - cmd_env["LEGATE_MAX_FIELDS"] = read_c_define( - defines_path, "LEGION_MAX_FIELDS" - ) - assert cmd_env["LEGATE_MAX_FIELDS"] is not None + cmd_env["LEGATE_MAX_DIM"] = os.environ.get( + "LEGATE_MAX_DIM" + ) or read_c_define(legion_defines_h, "LEGION_MAX_DIM") + cmd_env["LEGATE_MAX_FIELDS"] = 
os.environ.get( + "LEGATE_MAX_FIELDS" + ) or read_c_define(legion_defines_h, "LEGION_MAX_FIELDS") + assert cmd_env["LEGATE_MAX_DIM"] is not None + assert cmd_env["LEGATE_MAX_FIELDS"] is not None + # Special run modes if freeze_on_error: cmd_env["LEGION_FREEZE_ON_ERROR"] = str(1) + # Debugging options cmd_env["REALM_BACKTRACE"] = str(1) if gasnet_trace: cmd_env["GASNET_TRACEFILE"] = os.path.join(log_dir, "gasnet_%.log") + # Add launcher if launcher == "mpirun": # TODO: $OMPI_COMM_WORLD_RANK will only work for OpenMPI and IBM @@ -333,12 +512,11 @@ def run_legate( raise Exception("Unsupported launcher: %s" % launcher) cmd += launcher_extra # Add any wrappers before the executable - binary_dir = os.path.join(legate_dir, "bin") if any(f is not None for f in [cpu_bind, mem_bind, gpu_bind, nic_bind]): - cmd.append(os.path.join(binary_dir, "bind.sh")) + cmd.append(bind_sh_path) try: - conduit = read_conduit(legate_dir) + conduit = read_conduit(realm_defines_h) cmd += [launcher, conduit] except Exception: cmd += ["local", "local"] @@ -406,7 +584,7 @@ def run_legate( if memcheck: cmd += ["cuda-memcheck"] # Now we're ready to build the actual command to run - cmd += [os.path.join(binary_dir, "legion_python")] + cmd += [legion_python] # This has to go before script name if not_control_replicable: cmd += ["--nocr"] @@ -526,9 +704,7 @@ def run_legate( # we're done; make sure we only do this once if on a multi-rank run with # externally-managed launching if profile and (launcher != "none" or rank_id == "0"): - tools_dir = os.path.join(legate_dir, "share", "legate") - prof_py = os.path.join(tools_dir, "legion_prof.py") - prof_cmd = [str(prof_py), "-o", "legate_prof"] + prof_cmd = [str(legion_prof_py), "-o", "legate_prof"] for n in range(ranks): prof_cmd += ["legate_" + str(n) + ".prof"] if ranks // ranks_per_node > 4: @@ -552,9 +728,7 @@ def run_legate( os.remove(os.path.join(log_dir, "legate_" + str(n) + ".prof")) # Similarly for spy runs if (dataflow or event) and (launcher != "none" or rank_id == "0"): - tools_dir = os.path.join(legate_dir, "share", "legate") - spy_py = os.path.join(tools_dir, "legion_spy.py") - spy_cmd = [str(spy_py)] + spy_cmd = [str(legion_spy_py)] if dataflow and event: spy_cmd += ["-de"] elif dataflow: diff --git a/legate/_version.py b/legate/_version.py new file mode 100644 index 000000000..5e43ebaf4 --- /dev/null +++ b/legate/_version.py @@ -0,0 +1,703 @@ +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.22 (https://github.com/python-versioneer/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import functools +import os +import re +import subprocess +import sys +from typing import Callable, Dict + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
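The `$Format:%d$` and `$Format:%H$` placeholders below are substituted by `git archive` when a release tarball is produced. As a sketch of what `git_versions_from_keywords` then does with the expanded refnames (the sample values here are hypothetical, not taken from a real archive):

```
# Hypothetical expanded keywords, as git archive might substitute them:
refnames = " (HEAD -> branch-22.07, tag: v22.07.00, origin/branch-22.07)"
tag_prefix = "v"

refs = {r.strip() for r in refnames.strip().strip("()").split(",")}
tags = sorted(r[len("tag: "):] for r in refs if r.startswith("tag: "))
versions = [t[len(tag_prefix):] for t in tags if t.startswith(tag_prefix)]
print(versions)  # ['22.07.00'] -> reported as the package version
```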
+ git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + git_date = "$Format:%ci$" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440" + cfg.tag_prefix = "v" + cfg.parentdir_prefix = "legate-" + cfg.versionfile_source = "legate/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + + return decorate + + +def run_command( + commands, args, cwd=None, verbose=False, hide_stderr=False, env=None +): + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + + popen_kwargs = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen( + [command] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + **popen_kwargs, + ) + break + except OSError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, process.returncode + return stdout, process.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. 
When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = {r for r in refs if re.search(r"\d", r)} + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix) :] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r"\d", r): + continue + if verbose: + print("picking %s" % r) + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. + env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner( + GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True + ) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + MATCH_ARGS = ["--match", "%s*" % tag_prefix] if tag_prefix else [] + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner( + GITS, + ["describe", "--tags", "--dirty", "--always", "--long", *MATCH_ARGS], + cwd=root, + ) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner( + GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root + ) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. 
+ branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[: git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + if not mo: + # unparsable. Maybe git-describe is misbehaving? + pieces["error"] = ( + "unable to parse git-describe output: '%s'" % describe_out + ) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix) :] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ + 0 + ].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + if pieces["distance"]: + # update the post release segment + tag_version, post_version = pep440_split_post( + pieces["closest-tag"] + ) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % ( + post_version + 1, + pieces["distance"], + ) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] + else: + # exception #1 + rendered = "0.post0.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords( + get_keywords(), cfg.tag_prefix, verbose + ) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
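Since `cfg.style` defaults to "pep440", a worked example of how the `render_pep440` logic above turns a set of pieces into a version string may help; the pieces values are hypothetical (tag v22.07.00, 15 commits ahead, dirty working tree).

```
# Hypothetical pieces, as git_pieces_from_vcs would produce them:
pieces = {
    "closest-tag": "22.07.00",  # tag prefix "v" already stripped
    "distance": 15,
    "short": "1a2b3c4",
    "dirty": True,
}

rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
    rendered += "+" if "+" not in pieces["closest-tag"] else "."
    rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
    if pieces["dirty"]:
        rendered += ".dirty"
print(rendered)  # 22.07.00+15.g1a2b3c4.dirty
```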
+ for _ in cfg.versionfile_source.split("/"): + root = os.path.dirname(root) + except NameError: + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None, + } + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } diff --git a/legate/core/install_info.py.in b/legate/core/install_info.py.in index 9e7b3a515..2e73489a9 100644 --- a/legate/core/install_info.py.in +++ b/legate/core/install_info.py.in @@ -1,4 +1,4 @@ -# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2020-2022, NVIDIA CORPORATION. All rights reserved. # # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation @@ -10,9 +10,21 @@ # # IMPORTANT: -# * header.py.in is used as an input to string.format() -# * header.py is a generated file and should not be modified by hand +# * install_info.py is a generated file and should not be modified by hand from __future__ import annotations -header: str = {header} -libpath: str = {libpath} +def get_libpath(): + import os, sys + join = os.path.join + exists = os.path.exists + dirname = os.path.dirname + lg_path = dirname(dirname(dirname(__file__))) + if exists(libdir := join(lg_path, "build", "lib")): + return libdir + if exists(libdir := join(dirname(dirname(sys.executable)), "lib")): + return libdir + return "" + + +libpath: str = get_libpath() +header: str = """@header@""" diff --git a/scripts/lgpatch.py b/legate/lgpatch.py similarity index 96% rename from scripts/lgpatch.py rename to legate/lgpatch.py index 4443fab8b..dd78dd5d9 100755 --- a/scripts/lgpatch.py +++ b/legate/lgpatch.py @@ -16,6 +16,7 @@ import sys import textwrap from argparse import ArgumentParser, RawDescriptionHelpFormatter +from typing import Any KNOWN_PATCHES = {"numpy": "cunumeric"} @@ -37,7 +38,7 @@ """ -def parse_args(): +def parse_args() -> Any: parser = ArgumentParser( prog="lgpatch", description=DESCRIPTION, @@ -79,7 +80,7 @@ def do_patch(name: str) -> None: raise RuntimeError(f"Could not import patch module {cuname}") -if __name__ == "__main__": +def main() -> None: args, extra = parse_args() for name in args.patch: @@ -89,3 +90,7 @@ def do_patch(name: str) -> None: with open(args.prog) as f: exec(f.read(), {"__name__": "__main__"}) + + +if __name__ == "__main__": + main() diff --git a/legate_core_cpp.cmake b/legate_core_cpp.cmake new file mode 100644 index 000000000..edfb706da --- /dev/null +++ b/legate_core_cpp.cmake @@ -0,0 +1,413 @@ +#============================================================================= +# Copyright 2022 NVIDIA Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +############################################################################## +# - User Options ------------------------------------------------------------ + +include(cmake/Modules/legate_core_options.cmake) + +############################################################################## +# - Project definition ------------------------------------------------------- + +# Write the version header +rapids_cmake_write_version_file(include/legate/version_config.hpp) + +# Needed to integrate with LLVM/clang tooling +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +############################################################################## +# - Build Type --------------------------------------------------------------- + +# Set a default build type if none was specified +rapids_cmake_build_type(Release) + +# ################################################################################################## +# * conda environment ----------------------------------------------------------------------------- +rapids_cmake_support_conda_env(conda_env MODIFY_PREFIX_PATH) + +############################################################################## +# - Dependencies ------------------------------------------------------------- + +# add third party dependencies using CPM +rapids_cpm_init(OVERRIDE ${CMAKE_CURRENT_SOURCE_DIR}/cmake/versions.json) + +macro(_find_package_Python3) + find_package(Python3 REQUIRED COMPONENTS Interpreter Development) + message(VERBOSE "legate.core: Has Python3: ${Python3_FOUND}") + message(VERBOSE "legate.core: Has Python 3 interpreter: ${Python3_Interpreter_FOUND}") + message(VERBOSE "legate.core: Python 3 include directories: ${Python3_INCLUDE_DIRS}") + message(VERBOSE "legate.core: Python 3 libraries: ${Python3_LIBRARIES}") + message(VERBOSE "legate.core: Python 3 library directories: ${Python3_LIBRARY_DIRS}") + message(VERBOSE "legate.core: Python 3 version: ${Python3_VERSION}") +endmacro() + +# CUDA initialization might need to happen at different times depending on +# how Legion is built. If building Legion inline, CUDA must be enabled +# BEFORE get_legion.cmake because of how Legion handles CMAKE_CUDA_ARCHITECURES. +# If using an external Legion, CUDA must be enabled AFTER get_legion.cmake. +# This function executes all the enable CUDA functions with a boolean guard +# to make sure it is only executed once. +macro(_enable_cuda_language) + if (NOT legate_core_CUDA_ENABLED) + include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules/cuda_arch_helpers.cmake) + # Needs to run before `rapids_cuda_init_architectures` + set_cuda_arch_from_names() + # Must come before `enable_language(CUDA)` + rapids_cuda_init_architectures(legate_core) + # Enable the CUDA language + enable_language(CUDA) + # Since legate_core only enables CUDA optionally we need to manually include + # the file that rapids_cuda_init_architectures relies on `project` calling + if(CMAKE_PROJECT_legate_core_INCLUDE) + include("${CMAKE_PROJECT_legate_core_INCLUDE}") + endif() + # Must come after `enable_language(CUDA)` + # Use `-isystem ` instead of `-isystem=` + # because the former works with clangd intellisense + set(CMAKE_INCLUDE_SYSTEM_FLAG_CUDA "-isystem ") + # set to TRUE so the macro does not repeat if called again. 
+ set(legate_core_CUDA_ENABLED TRUE) + endif() +endmacro() + +if(Legion_USE_Python) + _find_package_Python3() + if(Python3_FOUND AND Python3_VERSION) + set(Legion_Python_Version ${Python3_VERSION}) + endif() +endif() + +if(Legion_USE_CUDA) + _enable_cuda_language() +endif() + +### +# If we find Legion already configured on the system, it will report whether it +# was compiled with Python (Legion_USE_PYTHON), CUDA (Legion_USE_CUDA), OpenMP +# (Legion_USE_OpenMP), and GASNet (Legion_USE_GASNet). +# +# We use the same variables as Legion because we want to enable/disable each of +# these features based on how Legion was configured (it doesn't make sense to +# build legate.core's Python bindings if Legion's bindings weren't compiled). +### +include(cmake/thirdparty/get_legion.cmake) + +# If Legion_USE_Python was toggled ON by find_package(Legion), find Python3 +if(Legion_USE_Python AND (NOT Python3_FOUND)) + _find_package_Python3() +endif() + +if(Legion_USE_GASNet) + find_package(MPI REQUIRED) +endif() + +if(Legion_USE_CUDA) + # If CUDA has not yet been enabled, make sure it is now. + _enable_cuda_language() + # Find the CUDAToolkit + rapids_find_package( + CUDAToolkit REQUIRED + BUILD_EXPORT_SET legate-core-exports + INSTALL_EXPORT_SET legate-core-exports + ) + # Find NCCL + include(cmake/thirdparty/get_nccl.cmake) +endif() + +# Find or install Thrust +include(cmake/thirdparty/get_thrust.cmake) + +############################################################################## +# - legate.core -------------------------------------------------------------- + +set(legate_core_SOURCES "") +set(legate_core_CXX_DEFS "") +set(legate_core_CUDA_DEFS "") +set(legate_core_CXX_OPTIONS "") +set(legate_core_CUDA_OPTIONS "") + +include(cmake/Modules/set_cpu_arch_flags.cmake) +set_cpu_arch_flags(legate_core_CXX_OPTIONS) + +if(NOT CMAKE_BUILD_TYPE STREQUAL "Release") + list(APPEND legate_core_CXX_DEFS DEBUG_LEGATE) + list(APPEND legate_core_CUDA_DEFS DEBUG_LEGATE) +endif() + +if(Legion_USE_CUDA) + list(APPEND legate_core_CXX_DEFS LEGATE_USE_CUDA) + list(APPEND legate_core_CUDA_DEFS LEGATE_USE_CUDA) + + add_cuda_architecture_defines(legate_core_CUDA_DEFS) + + list(APPEND legate_core_CUDA_OPTIONS -Xfatbin=-compress-all) + list(APPEND legate_core_CUDA_OPTIONS --expt-extended-lambda) + list(APPEND legate_core_CUDA_OPTIONS --expt-relaxed-constexpr) +endif() + +if(Legion_USE_OpenMP) + list(APPEND legate_core_CXX_DEFS LEGATE_USE_OPENMP) + list(APPEND legate_core_CUDA_DEFS LEGATE_USE_OPENMP) +endif() + +if(Legion_USE_GASNet) + list(APPEND legate_core_CXX_DEFS LEGATE_USE_GASNET) + list(APPEND legate_core_CUDA_DEFS LEGATE_USE_GASNET) +endif() + +# Change THRUST_DEVICE_SYSTEM for `.cpp` files +# TODO: This is what we do in cuNumeric, should we do it here as well? +if(Legion_USE_OpenMP) + list(APPEND legate_core_CXX_OPTIONS -UTHRUST_DEVICE_SYSTEM) + list(APPEND legate_core_CXX_OPTIONS -DTHRUST_DEVICE_SYSTEM=THRUST_DEVICE_SYSTEM_OMP) +elseif(NOT Legion_USE_CUDA) + list(APPEND legate_core_CXX_OPTIONS -UTHRUST_DEVICE_SYSTEM) + list(APPEND legate_core_CXX_OPTIONS -DTHRUST_DEVICE_SYSTEM=THRUST_DEVICE_SYSTEM_CPP) +endif() +# Or should we only do it if OpenMP and CUDA are both disabled? 
+# if(NOT Legion_USE_OpenMP AND (NOT Legion_USE_CUDA)) +# list(APPEND legate_core_CXX_OPTIONS -UTHRUST_DEVICE_SYSTEM) +# list(APPEND legate_core_CXX_OPTIONS -DTHRUST_DEVICE_SYSTEM=THRUST_DEVICE_SYSTEM_CPP) +# endif() + +list(APPEND legate_core_SOURCES + src/core/legate_c.cc + src/core/comm/comm.cc + src/core/comm/comm_cpu.cc + src/core/comm/coll.cc + src/core/data/allocator.cc + src/core/data/scalar.cc + src/core/data/store.cc + src/core/data/transform.cc + src/core/mapping/base_mapper.cc + src/core/mapping/core_mapper.cc + src/core/mapping/instance_manager.cc + src/core/mapping/mapping.cc + src/core/mapping/task.cc + src/core/runtime/context.cc + src/core/runtime/projection.cc + src/core/runtime/runtime.cc + src/core/runtime/shard.cc + src/core/task/return.cc + src/core/task/task.cc + src/core/utilities/debug.cc + src/core/utilities/deserializer.cc + src/core/utilities/machine.cc + src/core/utilities/linearize.cc +) + +if(Legion_USE_GASNet) + list(APPEND legate_core_SOURCES + src/core/comm/alltoall_thread_mpi.cc + src/core/comm/alltoallv_thread_mpi.cc + src/core/comm/gather_thread_mpi.cc + src/core/comm/allgather_thread_mpi.cc + src/core/comm/bcast_thread_mpi.cc) +else() + list(APPEND legate_core_SOURCES + src/core/comm/alltoall_thread_local.cc + src/core/comm/alltoallv_thread_local.cc + src/core/comm/allgather_thread_local.cc) +endif() + +if(Legion_USE_CUDA) + list(APPEND legate_core_SOURCES + src/core/comm/comm_nccl.cu + src/core/cuda/stream_pool.cu) +endif() + +add_library(legate_core ${legate_core_SOURCES}) +add_library(legate::core ALIAS legate_core) + +set_target_properties(legate_core + PROPERTIES EXPORT_NAME core + LIBRARY_OUTPUT_NAME lgcore + BUILD_RPATH "\$ORIGIN" + INSTALL_RPATH "\$ORIGIN" + CXX_STANDARD 17 + CXX_STANDARD_REQUIRED ON + CUDA_STANDARD 17 + CUDA_STANDARD_REQUIRED ON + POSITION_INDEPENDENT_CODE ON + INTERFACE_POSITION_INDEPENDENT_CODE ON + LIBRARY_OUTPUT_DIRECTORY lib) + +# Add Conda library, and include paths if specified +if(TARGET conda_env) + target_link_libraries(legate_core PRIVATE conda_env) +endif() + +if(Legion_USE_CUDA) + if(legate_core_STATIC_CUDA_RUNTIME) + set_target_properties(legate_core PROPERTIES CUDA_RUNTIME_LIBRARY Static) + # Make sure to export to consumers what runtime we used + target_link_libraries(legate_core PUBLIC CUDA::cudart_static) + else() + set_target_properties(legate_core PROPERTIES CUDA_RUNTIME_LIBRARY Shared) + # Make sure to export to consumers what runtime we used + target_link_libraries(legate_core PUBLIC CUDA::cudart) + endif() +endif() + +target_link_libraries(legate_core + PUBLIC Legion::Legion + legate::Thrust + $ + PRIVATE $ + $) + +target_compile_options(legate_core + PRIVATE "$<$:${legate_core_CXX_OPTIONS}>" + "$<$:${legate_core_CUDA_OPTIONS}>") + +target_compile_definitions(legate_core + PUBLIC "$<$:${legate_core_CXX_DEFS}>" + "$<$:${legate_core_CUDA_DEFS}>") + +target_include_directories(legate_core + PUBLIC + $ + INTERFACE + $ +) + +if(Legion_USE_CUDA) + file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/fatbin.ld" + [=[ +SECTIONS +{ +.nvFatBinSegment : { *(.nvFatBinSegment) } +.nv_fatbin : { *(.nv_fatbin) } +} +]=]) + + # ensure CUDA symbols aren't relocated to the middle of the debug build binaries + target_link_options(legate_core PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/fatbin.ld") +endif() + +############################################################################## +# - install targets----------------------------------------------------------- + +include(CPack) +include(GNUInstallDirs) 
+rapids_cmake_install_lib_dir(lib_dir) + +install(TARGETS legate_core + DESTINATION ${lib_dir} + EXPORT legate-core-exports) + +install( + FILES src/legate.h + src/legate_defines.h + src/legate_preamble.h + ${CMAKE_CURRENT_BINARY_DIR}/include/legate/version_config.hpp + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/legate) + +install( + FILES src/core/legate_c.h + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/legate/core) + +install( + FILES src/core/comm/coll.h + src/core/comm/communicator.h + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/legate/core/comm) + +install( + FILES src/core/cuda/cuda_help.h + src/core/cuda/stream_pool.h + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/legate/core/cuda) + +install( + FILES src/core/data/allocator.h + src/core/data/buffer.h + src/core/data/scalar.h + src/core/data/scalar.inl + src/core/data/store.h + src/core/data/store.inl + src/core/data/transform.h + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/legate/core/data) + +install( + FILES src/core/mapping/base_mapper.h + src/core/mapping/mapping.h + src/core/mapping/task.h + src/core/mapping/task.inl + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/legate/core/mapping) + +install( + FILES src/core/runtime/context.h + src/core/runtime/runtime.h + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/legate/core/runtime) + +install( + FILES src/core/task/exception.h + src/core/task/return.h + src/core/task/task.h + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/legate/core/task) + +install( + FILES src/core/utilities/debug.h + src/core/utilities/deserializer.h + src/core/utilities/deserializer.inl + src/core/utilities/dispatch.h + src/core/utilities/machine.h + src/core/utilities/nvtx_help.h + src/core/utilities/span.h + src/core/utilities/type_traits.h + src/core/utilities/typedefs.h + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/legate/core/utilities) + +############################################################################## +# - install export ----------------------------------------------------------- + +set(doc_string + [=[ +Provide targets for Legate Core, the Foundation for All Legate Libraries. + +Imported Targets: + - legate::core + +]=]) + +string(JOIN "\n" code_string +[=[ +if(NOT TARGET legate::Thrust) + thrust_create_target(legate::Thrust FROM_OPTIONS) +endif() +]=] + "set(Legion_USE_CUDA ${Legion_USE_CUDA})" + "set(Legion_USE_OpenMP ${Legion_USE_OpenMP})" + "set(Legion_USE_Python ${Legion_USE_Python})" + "set(Legion_USE_GASNet ${Legion_USE_GASNet})" + "set(Legion_BOUNDS_CHECKS ${Legion_BOUNDS_CHECKS})" +) + +rapids_export( + INSTALL legate_core + EXPORT_SET legate-core-exports + GLOBAL_TARGETS core + NAMESPACE legate:: + DOCUMENTATION doc_string + FINAL_CODE_BLOCK code_string) + +# build export targets +rapids_export( + BUILD legate_core + EXPORT_SET legate-core-exports + GLOBAL_TARGETS core + NAMESPACE legate:: + DOCUMENTATION doc_string + FINAL_CODE_BLOCK code_string) diff --git a/legate_core_python.cmake b/legate_core_python.cmake new file mode 100644 index 000000000..e6797d4d5 --- /dev/null +++ b/legate_core_python.cmake @@ -0,0 +1,106 @@ +#============================================================================= +# Copyright 2022 NVIDIA Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +############################################################################## +# - User Options ------------------------------------------------------------ + +option(FIND_LEGATE_CORE_CPP "Search for existing legate_core C++ installations before defaulting to local files" + OFF) + +############################################################################## +# - Dependencies ------------------------------------------------------------- + +# If the user requested it we attempt to find legate_core. +if(FIND_LEGATE_CORE_CPP) + include("${rapids-cmake-dir}/export/detail/parse_version.cmake") + rapids_export_parse_version(${legate_core_version} legate_core parsed_ver) + rapids_find_package(legate_core ${parsed_ver} EXACT CONFIG + GLOBAL_TARGETS legate::core + BUILD_EXPORT_SET legate-core-python-exports + INSTALL_EXPORT_SET legate-core-python-exports) +else() + set(legate_core_FOUND OFF) +endif() + +if(NOT legate_core_FOUND) + set(SKBUILD OFF) + set(Legion_USE_Python ON) + set(Legion_BUILD_BINDINGS ON) + add_subdirectory(. "${CMAKE_CURRENT_SOURCE_DIR}/build") + set(SKBUILD ON) +endif() + +execute_process( + COMMAND ${CMAKE_C_COMPILER} + -E -DLEGATE_USE_PYTHON_CFFI + -I "${CMAKE_CURRENT_SOURCE_DIR}/core/src" + -P "${CMAKE_CURRENT_SOURCE_DIR}/src/core/legate_c.h" + ECHO_ERROR_VARIABLE + OUTPUT_VARIABLE header + COMMAND_ERROR_IS_FATAL ANY +) + +set(libpath "") +configure_file( + "${CMAKE_CURRENT_SOURCE_DIR}/legate/core/install_info.py.in" + "${CMAKE_CURRENT_SOURCE_DIR}/legate/core/install_info.py" +@ONLY) + +add_library(legate_core_python INTERFACE) +add_library(legate::core_python ALIAS legate_core_python) +target_link_libraries(legate_core_python INTERFACE legate::core) + +############################################################################## +# - install targets----------------------------------------------------------- + +include(CPack) +include(GNUInstallDirs) +rapids_cmake_install_lib_dir(lib_dir) + +install(TARGETS legate_core_python + DESTINATION ${lib_dir} + EXPORT legate-core-python-exports) + +############################################################################## +# - install export ----------------------------------------------------------- + +set(doc_string + [=[ +Provide targets for Legate Python, the Foundation for All Legate Libraries. + +Imported Targets: + - legate::core_python + +]=]) + +set(code_string "") + +rapids_export( + INSTALL legate_core_python + EXPORT_SET legate-core-python-exports + GLOBAL_TARGETS core_python + NAMESPACE legate:: + DOCUMENTATION doc_string + FINAL_CODE_BLOCK code_string) + +# build export targets +rapids_export( + BUILD legate_core_python + EXPORT_SET legate-core-python-exports + GLOBAL_TARGETS core_python + NAMESPACE legate:: + DOCUMENTATION doc_string + FINAL_CODE_BLOCK code_string) diff --git a/pyproject.toml b/pyproject.toml index d735d85b1..1039f0bd0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,26 @@ +# Copyright (c) 2021-2022, NVIDIA CORPORATION. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +[build-system] +requires = [ + "wheel", + "ninja", + "setuptools", + "scikit-build>=0.13.1", + "cmake>=3.22.1,!=3.23.0", +] + [tool.black] line-length = 79 target-version = ["py36"] @@ -59,9 +82,10 @@ strict_equality = true warn_unused_configs = true -# For now enable each typed module individually. [[tool.mypy.overrides]] +# ignore certain auto-generated and utility files module = [ - "legate.*", + "legate._version", + "legate.__main__", ] -ignore_errors = false +ignore_errors = true diff --git a/scripts/build-install.sh b/scripts/build-install.sh new file mode 100755 index 000000000..2fb344daa --- /dev/null +++ b/scripts/build-install.sh @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +cd $(dirname "$(realpath "$0")")/.. + +# Use sccache if installed +source ./scripts/util/build-caching.sh +# Use consistent C[XX]FLAGS +source ./scripts/util/compiler-flags.sh +# Uninstall existing globally-installed Legion and legate_core (if installed) +source ./scripts/util/uninstall-global-legion-and-legate-core.sh + +# Remove existing build artifacts +rm -rf ./{build,_skbuild,dist,legate.core.egg-info} + +# Define CMake configuration arguments +cmake_args= + +# Use ninja-build if installed +if [[ -n "$(which ninja)" ]]; then cmake_args+="-GNinja"; fi + +# Add other build options here as desired +cmake_args+=" +-D Legion_USE_CUDA=ON +-D Legion_USE_OpenMP=ON +-D CMAKE_CUDA_ARCHITECTURES=NATIVE +"; + +# This won't be necessary once this PR is merged: +# https://gitlab.com/StanfordLegion/legion/-/merge_requests/523 +cmake_args+=" +-D Legion_CMAKE_INSTALL_PREFIX=$CONDA_PREFIX +" + +# Use all but 2 threads to compile +ninja_args="-j$(nproc --ignore=2)" + +# Build legion_core + legion_core_python and install into the current Python environment +SKBUILD_BUILD_OPTIONS="$ninja_args" \ +SKBUILD_CONFIGURE_OPTIONS="$cmake_args" \ + python -m pip install \ + --root / --prefix "$CONDA_PREFIX" \ + --no-deps --no-build-isolation \ + --upgrade \ + . -vv diff --git a/scripts/build-no-install.sh b/scripts/build-no-install.sh new file mode 100755 index 000000000..53d799136 --- /dev/null +++ b/scripts/build-no-install.sh @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +cd $(dirname "$(realpath "$0")")/.. 
+ +# Use sccache if installed +source ./scripts/util/build-caching.sh +# Use consistent C[XX]FLAGS +source ./scripts/util/compiler-flags.sh + +# Remove existing build artifacts +rm -rf ./{build,_skbuild,dist,legate.core.egg-info} + +# Define CMake configuration arguments +cmake_args= + +# Use ninja-build if installed +if [[ -n "$(which ninja)" ]]; then cmake_args+="-GNinja"; fi + +# Add other build options here as desired +cmake_args+=" +-D Legion_USE_CUDA=ON +-D Legion_USE_OpenMP=ON +-D CMAKE_CUDA_ARCHITECTURES=NATIVE +"; + +# Use all but 2 threads to compile +ninja_args="-j$(nproc --ignore=2)" + +# Build legion_core + legion_core_python and perform an "editable" install +SKBUILD_BUILD_OPTIONS="$ninja_args" \ +SKBUILD_CONFIGURE_OPTIONS="$cmake_args" \ + python -m pip install \ + --root / --prefix "$CONDA_PREFIX" \ + --no-deps --no-build-isolation \ + --editable \ + . -vv diff --git a/scripts/build-separately-no-install.sh b/scripts/build-separately-no-install.sh new file mode 100755 index 000000000..6401b4456 --- /dev/null +++ b/scripts/build-separately-no-install.sh @@ -0,0 +1,56 @@ +#! /usr/bin/env bash + +cd $(dirname "$(realpath "$0")")/.. + +# Use sccache if installed +source ./scripts/util/build-caching.sh +# Use consistent C[XX]FLAGS +source ./scripts/util/compiler-flags.sh + +# Remove existing build artifacts +rm -rf ./{build,_skbuild,dist,legate.core.egg-info} + +# Define CMake configuration arguments +cmake_args= + +# Use ninja-build if installed +if [[ -n "$(which ninja)" ]]; then cmake_args+="-GNinja"; fi + +# Add other build options here as desired +cmake_args+=" +-D Legion_USE_CUDA=ON +-D Legion_USE_OpenMP=ON +-D Legion_USE_Python=ON +-D Legion_BUILD_BINDINGS=ON +-D CMAKE_CUDA_ARCHITECTURES=NATIVE +"; + +# Use all but 2 threads to compile +ninja_args="-j$(nproc --ignore=2)" + +# Configure legate_core C++ +cmake -S . -B build ${cmake_args} + +# Build legate_core C++ +cmake --build build ${ninja_args} + +# Pretend to install Legion because Legion's CMakeLists only generates the Legion CFFI bindings at install time +( + tmpdir=$(mktemp -d); + cmake --install build/_deps/legion-build --prefix "$tmpdir" &>/dev/null; + rm -rf "$tmpdir"; +) + +cmake_args+=" +-D FIND_LEGATE_CORE_CPP=ON +-D legate_core_ROOT=$(pwd)/build +" + +# Build legion_core_python and perform an "editable" install +SKBUILD_BUILD_OPTIONS="$ninja_args" \ +SKBUILD_CONFIGURE_OPTIONS="$cmake_args" \ + python -m pip install \ + --root / --prefix "$CONDA_PREFIX" \ + --no-deps --no-build-isolation \ + --editable \ + . -vv diff --git a/scripts/build-with-legion-no-install.sh b/scripts/build-with-legion-no-install.sh new file mode 100755 index 000000000..e3bd76304 --- /dev/null +++ b/scripts/build-with-legion-no-install.sh @@ -0,0 +1,49 @@ +#! /usr/bin/env bash + +cd $(dirname "$(realpath "$0")")/.. 
+ +# Use sccache if installed +source ./scripts/util/build-caching.sh +# Use consistent C[XX]FLAGS +source ./scripts/util/compiler-flags.sh +# Read Legion_ROOT from the environment or prompt the user to enter it +source ./scripts/util/read-legion-root.sh "$0" + +# Remove existing build artifacts +rm -rf ./{build,_skbuild,dist,legate.core.egg-info} + +# Use all but 2 threads to compile +ninja_args="-j$(nproc --ignore=2)" + +# Pretend to install Legion because Legion's CMakeLists only generates the Legion CFFI bindings at install time +if [[ -f "$Legion_ROOT/CMakeCache.txt" ]]; then +( + tmpdir=$(mktemp -d); + cmake --build "$Legion_ROOT" ${ninja_args}; + cmake --install "$Legion_ROOT" --prefix "$tmpdir" &>/dev/null; + rm -rf "$tmpdir"; +) +fi + +# Define CMake configuration arguments +cmake_args= + +# Use ninja-build if installed +if [[ -n "$(which ninja)" ]]; then cmake_args+="-GNinja"; fi + +# Add other build options here as desired +cmake_args+=" +-D Legion_USE_CUDA=ON +-D Legion_USE_OpenMP=ON +-D CMAKE_CUDA_ARCHITECTURES=NATIVE +-D Legion_ROOT:STRING=\"$Legion_ROOT\" +"; + +# Build legion_core + legion_core_python and perform an "editable" install +SKBUILD_BUILD_OPTIONS="$ninja_args" \ +SKBUILD_CONFIGURE_OPTIONS="$cmake_args" \ + python -m pip install \ + --root / --prefix "$CONDA_PREFIX" \ + --no-deps --no-build-isolation \ + --editable \ + . -vv diff --git a/scripts/build-with-legion-separately-no-install.sh b/scripts/build-with-legion-separately-no-install.sh new file mode 100755 index 000000000..55c5610e1 --- /dev/null +++ b/scripts/build-with-legion-separately-no-install.sh @@ -0,0 +1,57 @@ +#! /usr/bin/env bash + +cd $(dirname "$(realpath "$0")")/.. + +# Use sccache if installed +source ./scripts/util/build-caching.sh +# Use consistent C[XX]FLAGS +source ./scripts/util/compiler-flags.sh +# Read Legion_ROOT from the environment or prompt the user to enter it +source ./scripts/util/read-legion-root.sh "$0" + +# Remove existing build artifacts +rm -rf ./{build,_skbuild,dist,legate.core.egg-info} + +# Use all but 2 threads to compile +ninja_args="-j$(nproc --ignore=2)" + +# Pretend to install Legion because Legion's CMakeLists only generates the Legion CFFI bindings at install time +if [[ -f "$Legion_ROOT/CMakeCache.txt" ]]; then +( + tmpdir=$(mktemp -d); + cmake --build "$Legion_ROOT" ${ninja_args}; + cmake --install "$Legion_ROOT" --prefix "$tmpdir" &>/dev/null; + rm -rf "$tmpdir"; +) +fi + +# Define CMake configuration arguments +cmake_args= + +# Use ninja-build if installed +if [[ -n "$(which ninja)" ]]; then cmake_args+="-GNinja"; fi + +# Add other build options here as desired +cmake_args+=" +-D CMAKE_CUDA_ARCHITECTURES=NATIVE +-D Legion_ROOT:STRING=\"$Legion_ROOT\" +"; + +# Configure legate_core C++ +cmake -S . -B build ${cmake_args} +# Build legate_core C++ +cmake --build build ${ninja_args} + +cmake_args+=" +-D FIND_LEGATE_CORE_CPP=ON +-D legate_core_ROOT:STRING=\"$(pwd)/build\" +" + +# Build legion_core_python and perform an "editable" install +SKBUILD_BUILD_OPTIONS="$ninja_args" \ +SKBUILD_CONFIGURE_OPTIONS="$cmake_args" \ + python -m pip install \ + --root / --prefix "$CONDA_PREFIX" \ + --no-deps --no-build-isolation \ + --editable \ + . -vv diff --git a/scripts/conda-build.sh b/scripts/conda-build.sh new file mode 100755 index 000000000..3356e4477 --- /dev/null +++ b/scripts/conda-build.sh @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# mamba create -n legate_core_build python=$PYTHON_VERSION boa git + +cd $(dirname "$(realpath "$0")")/.. 
+ +mkdir -p /tmp/conda-build/legate_core +rm -rf /tmp/conda-build/legate_core/* + +PYTHON_VERSION="${PYTHON_VERSION:-3.9}" + +CUDA="$(nvcc --version | head -n4 | tail -n1 | cut -d' ' -f5 | cut -d',' -f1).*" \ +conda mambabuild \ + --numpy 1.22 \ + --python $PYTHON_VERSION \ + --override-channels \ + -c conda-forge -c nvidia \ + --croot /tmp/conda-build/legate_core \ + --no-test \ + --no-verify \ + --build-id-pat='' \ + --merge-build-host \ + --no-include-recipe \ + --no-anaconda-upload \ + --variants "{gpu_enabled: 'true', python: $PYTHON_VERSION}" \ + ./conda/conda-build diff --git a/scripts/util/build-caching.sh b/scripts/util/build-caching.sh new file mode 100755 index 000000000..70de985d3 --- /dev/null +++ b/scripts/util/build-caching.sh @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +if [[ -n "$(which sccache)" ]]; then + # Use sccache if installed + CMAKE_C_COMPILER_LAUNCHER="${CMAKE_C_COMPILER_LAUNCHER:-$(which sccache)}"; + CMAKE_CXX_COMPILER_LAUNCHER="${CMAKE_CXX_COMPILER_LAUNCHER:-$(which sccache)}"; + CMAKE_CUDA_COMPILER_LAUNCHER="${CMAKE_CUDA_COMPILER_LAUNCHER:-$(which sccache)}"; +elif [[ -n "$(which ccache)" ]]; then + # Use ccache if installed + CMAKE_C_COMPILER_LAUNCHER="${CMAKE_C_COMPILER_LAUNCHER:-$(which ccache)}"; + CMAKE_CXX_COMPILER_LAUNCHER="${CMAKE_CXX_COMPILER_LAUNCHER:-$(which ccache)}"; + CMAKE_CUDA_COMPILER_LAUNCHER="${CMAKE_CUDA_COMPILER_LAUNCHER:-$(which ccache)}"; +fi + +export CMAKE_C_COMPILER_LAUNCHER="$CMAKE_C_COMPILER_LAUNCHER" +export CMAKE_CXX_COMPILER_LAUNCHER="$CMAKE_CXX_COMPILER_LAUNCHER" +export CMAKE_CUDA_COMPILER_LAUNCHER="$CMAKE_CUDA_COMPILER_LAUNCHER" diff --git a/scripts/util/compiler-flags.sh b/scripts/util/compiler-flags.sh new file mode 100755 index 000000000..981348ceb --- /dev/null +++ b/scripts/util/compiler-flags.sh @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +export CFLAGS="-w -fdiagnostics-color=always" +export CXXFLAGS="-w -fdiagnostics-color=always" +export CUDAFLAGS="-w -Xcompiler=-w,-fdiagnostics-color=always" +export CMAKE_BUILD_PARALLEL_LEVEL="${CMAKE_BUILD_PARALLEL_LEVEL:-${JOBS:-${PARALLEL_LEVEL:-$(nproc --ignore=2)}}}" + +if [[ -z "$LIBRARY_PATH" ]]; then + if [[ -d "${CUDA_HOME:-/usr/local/cuda}/lib64/stubs" ]]; then + export LIBRARY_PATH="${CUDA_HOME:-/usr/local/cuda}/lib64/stubs" + fi +fi diff --git a/scripts/util/read-legion-root.sh b/scripts/util/read-legion-root.sh new file mode 100755 index 000000000..03cc970c4 --- /dev/null +++ b/scripts/util/read-legion-root.sh @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +# Read Legion_ROOT from the environment or prompt the user to enter it +if [[ -z "$Legion_ROOT" ]]; then + while [[ -z "$Legion_ROOT" || ! -d "$Legion_ROOT" ]]; do + read -ep "\`\$Legion_ROOT\` not found. +Please enter the path to a Legion build (or install) directory: +" Legion_ROOT =1.22 + # TODO: Add rest of install dependencies +python_requires = >=3.8 diff --git a/setup.py b/setup.py index c638d0dd4..2b36b803b 100755 --- a/setup.py +++ b/setup.py @@ -15,71 +15,60 @@ # limitations under the License.
# -import argparse -import os -import subprocess -import sys -from distutils.command.build_py import build_py -from distutils.core import setup - from setuptools import find_packages +from skbuild import setup -# We need to know the prefix for the installation -# so we can know where to get the library -parser = argparse.ArgumentParser() -parser.add_argument("--prefix", required=False) -parser.add_argument( - "--recurse", required=False, default=False, action="store_true" -) -args, _ = parser.parse_known_args() - +import versioneer -class my_build_py(build_py): - def run(self): - if not self.dry_run: - self.mkpath(self.build_lib) - # Compile up our C header here and insert it as a string - # into legate_core_cffi.py so that it is installed with - # the python library directly - root_dir = os.path.dirname(os.path.realpath(__file__)) - header_src = os.path.join(root_dir, "src", "core", "legate_c.h") - output_dir = os.path.join(root_dir, "legate", "core") - include_dir = os.path.join(args.prefix, "include") - header = subprocess.check_output( - [ - os.getenv("CC", "gcc"), - "-E", - "-DLEGATE_USE_PYTHON_CFFI", - "-I" + str(include_dir), - "-P", - header_src, - ] - ).decode("utf-8") - libpath = os.path.join(args.prefix, "lib") - with open(os.path.join(output_dir, "install_info.py.in")) as f: - content = f.read() - content = content.format( - header=repr(header), libpath=repr(libpath) - ) - with open(os.path.join(output_dir, "install_info.py"), "wb") as f: - f.write(content.encode("utf-8")) - build_py.run(self) - - -# If we haven't been called from install.py then do that first -if args.recurse: - # Remove the recurse argument from the list - sys.argv.remove("--recurse") - setup( - name="legate.core", - version="22.10.00", - packages=find_packages( - where=".", - include=["legate*"], - ), - cmdclass={"build_py": my_build_py}, - ) -else: - with open("install.py") as f: - code = compile(f.read(), "install.py", "exec") - exec(code) +setup( + name="legate.core", + version=versioneer.get_version(), + description="legate.core - The Foundation for All Legate Libraries", + url="https://github.com/nv-legate/legate.core", + author="NVIDIA Corporation", + license="Apache 2.0", + classifiers=[ + "Intended Audience :: Developers", + "Topic :: Database", + "Topic :: Scientific/Engineering", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + ], + extras_require={ + "test": [ + "colorama", + "coverage", + "mock", + "mypy>=0.961", + "pynvml", + "pytest-cov", + "pytest", + ] + }, + packages=find_packages( + where=".", + include=[ + "legate", + "legate.*", + "legate.core", + "legate.core.*", + "legate.timing", + "legate.timing.*", + ], + ), + entry_points={ + "console_scripts": [ + "legate = legate.__main__:driver", + "lgpatch = legate.lgpatch:main", + ] + }, + scripts=[ + "bind.sh", + ], + cmdclass=versioneer.get_cmdclass(), + install_requires=["numpy>=1.22"], + zip_safe=False, +) diff --git a/src/core/task/return.cc b/src/core/task/return.cc index 4307887ee..785000db6 100644 --- a/src/core/task/return.cc +++ b/src/core/task/return.cc @@ -26,7 +26,7 @@ #include "core/task/return.h" #include "core/utilities/machine.h" #ifdef LEGATE_USE_CUDA -#include +#include #endif using namespace Legion; diff --git a/src/core/utilities/debug.h b/src/core/utilities/debug.h index a0a4e9fa2..e6f52897f 100644 --- a/src/core/utilities/debug.h +++ 
b/src/core/utilities/debug.h @@ -18,6 +18,10 @@ #include "core/data/store.h" +#ifdef LEGATE_USE_CUDA +#include +#endif + #include namespace legate { diff --git a/versioneer.py b/versioneer.py new file mode 100644 index 000000000..159ace09b --- /dev/null +++ b/versioneer.py @@ -0,0 +1,2222 @@ +# flake8: noqa: E501 + +# Version: 0.22 + +"""The Versioneer - like a rocketeer, but for versions. + +The Versioneer +============== + +* like a rocketeer, but for versions! +* https://github.com/python-versioneer/python-versioneer +* Brian Warner +* License: Public Domain +* Compatible with: Python 3.6, 3.7, 3.8, 3.9, 3.10 and pypy3 +* [![Latest Version][pypi-image]][pypi-url] +* [![Build Status][travis-image]][travis-url] + +This is a tool for managing a recorded version number in distutils/setuptools-based +python projects. The goal is to remove the tedious and error-prone "update +the embedded version string" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. + + +## Quick Install + +* `pip install versioneer` to somewhere in your $PATH +* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) +* run `versioneer install` in your source tree, commit the results +* Verify version information with `python setup.py version` + +## Version Identifiers + +Source trees come from a variety of places: + +* a version-control system checkout (mostly used by developers) +* a nightly tarball, produced by build automation +* a snapshot tarball, produced by a web-based VCS browser, like github's + "tarball from tag" feature +* a release tarball, produced by "setup.py sdist", distributed through PyPI + +Within each source tree, the version identifier (either a string or a number, +this tool is format-agnostic) can come from a variety of places: + +* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows + about recent "tags" and an absolute revision-id +* the name of the directory into which the tarball was unpacked +* an expanded VCS keyword ($Id$, etc) +* a `_version.py` created by some earlier build step + +For released software, the version identifier is closely related to a VCS +tag. Some projects use tag names that include more than just the version +string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool +needs to strip the tag prefix to extract the version identifier. For +unreleased software (between tags), the version identifier should provide +enough information to help developers recreate the same tree, while also +giving them an idea of roughly how old the tree is (after version 1.2, before +version 1.3). Many VCS systems can report a description that captures this, +for example `git describe --tags --dirty --always` reports things like +"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the +0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has +uncommitted changes). + +The version identifier is used for multiple purposes: + +* to allow the module to self-identify its version: `myproject.__version__` +* to choose a name and prefix for a 'setup.py sdist' tarball + +## Theory of Operation + +Versioneer works by adding a special `_version.py` file into your source +tree, where your `__init__.py` can import it. This `_version.py` knows how to +dynamically ask the VCS tool for version information at import time. 
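A quick sketch of that runtime lookup as it applies to this change (assuming the generated version file is installed as `legate/_version.py`, as the mypy override above suggests; the version string shown is hypothetical):

    from legate._version import get_versions

    info = get_versions()   # dict with 'version', 'full-revisionid', 'dirty', 'error', 'date'
    print(info["version"])  # e.g. "22.07.00+2.g1076c97" for a tree two commits past a 22.07.00 tag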
+ +`_version.py` also contains `$Revision$` markers, and the installation +process marks `_version.py` to have this marker rewritten with a tag name +during the `git archive` command. As a result, generated tarballs will +contain enough information to get the proper version. + +To allow `setup.py` to compute a version too, a `versioneer.py` is added to +the top level of your source tree, next to `setup.py` and the `setup.cfg` +that configures it. This overrides several distutils/setuptools commands to +compute the version when invoked, and changes `setup.py build` and `setup.py +sdist` to replace `_version.py` with a small static file that contains just +the generated version data. + +## Installation + +See [INSTALL.md](./INSTALL.md) for detailed installation instructions. + +## Version-String Flavors + +Code which uses Versioneer can learn about its version string at runtime by +importing `_version` from your main `__init__.py` file and running the +`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can +import the top-level `versioneer.py` and run `get_versions()`. + +Both functions return a dictionary with different flavors of version +information: + +* `['version']`: A condensed version string, rendered using the selected + style. This is the most commonly used value for the project's version + string. The default "pep440" style yields strings like `0.11`, + `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section + below for alternative styles. + +* `['full-revisionid']`: detailed revision identifier. For Git, this is the + full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". + +* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the + commit date in ISO 8601 format. This will be None if the date is not + available. + +* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that + this is only accurate if run in a VCS checkout, otherwise it is likely to + be False or None + +* `['error']`: if the version string could not be computed, this will be set + to a string describing the problem, otherwise it will be None. It may be + useful to throw an exception in setup.py if this is set, to avoid e.g. + creating tarballs with a version string of "unknown". + +Some variants are more useful than others. Including `full-revisionid` in a +bug report should allow developers to reconstruct the exact code being tested +(or indicate the presence of local changes that should be shared with the +developers). `version` is suitable for display in an "about" box or a CLI +`--version` output: it can be easily compared against release notes and lists +of bugs fixed in various releases. + +The installer adds the following text to your `__init__.py` to place a basic +version in `YOURPROJECT.__version__`: + + from ._version import get_versions + __version__ = get_versions()['version'] + del get_versions + +## Styles + +The setup.cfg `style=` configuration controls how the VCS information is +rendered into a version string. + +The default style, "pep440", produces a PEP440-compliant string, equal to the +un-prefixed tag name for actual releases, and containing an additional "local +version" section with more detail for in-between builds. For Git, this is +TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags +--dirty --always`. 
For example "0.11+2.g1076c97.dirty" indicates that the +tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and +that this commit is two revisions ("+2") beyond the "0.11" tag. For released +software (exactly equal to a known tag), the identifier will only contain the +stripped tag, e.g. "0.11". + +Other styles are available. See [details.md](details.md) in the Versioneer +source tree for descriptions. + +## Debugging + +Versioneer tries to avoid fatal errors: if something goes wrong, it will tend +to return a version of "0+unknown". To investigate the problem, run `setup.py +version`, which will run the version-lookup code in a verbose mode, and will +display the full contents of `get_versions()` (including the `error` string, +which may help identify what went wrong). + +## Known Limitations + +Some situations are known to cause problems for Versioneer. This details the +most significant ones. More can be found on Github +[issues page](https://github.com/python-versioneer/python-versioneer/issues). + +### Subprojects + +Versioneer has limited support for source trees in which `setup.py` is not in +the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are +two common reasons why `setup.py` might not be in the root: + +* Source trees which contain multiple subprojects, such as + [Buildbot](https://github.com/buildbot/buildbot), which contains both + "master" and "slave" subprojects, each with their own `setup.py`, + `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI + distributions (and upload multiple independently-installable tarballs). +* Source trees whose main purpose is to contain a C library, but which also + provide bindings to Python (and perhaps other languages) in subdirectories. + +Versioneer will look for `.git` in parent directories, and most operations +should get the right version string. However `pip` and `setuptools` have bugs +and implementation details which frequently cause `pip install .` from a +subproject directory to fail to find a correct version string (so it usually +defaults to `0+unknown`). + +`pip install --editable .` should work correctly. `setup.py install` might +work too. + +Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in +some later version. + +[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking +this issue. The discussion in +[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the +issue from the Versioneer side in more detail. +[pip PR#3176](https://github.com/pypa/pip/pull/3176) and +[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve +pip to let Versioneer work correctly. + +Versioneer-0.16 and earlier only looked for a `.git` directory next to the +`setup.cfg`, so subprojects were completely unsupported with those releases. + +### Editable installs with setuptools <= 18.5 + +`setup.py develop` and `pip install --editable .` allow you to install a +project into a virtualenv once, then continue editing the source code (and +test) without re-installing after every change. + +"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a +convenient way to specify executable scripts that should be installed along +with the python package. + +These both work as expected when using modern setuptools. 
When using +setuptools-18.5 or earlier, however, certain operations will cause +`pkg_resources.DistributionNotFound` errors when running the entrypoint +script, which must be resolved by re-installing the package. This happens +when the install happens with one version, then the egg_info data is +regenerated while a different version is checked out. Many setup.py commands +cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into +a different virtualenv), so this can be surprising. + +[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes +this one, but upgrading to a newer version of setuptools should probably +resolve it. + + +## Updating Versioneer + +To upgrade your project to a new release of Versioneer, do the following: + +* install the new Versioneer (`pip install -U versioneer` or equivalent) +* edit `setup.cfg`, if necessary, to include any new configuration settings + indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. +* re-run `versioneer install` in your source tree, to replace + `SRC/_version.py` +* commit any changed files + +## Future Directions + +This tool is designed to make it easily extended to other version-control +systems: all VCS-specific components are in separate directories like +src/git/ . The top-level `versioneer.py` script is assembled from these +components by running make-versioneer.py . In the future, make-versioneer.py +will take a VCS name as an argument, and will construct a version of +`versioneer.py` that is specific to the given VCS. It might also take the +configuration arguments that are currently provided manually during +installation by editing setup.py . Alternatively, it might go the other +direction and include code from all supported VCS systems, reducing the +number of intermediate scripts. + +## Similar projects + +* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time + dependency +* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of + versioneer +* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools + plugin + +## License + +To make Versioneer easier to embed, all its code is dedicated to the public +domain. The `_version.py` that it creates is also in the public domain. +Specifically, both are released under the Creative Commons "Public Domain +Dedication" license (CC0-1.0), as described in +https://creativecommons.org/publicdomain/zero/1.0/ . + +[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg +[pypi-url]: https://pypi.python.org/pypi/versioneer/ +[travis-image]: +https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg +[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer + +""" +# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring +# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements +# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error +# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with +# pylint:disable=attribute-defined-outside-init,too-many-arguments + +import configparser +import errno +import functools +import json +import os +import re +import subprocess +import sys +from typing import Callable, Dict + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_root(): + """Get the project root directory. 
+ + We require that all commands are run from the project root, i.e. the + directory that contains setup.py, setup.cfg, and versioneer.py . + """ + root = os.path.realpath(os.path.abspath(os.getcwd())) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + # allow 'python path/to/setup.py COMMAND' + root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + err = ( + "Versioneer was unable to run the project root directory. " + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND')." + ) + raise VersioneerBadRootError(err) + try: + # Certain runtime workflows (setup.py install/develop in a setuptools + # tree) execute all dependencies in a single python process, so + # "versioneer" may be imported multiple times, and python's shared + # module-import table will cache the first one. So we can't use + # os.path.dirname(__file__), as that will find whichever + # versioneer.py was first imported, even in later projects. + my_path = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(my_path)[0]) + vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) + if me_dir != vsr_dir: + print( + "Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(my_path), versioneer_py) + ) + except NameError: + pass + return root + + +def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" + # This might raise OSError (if setup.cfg is missing), or + # configparser.NoSectionError (if it lacks a [versioneer] section), or + # configparser.NoOptionError (if it lacks "VCS="). See the docstring at + # the top of versioneer.py for instructions on writing your setup.cfg . 
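    # (Illustrative sketch; values are hypothetical, not taken from this change.)
    # The [versioneer] section this function parses from setup.cfg is typically
    # of the shape:
    #
    #   [versioneer]
    #   VCS = git
    #   style = pep440
    #   versionfile_source = legate/_version.py
    #   versionfile_build = legate/_version.py
    #   tag_prefix = v
    #   parentdir_prefix = legate.core-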
+ setup_cfg = os.path.join(root, "setup.cfg") + parser = configparser.ConfigParser() + with open(setup_cfg, "r") as cfg_file: + parser.read_file(cfg_file) + VCS = parser.get("versioneer", "VCS") # mandatory + + # Dict-like interface for non-mandatory entries + section = parser["versioneer"] + + cfg = VersioneerConfig() + cfg.VCS = VCS + cfg.style = section.get("style", "") + cfg.versionfile_source = section.get("versionfile_source") + cfg.versionfile_build = section.get("versionfile_build") + cfg.tag_prefix = section.get("tag_prefix") + if cfg.tag_prefix in ("''", '""'): + cfg.tag_prefix = "" + cfg.parentdir_prefix = section.get("parentdir_prefix") + cfg.verbose = section.get("verbose") + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + HANDLERS.setdefault(vcs, {})[method] = f + return f + + return decorate + + +def run_command( + commands, args, cwd=None, verbose=False, hide_stderr=False, env=None +): + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + + popen_kwargs = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen( + [command] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + **popen_kwargs, + ) + break + except OSError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, process.returncode + return stdout, process.returncode + + +LONG_VERSION_PY[ + "git" +] = r''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.22 (https://github.com/python-versioneer/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys +from typing import Callable, Dict +import functools + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
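    # (Illustrative, hypothetical values.)  In a `git archive` tarball the
    # export-subst attribute expands the three Format placeholders below to
    # something like:
    #   git_refnames = " (HEAD -> main, tag: v22.07.00)"
    #   git_full = "1076c978a8d3cfc70f408fe5974aa6c092c949ac"
    #   git_date = "2022-07-01 12:34:56 -0700"
    # In an ordinary checkout they stay unexpanded, and get_versions() falls
    # back to asking git itself.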
+ git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" + git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" + git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "%(STYLE)s" + cfg.tag_prefix = "%(TAG_PREFIX)s" + cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" + cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + + popen_kwargs = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None), **popen_kwargs) + break + except OSError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %%s" %% dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %%s" %% (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %%s (error)" %% dispcmd) + print("stdout was %%s" %% stdout) + return None, process.returncode + return stdout, process.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. 
We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %%s but none started with prefix %%s" %% + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = {r for r in refs if re.search(r'\d', r)} + if verbose: + print("discarding '%%s', no digits" %% ",".join(refs - tags)) + if verbose: + print("likely tags: %%s" %% ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue + if verbose: + print("picking %%s" %% r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. + env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %%s not under git control" %% root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + MATCH_ARGS = ["--match", "%%s*" %% tag_prefix] if tag_prefix else [] + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", *MATCH_ARGS], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. 
Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparsable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%%s'" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%%s' doesn't start with prefix '%%s'" + print(fmt %% (full_tag, tag_prefix)) + pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" + %% (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + if pieces["distance"]: + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%%d.dev%%d" %% (post_version+1, pieces["distance"]) + else: + rendered += ".post0.dev%%d" %% (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] + else: + # exception #1 + rendered = "0.post0.dev%%d" %% pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%%s'" %% style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
+ for _ in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} +''' + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". 
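    # (Illustrative, hypothetical values.)  With an older git, refs might be
    # {"HEAD", "master", "stabilization", "v22.07.00"}; the digit heuristic
    # below keeps only {"v22.07.00"}, which is then checked against tag_prefix.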
+ tags = {r for r in refs if re.search(r"\d", r)} + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix) :] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r"\d", r): + continue + if verbose: + print("picking %s" % r) + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. + env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner( + GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True + ) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + MATCH_ARGS = ["--match", "%s*" % tag_prefix] if tag_prefix else [] + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner( + GITS, + ["describe", "--tags", "--dirty", "--always", "--long", *MATCH_ARGS], + cwd=root, + ) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner( + GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root + ) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. 
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[: git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + if not mo: + # unparsable. Maybe git-describe is misbehaving? + pieces["error"] = ( + "unable to parse git-describe output: '%s'" % describe_out + ) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix) :] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ + 0 + ].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def do_vcs_install(manifest_in, versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-subst keyword substitution. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + files = [manifest_in, versionfile_source] + if ipy: + files.append(ipy) + try: + my_path = __file__ + if my_path.endswith(".pyc") or my_path.endswith(".pyo"): + my_path = os.path.splitext(my_path)[0] + ".py" + versioneer_file = os.path.relpath(my_path) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) + present = False + try: + with open(".gitattributes", "r") as fobj: + for line in fobj: + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + break + except OSError: + pass + if not present: + with open(".gitattributes", "a+") as fobj: + fobj.write(f"{versionfile_source} export-subst\n") + files.append(".gitattributes") + run_command(GITS, ["add", "--"] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +SHORT_VERSION_PY = """ +# This file was generated by 'versioneer.py' (0.22) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json + +version_json = ''' +%s +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) +""" + + +def versions_from_file(filename): + """Try to determine the version from _version.py if present.""" + try: + with open(filename) as f: + contents = f.read() + except OSError: + raise NotThisMethod("unable to read _version.py") + mo = re.search( + r"version_json = '''\n(.*)''' # END VERSION_JSON", + contents, + re.M | re.S, + ) + if not mo: + mo = re.search( + r"version_json = '''\r\n(.*)''' # END VERSION_JSON", + contents, + re.M | re.S, + ) + if not mo: + raise NotThisMethod("no version_json in _version.py") + return json.loads(mo.group(1)) + + +def write_to_version_file(filename, versions): + """Write the given version number to the given _version.py file.""" + os.unlink(filename) + contents = json.dumps( + versions, sort_keys=True, indent=1, separators=(",", ": ") + ) + with open(filename, "w") as f: + f.write(SHORT_VERSION_PY % contents) + + print("set %s to '%s'" % (filename, versions["version"])) + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver): + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces): + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + if pieces["distance"]: + # update the post release segment + tag_version, post_version = pep440_split_post( + pieces["closest-tag"] + ) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % ( + post_version + 1, + pieces["distance"], + ) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] + else: + # exception #1 + rendered = "0.post0.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } + + +class VersioneerBadRootError(Exception): + """The project root directory is unknown or missing key files.""" + + +def get_versions(verbose=False): + """Get the project version from whatever source is available. + + Returns dict with two keys: 'version' and 'full'. 
+ """ + if "versioneer" in sys.modules: + # see the discussion in cmdclass.py:get_cmdclass() + del sys.modules["versioneer"] + + root = get_root() + cfg = get_config_from_root(root) + + assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" + handlers = HANDLERS.get(cfg.VCS) + assert handlers, "unrecognized VCS '%s'" % cfg.VCS + verbose = verbose or cfg.verbose + assert ( + cfg.versionfile_source is not None + ), "please set versioneer.versionfile_source" + assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" + + versionfile_abs = os.path.join(root, cfg.versionfile_source) + + # extract version from first of: _version.py, VCS command (e.g. 'git + # describe'), parentdir. This is meant to work for developers using a + # source checkout, for users of a tarball created by 'setup.py sdist', + # and for users of a tarball/zipball created by 'git archive' or github's + # download-from-tag feature or the equivalent in other VCSes. + + get_keywords_f = handlers.get("get_keywords") + from_keywords_f = handlers.get("keywords") + if get_keywords_f and from_keywords_f: + try: + keywords = get_keywords_f(versionfile_abs) + ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) + if verbose: + print("got version from expanded keyword %s" % ver) + return ver + except NotThisMethod: + pass + + try: + ver = versions_from_file(versionfile_abs) + if verbose: + print("got version from file %s %s" % (versionfile_abs, ver)) + return ver + except NotThisMethod: + pass + + from_vcs_f = handlers.get("pieces_from_vcs") + if from_vcs_f: + try: + pieces = from_vcs_f(cfg.tag_prefix, root, verbose) + ver = render(pieces, cfg.style) + if verbose: + print("got version from VCS %s" % ver) + return ver + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + if verbose: + print("got version from parentdir %s" % ver) + return ver + except NotThisMethod: + pass + + if verbose: + print("unable to compute version") + + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } + + +def get_version(): + """Get the short version string for this project.""" + return get_versions()["version"] + + +def get_cmdclass(cmdclass=None): + """Get the custom setuptools/distutils subclasses used by Versioneer. + + If the package uses a different cmdclass (e.g. one from numpy), it + should be provide as an argument. + """ + if "versioneer" in sys.modules: + del sys.modules["versioneer"] + # this fixes the "python setup.py develop" case (also 'install' and + # 'easy_install .'), in which subdependencies of the main project are + # built (using setup.py bdist_egg) in the same python process. Assume + # a main project A and a dependency B, which use different versions + # of Versioneer. A's setup.py imports A's Versioneer, leaving it in + # sys.modules by the time B's setup.py is executed, causing B to run + # with the wrong versioneer. Setuptools wraps the sub-dep builds in a + # sandbox that restores sys.modules to it's pre-build state, so the + # parent is protected against the child's "import versioneer". By + # removing ourselves from sys.modules here, before the child build + # happens, we protect the child from the parent's versioneer too. 
+ # Also see https://github.com/python-versioneer/python-versioneer/issues/52 + + cmds = {} if cmdclass is None else cmdclass.copy() + + # we add "version" to both distutils and setuptools + try: + from setuptools import Command + except ImportError: + from distutils.core import Command + + class cmd_version(Command): + description = "report generated version string" + user_options = [] + boolean_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + vers = get_versions(verbose=True) + print("Version: %s" % vers["version"]) + print(" full-revisionid: %s" % vers.get("full-revisionid")) + print(" dirty: %s" % vers.get("dirty")) + print(" date: %s" % vers.get("date")) + if vers["error"]: + print(" error: %s" % vers["error"]) + + cmds["version"] = cmd_version + + # we override "build_py" in both distutils and setuptools + # + # most invocation pathways end up running build_py: + # distutils/build -> build_py + # distutils/install -> distutils/build ->.. + # setuptools/bdist_wheel -> distutils/install ->.. + # setuptools/bdist_egg -> distutils/install_lib -> build_py + # setuptools/install -> bdist_egg ->.. + # setuptools/develop -> ? + # pip install: + # copies source tree to a tempdir before running egg_info/etc + # if .git isn't copied too, 'git describe' will fail + # then does setup.py bdist_wheel, or sometimes setup.py install + # setup.py egg_info -> ? + + # we override different "build_py" commands for both environments + if "build_py" in cmds: + _build_py = cmds["build_py"] + elif "setuptools" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join( + self.build_lib, cfg.versionfile_build + ) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + cmds["build_py"] = cmd_build_py + + if "build_ext" in cmds: + _build_ext = cmds["build_ext"] + elif "setuptools" in sys.modules: + from setuptools.command.build_ext import build_ext as _build_ext + else: + from distutils.command.build_ext import build_ext as _build_ext + + class cmd_build_ext(_build_ext): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_ext.run(self) + if self.inplace: + # build_ext --inplace will only build extensions in + # build/lib<..> dir with no _version.py to write to. + # As in place builds will already have a _version.py + # in the module dir, we do not need to write one. + return + # now locate _version.py in the new build/ directory and replace + # it with an updated value + target_versionfile = os.path.join( + self.build_lib, cfg.versionfile_build + ) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + cmds["build_ext"] = cmd_build_ext + + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + + # nczeczulin reports that py2exe won't like the pep440-style string + # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. 
+ # setup(console=[{ + # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION + # "product_version": versioneer.get_version(), + # ... + + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + cmds["build_exe"] = cmd_build_exe + del cmds["build_py"] + + if "py2exe" in sys.modules: # py2exe enabled? + from py2exe.distutils_buildexe import py2exe as _py2exe + + class cmd_py2exe(_py2exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _py2exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + cmds["py2exe"] = cmd_py2exe + + # we override different "sdist" commands for both environments + if "sdist" in cmds: + _sdist = cmds["sdist"] + elif "setuptools" in sys.modules: + from setuptools.command.sdist import sdist as _sdist + else: + from distutils.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions["version"] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = os.path.join(base_dir, cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file( + target_versionfile, self._versioneer_generated_versions + ) + + cmds["sdist"] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """ +setup.cfg is missing the necessary Versioneer configuration. You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +""" + +SAMPLE_CONFIG = """ +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. 
+ +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +""" + +OLD_SNIPPET = """ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +""" + +INIT_PY_SNIPPET = """ +from . import {0} +__version__ = {0}.get_versions()['version'] +""" + + +def do_setup(): + """Do main VCS-independent setup function for installing Versioneer.""" + root = get_root() + try: + cfg = get_config_from_root(root) + except ( + OSError, + configparser.NoSectionError, + configparser.NoOptionError, + ) as e: + if isinstance(e, (OSError, configparser.NoSectionError)): + print( + "Adding sample versioneer config to setup.cfg", file=sys.stderr + ) + with open(os.path.join(root, "setup.cfg"), "a") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print(" creating %s" % cfg.versionfile_source) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") + if os.path.exists(ipy): + try: + with open(ipy, "r") as f: + old = f.read() + except OSError: + old = "" + module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] + snippet = INIT_PY_SNIPPET.format(module) + if OLD_SNIPPET in old: + print(" replacing boilerplate in %s" % ipy) + with open(ipy, "w") as f: + f.write(old.replace(OLD_SNIPPET, snippet)) + elif snippet not in old: + print(" appending to %s" % ipy) + with open(ipy, "a") as f: + f.write(snippet) + else: + print(" %s unmodified" % ipy) + else: + print(" %s doesn't exist, ok" % ipy) + ipy = None + + # Make sure both the top-level "versioneer.py" and versionfile_source + # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so + # they'll be copied into source distributions. Pip won't be able to + # install the package without this. + manifest_in = os.path.join(root, "MANIFEST.in") + simple_includes = set() + try: + with open(manifest_in, "r") as f: + for line in f: + if line.startswith("include "): + for include in line.split()[1:]: + simple_includes.add(include) + except OSError: + pass + # That doesn't cover everything MANIFEST.in can do + # (http://docs.python.org/2/distutils/sourcedist.html#commands), so + # it might give some false negatives. Appending redundant 'include' + # lines is safe, though. + if "versioneer.py" not in simple_includes: + print(" appending 'versioneer.py' to MANIFEST.in") + with open(manifest_in, "a") as f: + f.write("include versioneer.py\n") + else: + print(" 'versioneer.py' already in MANIFEST.in") + if cfg.versionfile_source not in simple_includes: + print( + " appending versionfile_source ('%s') to MANIFEST.in" + % cfg.versionfile_source + ) + with open(manifest_in, "a") as f: + f.write("include %s\n" % cfg.versionfile_source) + else: + print(" versionfile_source already in MANIFEST.in") + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-subst keyword + # substitution. 
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" + found = set() + setters = False + errors = 0 + with open("setup.py", "r") as f: + for line in f.readlines(): + if "import versioneer" in line: + found.add("import") + if "versioneer.get_cmdclass()" in line: + found.add("cmdclass") + if "versioneer.get_version()" in line: + found.add("get_version") + if "versioneer.VCS" in line: + setters = True + if "versioneer.versionfile_source" in line: + setters = True + if len(found) != 3: + print("") + print("Your setup.py appears to be missing some important items") + print("(but I might be wrong). Please make sure it has something") + print("roughly like the following:") + print("") + print(" import versioneer") + print(" setup( version=versioneer.get_version(),") + print(" cmdclass=versioneer.get_cmdclass(), ...)") + print("") + errors += 1 + if setters: + print("You should remove lines like 'versioneer.VCS = ' and") + print("'versioneer.versionfile_source = ' . This configuration") + print("now lives in setup.cfg, and should be removed from setup.py") + print("") + errors += 1 + return errors + + +if __name__ == "__main__": + cmd = sys.argv[1] + if cmd == "setup": + errors = do_setup() + errors += scan_setup_py() + if errors: + sys.exit(1)
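
Note: for context on how the vendored versioneer.py above is consumed, here is a minimal sketch based on the CONFIG_ERROR and SAMPLE_CONFIG templates in the code. The package name and file paths are illustrative placeholders, not this repository's actual configuration.

    # setup.cfg (placeholder values -- substitute the real package paths)
    #
    #   [versioneer]
    #   VCS = git
    #   style = pep440
    #   versionfile_source = mypkg/_version.py
    #   versionfile_build = mypkg/_version.py
    #   tag_prefix =
    #   parentdir_prefix = mypkg-

    # setup.py
    from setuptools import setup
    import versioneer

    setup(
        name="mypkg",                        # placeholder project name
        # Version is resolved at build time from expanded git-archive keywords,
        # 'git describe', or the parent directory name, in that order.
        version=versioneer.get_version(),
        # Injects the version-aware build_py/build_ext/sdist command classes
        # defined in get_cmdclass() above.
        cmdclass=versioneer.get_cmdclass(),
    )

Running 'python versioneer.py setup' afterwards drives do_setup(), which writes the configured _version.py, patches the package __init__.py, adds the needed MANIFEST.in includes, and calls do_vcs_install() to mark the version file for export-subst keyword substitution.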