From 93d86f2732ec420c1d317829835f47efd4613663 Mon Sep 17 00:00:00 2001 From: Raymond Douglass Date: Mon, 23 Jan 2023 10:34:26 -0500 Subject: [PATCH 01/25] DOC --- CMakeLists.txt | 4 ++-- doxygen/Doxyfile | 2 +- python/CMakeLists.txt | 4 ++-- python/docs/conf.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 08f4339a2..4cbb6c09b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -15,7 +15,7 @@ cmake_minimum_required(VERSION 3.23.1 FATAL_ERROR) if(NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/RMM_RAPIDS.cmake) - file(DOWNLOAD https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-23.02/RAPIDS.cmake + file(DOWNLOAD https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-23.04/RAPIDS.cmake ${CMAKE_CURRENT_BINARY_DIR}/RMM_RAPIDS.cmake) endif() include(${CMAKE_CURRENT_BINARY_DIR}/RMM_RAPIDS.cmake) @@ -27,7 +27,7 @@ include(rapids-find) project( RMM - VERSION 23.02.00 + VERSION 23.04.00 LANGUAGES CXX) # Write the version header diff --git a/doxygen/Doxyfile b/doxygen/Doxyfile index c58a45a8f..c4192f793 100644 --- a/doxygen/Doxyfile +++ b/doxygen/Doxyfile @@ -38,7 +38,7 @@ PROJECT_NAME = "RMM" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 23.02 +PROJECT_NUMBER = 23.04 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 1b4d2c5e3..705f40ac7 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -14,9 +14,9 @@ cmake_minimum_required(VERSION 3.23.1 FATAL_ERROR) -set(rmm_version 23.02.00) +set(rmm_version 23.04.00) -file(DOWNLOAD https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-23.02/RAPIDS.cmake +file(DOWNLOAD https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-23.04/RAPIDS.cmake ${CMAKE_BINARY_DIR}/RAPIDS.cmake) include(${CMAKE_BINARY_DIR}/RAPIDS.cmake) diff --git a/python/docs/conf.py b/python/docs/conf.py index ec5ba5b61..31e3ba44f 100644 --- a/python/docs/conf.py +++ b/python/docs/conf.py @@ -23,9 +23,9 @@ # built documents. # # The short X.Y version. -version = "23.02" +version = "23.04" # The full version, including alpha/beta/rc tags. -release = "23.02.00" +release = "23.04.00" # -- General configuration --------------------------------------------------- From 421880002a1d94bdde7ea43e155d5f8dc0fa9648 Mon Sep 17 00:00:00 2001 From: Bradley Dice Date: Mon, 6 Feb 2023 11:59:21 -0600 Subject: [PATCH 02/25] Use date in build string instead of in the version. (#1195) Moves date information from the version to the build string. This will help with installing PR artifacts and nightly builds locally and in downstream CI workflows. cc: @ajschmidt8 Authors: - Bradley Dice (https://github.com/bdice) - AJ Schmidt (https://github.com/ajschmidt8) Approvers: - Ray Douglass (https://github.com/raydouglass) URL: https://github.com/rapidsai/rmm/pull/1195 --- conda/recipes/librmm/meta.yaml | 9 +++++---- conda/recipes/rmm/meta.yaml | 7 ++++--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/conda/recipes/librmm/meta.yaml b/conda/recipes/librmm/meta.yaml index 720cf97c4..61174c7b7 100644 --- a/conda/recipes/librmm/meta.yaml +++ b/conda/recipes/librmm/meta.yaml @@ -1,9 +1,10 @@ -# Copyright (c) 2018-2022, NVIDIA CORPORATION. +# Copyright (c) 2018-2023, NVIDIA CORPORATION. 
-{% set version = environ.get('GIT_DESCRIBE_TAG', '0.0.0.dev').lstrip('v') + environ.get('VERSION_SUFFIX', '') %} +{% set version = environ.get('GIT_DESCRIBE_TAG', '0.0.0.dev').lstrip('v') %} {% set cuda_version = '.'.join(environ['RAPIDS_CUDA_VERSION'].split('.')[:2]) %} {% set cuda_major = cuda_version.split('.')[0] %} {% set cuda_spec = ">=" + cuda_major ~ ",<" + (cuda_major | int + 1) ~ ".0a0" %} # i.e. >=11,<12.0a0 +{% set date_string = environ['RAPIDS_DATE_STRING'] %} package: name: librmm-split @@ -43,7 +44,7 @@ outputs: script: install_librmm.sh build: number: {{ GIT_DESCRIBE_NUMBER }} - string: cuda{{ cuda_major }}_{{ GIT_DESCRIBE_HASH }}_{{ GIT_DESCRIBE_NUMBER }} + string: cuda{{ cuda_major }}_{{ date_string }}_{{ GIT_DESCRIBE_HASH }}_{{ GIT_DESCRIBE_NUMBER }} run_exports: - {{ pin_subpackage("librmm", max_pin="x.x") }} ignore_run_exports_from: @@ -97,7 +98,7 @@ outputs: script: install_librmm_tests.sh build: number: {{ GIT_DESCRIBE_NUMBER }} - string: cuda{{ cuda_major }}_{{ GIT_DESCRIBE_HASH }}_{{ GIT_DESCRIBE_NUMBER }} + string: cuda{{ cuda_major }}_{{ date_string }}_{{ GIT_DESCRIBE_HASH }}_{{ GIT_DESCRIBE_NUMBER }} ignore_run_exports_from: - {{ compiler('cuda') }} requirements: diff --git a/conda/recipes/rmm/meta.yaml b/conda/recipes/rmm/meta.yaml index 18f6011e5..7cf116dc0 100644 --- a/conda/recipes/rmm/meta.yaml +++ b/conda/recipes/rmm/meta.yaml @@ -1,9 +1,10 @@ -# Copyright (c) 2019-2022, NVIDIA CORPORATION. +# Copyright (c) 2019-2023, NVIDIA CORPORATION. -{% set version = environ.get('GIT_DESCRIBE_TAG', '0.0.0.dev').lstrip('v') + environ.get('VERSION_SUFFIX', '') %} +{% set version = environ.get('GIT_DESCRIBE_TAG', '0.0.0.dev').lstrip('v') %} {% set cuda_version = '.'.join(environ['RAPIDS_CUDA_VERSION'].split('.')[:2]) %} {% set cuda_major = cuda_version.split('.')[0] %} {% set py_version = environ['CONDA_PY'] %} +{% set date_string = environ['RAPIDS_DATE_STRING'] %} package: name: rmm @@ -14,7 +15,7 @@ source: build: number: {{ GIT_DESCRIBE_NUMBER }} - string: cuda{{ cuda_major }}_py{{ py_version }}_{{ GIT_DESCRIBE_HASH }}_{{ GIT_DESCRIBE_NUMBER }} + string: cuda{{ cuda_major }}_py{{ py_version }}_{{ date_string }}_{{ GIT_DESCRIBE_HASH }}_{{ GIT_DESCRIBE_NUMBER }} script_env: - CMAKE_GENERATOR - CMAKE_C_COMPILER_LAUNCHER From 1b6eaafa9e048bfd63f4ffe8b0e451883e77775f Mon Sep 17 00:00:00 2001 From: AJ Schmidt Date: Wed, 8 Feb 2023 10:48:44 -0500 Subject: [PATCH 03/25] Update shared workflow branches (#1203) This PR updates the branch reference used for our shared workflows. 
Authors: - AJ Schmidt (https://github.com/ajschmidt8) Approvers: - Ray Douglass (https://github.com/raydouglass) URL: https://github.com/rapidsai/rmm/pull/1203 --- .github/workflows/build.yaml | 12 ++++++------ .github/workflows/pr.yaml | 18 +++++++++--------- .github/workflows/test.yaml | 6 +++--- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 623ade564..ef346a186 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -28,7 +28,7 @@ concurrency: jobs: cpp-build: secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/conda-cpp-build.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/conda-cpp-build.yaml@branch-23.04 with: build_type: ${{ inputs.build_type || 'branch' }} branch: ${{ inputs.branch }} @@ -37,7 +37,7 @@ jobs: python-build: needs: [cpp-build] secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/conda-python-build.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/conda-python-build.yaml@branch-23.04 with: build_type: ${{ inputs.build_type || 'branch' }} branch: ${{ inputs.branch }} @@ -46,7 +46,7 @@ jobs: upload-conda: needs: [cpp-build, python-build] secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/conda-upload-packages.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/conda-upload-packages.yaml@branch-23.04 with: build_type: ${{ inputs.build_type || 'branch' }} branch: ${{ inputs.branch }} @@ -56,7 +56,7 @@ jobs: if: ${{ startsWith(github.ref, 'refs/heads/branch-') }} needs: python-build secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/custom-job.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/custom-job.yaml@branch-23.04 with: build_type: branch node_type: "gpu-latest-1" @@ -65,7 +65,7 @@ jobs: run_script: "ci/build_docs.sh" wheel-build: secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-build.yml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-build.yml@branch-23.04 with: build_type: ${{ inputs.build_type || 'branch' }} branch: ${{ inputs.branch }} @@ -77,7 +77,7 @@ jobs: wheel-publish: needs: wheel-build secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-publish.yml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-publish.yml@branch-23.04 with: build_type: ${{ inputs.build_type || 'branch' }} branch: ${{ inputs.branch }} diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 946dd4270..d8f0b0d3c 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -21,38 +21,38 @@ jobs: - wheel-build - wheel-tests secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/pr-builder.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/pr-builder.yaml@branch-23.04 checks: secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/checks.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/checks.yaml@branch-23.04 conda-cpp-build: needs: checks secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/conda-cpp-build.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/conda-cpp-build.yaml@branch-23.04 with: build_type: pull-request conda-cpp-tests: needs: 
conda-cpp-build secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/conda-cpp-tests.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/conda-cpp-tests.yaml@branch-23.04 with: build_type: pull-request conda-python-build: needs: conda-cpp-build secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/conda-python-build.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/conda-python-build.yaml@branch-23.04 with: build_type: pull-request conda-python-tests: needs: conda-python-build secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/conda-python-tests.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/conda-python-tests.yaml@branch-23.04 with: build_type: pull-request docs-build: needs: conda-python-build secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/custom-job.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/custom-job.yaml@branch-23.04 with: build_type: pull-request node_type: "gpu-latest-1" @@ -62,7 +62,7 @@ jobs: wheel-build: needs: checks secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-build.yml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-build.yml@branch-23.04 with: build_type: pull-request package-dir: python @@ -71,7 +71,7 @@ jobs: wheel-tests: needs: wheel-build secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-test.yml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-test.yml@branch-23.04 with: build_type: pull-request package-name: rmm diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 803816df9..f4ef540fe 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -16,7 +16,7 @@ on: jobs: cpp-tests: secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/conda-cpp-tests.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/conda-cpp-tests.yaml@branch-23.04 with: build_type: nightly branch: ${{ inputs.branch }} @@ -24,7 +24,7 @@ jobs: sha: ${{ inputs.sha }} python-tests: secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/conda-python-tests.yaml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/conda-python-tests.yaml@branch-23.04 with: build_type: nightly branch: ${{ inputs.branch }} @@ -32,7 +32,7 @@ jobs: sha: ${{ inputs.sha }} wheel-tests: secrets: inherit - uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-test.yml@branch-23.02 + uses: rapidsai/shared-action-workflows/.github/workflows/wheels-manylinux-test.yml@branch-23.04 with: build_type: nightly branch: ${{ inputs.branch }} From 91d5cbdc70121c8e1d4f71bf4b9401613b855cb6 Mon Sep 17 00:00:00 2001 From: Ajay Thorve Date: Wed, 8 Feb 2023 13:17:51 -0800 Subject: [PATCH 04/25] Reduce error handling verbosity in CI tests scripts (#1204) This PR adds a less verbose [trap method](https://github.com/rapidsai/cugraph/blob/f2b081075704aabc789603e14ce552eac3fbe692/ci/test.sh#L19), for error handling to help ensure that we capture all potential error codes in our test scripts, and works as follows: - setting an environment variable, EXITCODE, with a default value of 0 - setting a trap statement triggered by ERR signals which will set EXITCODE=1 when any commands return a non-zero exit code cc @ajschmidt8 Authors: - 
Ajay Thorve (https://github.com/AjayThorve) - AJ Schmidt (https://github.com/ajschmidt8) Approvers: - AJ Schmidt (https://github.com/ajschmidt8) URL: https://github.com/rapidsai/rmm/pull/1204 --- ci/build_cpp.sh | 1 + ci/build_docs.sh | 2 ++ ci/build_python.sh | 1 + ci/check_style.sh | 2 +- ci/test_cpp.sh | 13 +++++-------- ci/test_python.sh | 14 +++++--------- 6 files changed, 15 insertions(+), 18 deletions(-) diff --git a/ci/build_cpp.sh b/ci/build_cpp.sh index 6f8a1375c..bc6e18021 100755 --- a/ci/build_cpp.sh +++ b/ci/build_cpp.sh @@ -1,4 +1,5 @@ #!/bin/bash +# Copyright (c) 2020-2023, NVIDIA CORPORATION. set -euo pipefail diff --git a/ci/build_docs.sh b/ci/build_docs.sh index 89a85fad2..dc7d54493 100755 --- a/ci/build_docs.sh +++ b/ci/build_docs.sh @@ -1,4 +1,6 @@ #!/bin/bash +# Copyright (c) 2020-2023, NVIDIA CORPORATION. + set -euo pipefail rapids-logger "Create test conda environment" diff --git a/ci/build_python.sh b/ci/build_python.sh index ae4294cc5..b306d3e47 100755 --- a/ci/build_python.sh +++ b/ci/build_python.sh @@ -1,4 +1,5 @@ #!/bin/bash +# Copyright (c) 2020-2023, NVIDIA CORPORATION. set -euo pipefail diff --git a/ci/check_style.sh b/ci/check_style.sh index 66f961d41..82715f5ff 100755 --- a/ci/check_style.sh +++ b/ci/check_style.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright (c) 2020, NVIDIA CORPORATION. +# Copyright (c) 2020-2023, NVIDIA CORPORATION. set -euo pipefail diff --git a/ci/test_cpp.sh b/ci/test_cpp.sh index 257e015fd..46022f8f2 100755 --- a/ci/test_cpp.sh +++ b/ci/test_cpp.sh @@ -1,5 +1,5 @@ #!/bin/bash - +# Copyright (c) 2020-2023, NVIDIA CORPORATION. set -euo pipefail . /opt/conda/etc/profile.d/conda.sh @@ -23,21 +23,18 @@ rapids-mamba-retry install \ RAPIDS_TESTS_DIR=${RAPIDS_TESTS_DIR:-"${PWD}/test-results"} mkdir -p "${RAPIDS_TESTS_DIR}" -SUITEERROR=0 rapids-logger "Check GPU usage" nvidia-smi +EXITCODE=0 +trap "EXITCODE=1" ERR set +e rapids-logger "Running googletests" for gt in "$CONDA_PREFIX/bin/gtests/librmm/"* ; do ${gt} --gtest_output=xml:${RAPIDS_TESTS_DIR}/ - exitcode=$? - if (( ${exitcode} != 0 )); then - SUITEERROR=${exitcode} - echo "FAILED: GTest ${gt}" - fi done -exit ${SUITEERROR} +rapids-logger "Test script exiting with value: $EXITCODE" +exit ${EXITCODE} diff --git a/ci/test_python.sh b/ci/test_python.sh index a8b446ca2..d8c1fdbce 100755 --- a/ci/test_python.sh +++ b/ci/test_python.sh @@ -1,5 +1,5 @@ #!/bin/bash - +# Copyright (c) 2020-2023, NVIDIA CORPORATION. set -euo pipefail rapids-logger "Create test conda environment" @@ -27,13 +27,14 @@ rapids-mamba-retry install \ RAPIDS_TESTS_DIR=${RAPIDS_TESTS_DIR:-"${PWD}/test-results"} RAPIDS_COVERAGE_DIR=${RAPIDS_COVERAGE_DIR:-"${PWD}/coverage-results"} mkdir -p "${RAPIDS_TESTS_DIR}" "${RAPIDS_COVERAGE_DIR}" -SUITEERROR=0 rapids-logger "Check GPU usage" nvidia-smi cd python +EXITCODE=0 +trap "EXITCODE=1" ERR set +e rapids-logger "pytest rmm" @@ -46,10 +47,5 @@ pytest \ --cov-report=xml:"${RAPIDS_COVERAGE_DIR}/rmm-coverage.xml" \ --cov-report term -exitcode=$? -if (( ${exitcode} != 0 )); then - SUITEERROR=${exitcode} - echo "FAILED: 1 or more tests in /rmm/python" -fi - -exit ${SUITEERROR} +rapids-logger "Test script exiting with value: $EXITCODE" +exit ${EXITCODE} From db924930a5ffc129a7dd4568d4265e9c360e24b5 Mon Sep 17 00:00:00 2001 From: Bradley Dice Date: Mon, 13 Feb 2023 12:14:59 -0600 Subject: [PATCH 05/25] Update to spdlog>=1.11.0, fmt>=9.1.0. (#1177) Updates to `spdlog>=1.11.0` and `fmt>=9.1.0`. Also resolves some issues with spdlog in the librmm conda packages. 
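One consequence of moving to an external fmt >= 9 is that types which are only printable via `operator<<` no longer format automatically; they need an explicit opt-in, which is why `logger.hpp` in this change gains an `ostream_formatter`-based `fmt::formatter` specialization. The following is a minimal, self-contained sketch of that pattern only — the `demo::widget` type is hypothetical and not part of this diff or of RMM:

```cpp
// Sketch (not RMM code) of the fmt >= 9 opt-in used in logger.hpp: a type
// that is only printable via operator<< must declare an explicit
// fmt::formatter specialization based on fmt::ostream_formatter before it
// can be used with fmt/spdlog format strings.
#include <fmt/core.h>
#include <fmt/ostream.h>

#include <ostream>

namespace demo {
struct widget {
  int id;
};

inline std::ostream& operator<<(std::ostream& os, widget const& w)
{
  return os << "widget#" << w.id;
}
}  // namespace demo

// Without this specialization, fmt >= 9 rejects demo::widget at compile time.
template <>
struct fmt::formatter<demo::widget> : fmt::ostream_formatter {};

int main()
{
  fmt::print("{}\n", demo::widget{42});  // prints "widget#42"
}
```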
Thanks @robertmaynard for helping advise me on this PR. **We need to test this downstream before merging.** Perhaps with cuML or some other library. Authors: - Bradley Dice (https://github.com/bdice) - Keith Kraus (https://github.com/kkraus14) Approvers: - Robert Maynard (https://github.com/robertmaynard) - Mark Harris (https://github.com/harrism) - Keith Kraus (https://github.com/kkraus14) - AJ Schmidt (https://github.com/ajschmidt8) - Vyas Ramasubramani (https://github.com/vyasr) URL: https://github.com/rapidsai/rmm/pull/1177 --- CMakeLists.txt | 8 ++++++- cmake/thirdparty/get_fmt.cmake | 22 +++++++++++++++++++ cmake/thirdparty/get_spdlog.cmake | 4 ++-- .../all_cuda-118_arch-x86_64.yaml | 3 ++- conda/recipes/librmm/conda_build_config.yaml | 5 ++++- conda/recipes/librmm/meta.yaml | 10 ++++++++- dependencies.yaml | 3 ++- include/rmm/logger.hpp | 14 ++++++------ .../rmm/mr/device/arena_memory_resource.hpp | 1 - include/rmm/mr/device/detail/arena.hpp | 4 ++-- .../mr/device/detail/coalescing_free_list.hpp | 6 +++-- .../detail/stream_ordered_memory_resource.hpp | 4 +++- .../mr/device/logging_resource_adaptor.hpp | 3 ++- .../rmm/mr/device/pool_memory_resource.hpp | 4 +++- .../mr/device/tracking_resource_adaptor.hpp | 4 +++- python/CMakeLists.txt | 4 ++++ tests/mr/device/callback_mr_tests.cpp | 4 ++-- 17 files changed, 78 insertions(+), 25 deletions(-) create mode 100644 cmake/thirdparty/get_fmt.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index 4cbb6c09b..b33ef5ad2 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,5 +1,5 @@ # ============================================================================= -# Copyright (c) 2018-2021, NVIDIA CORPORATION. +# Copyright (c) 2018-2023, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except # in compliance with the License. You may obtain a copy of the License at @@ -14,6 +14,10 @@ cmake_minimum_required(VERSION 3.23.1 FATAL_ERROR) +# TODO(keith): REMOVE BEFORE MERGING +set(rapids-cmake-repo "kkraus14/rapids-cmake") +set(rapids-cmake-branch "spdlog_1.11_fmt") + if(NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/RMM_RAPIDS.cmake) file(DOWNLOAD https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-23.04/RAPIDS.cmake ${CMAKE_CURRENT_BINARY_DIR}/RMM_RAPIDS.cmake) @@ -58,6 +62,7 @@ rapids_find_package( BUILD_EXPORT_SET rmm-exports INSTALL_EXPORT_SET rmm-exports) rapids_cpm_init() +include(cmake/thirdparty/get_fmt.cmake) include(cmake/thirdparty/get_spdlog.cmake) include(cmake/thirdparty/get_thrust.cmake) @@ -77,6 +82,7 @@ else() endif() target_link_libraries(rmm INTERFACE rmm::Thrust) +target_link_libraries(rmm INTERFACE fmt::fmt-header-only) target_link_libraries(rmm INTERFACE spdlog::spdlog_header_only) target_link_libraries(rmm INTERFACE dl) target_compile_features(rmm INTERFACE cxx_std_17 $) diff --git a/cmake/thirdparty/get_fmt.cmake b/cmake/thirdparty/get_fmt.cmake new file mode 100644 index 000000000..5787fb73f --- /dev/null +++ b/cmake/thirdparty/get_fmt.cmake @@ -0,0 +1,22 @@ +# ============================================================================= +# Copyright (c) 2023, NVIDIA CORPORATION. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except +# in compliance with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software distributed under the License +# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express +# or implied. See the License for the specific language governing permissions and limitations under +# the License. +# ============================================================================= + +# Use CPM to find or clone fmt +function(find_and_configure_fmt) + + include(${rapids-cmake-dir}/cpm/fmt.cmake) + rapids_cpm_fmt(INSTALL_EXPORT_SET rmm-exports BUILD_EXPORT_SET rmm-exports) +endfunction() + +find_and_configure_fmt() diff --git a/cmake/thirdparty/get_spdlog.cmake b/cmake/thirdparty/get_spdlog.cmake index f78ae0262..24bbea89d 100644 --- a/cmake/thirdparty/get_spdlog.cmake +++ b/cmake/thirdparty/get_spdlog.cmake @@ -1,5 +1,5 @@ # ============================================================================= -# Copyright (c) 2021, NVIDIA CORPORATION. +# Copyright (c) 2021-2023, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except # in compliance with the License. You may obtain a copy of the License at @@ -16,7 +16,7 @@ function(find_and_configure_spdlog) include(${rapids-cmake-dir}/cpm/spdlog.cmake) - rapids_cpm_spdlog(INSTALL_EXPORT_SET rmm-exports) + rapids_cpm_spdlog(FMT_OPTION "EXTERNAL_FMT_HO" INSTALL_EXPORT_SET rmm-exports) rapids_export_package(BUILD spdlog rmm-exports) if(spdlog_ADDED) diff --git a/conda/environments/all_cuda-118_arch-x86_64.yaml b/conda/environments/all_cuda-118_arch-x86_64.yaml index e075564df..499f5dbec 100644 --- a/conda/environments/all_cuda-118_arch-x86_64.yaml +++ b/conda/environments/all_cuda-118_arch-x86_64.yaml @@ -8,6 +8,7 @@ dependencies: - cuda-python>=11.7.1,<12.0 - cudatoolkit=11.8 - cython>=0.29,<0.30 +- fmt>=9.1.0,<10 - gcovr>=5.0 - ninja - numba>=0.49 @@ -17,5 +18,5 @@ dependencies: - pytest-cov - python>=3.8,<3.11 - scikit-build>=0.13.1 -- spdlog>=1.8.5,<1.9 +- spdlog>=1.11.0,<1.12 name: all_cuda-118_arch-x86_64 diff --git a/conda/recipes/librmm/conda_build_config.yaml b/conda/recipes/librmm/conda_build_config.yaml index ce0bbeec5..39dec9e5c 100644 --- a/conda/recipes/librmm/conda_build_config.yaml +++ b/conda/recipes/librmm/conda_build_config.yaml @@ -10,11 +10,14 @@ cuda_compiler: cmake_version: - ">=3.23.1,!=3.25.0" +fmt_version: + - ">=9.1.0,<10" + gtest_version: - "=1.10.0" spdlog_version: - - ">=1.8.5,<1.9" + - ">=1.11.0,<1.12" sysroot_version: - "2.17" diff --git a/conda/recipes/librmm/meta.yaml b/conda/recipes/librmm/meta.yaml index 61174c7b7..dd3e58b62 100644 --- a/conda/recipes/librmm/meta.yaml +++ b/conda/recipes/librmm/meta.yaml @@ -21,7 +21,14 @@ requirements: - {{ compiler('cuda') }} {{ cuda_version }} - sysroot_{{ target_platform }} {{ sysroot_version }} host: - - cudatoolkit {{ cuda_version }}.* + - cudatoolkit ={{ cuda_version }} + # We require spdlog and fmt (which was devendored from spdlog + # conda-forge packages in 1.11.0) so that the spdlog headers are not + # pulled by CPM and installed as a part of the rmm packages. However, + # building against librmm still requires these headers. They are also + # added as a run requirement via the packages' run_exports. 
+ - fmt {{ fmt_version }} + - spdlog {{ spdlog_version }} build: script_env: @@ -54,6 +61,7 @@ outputs: - cmake {{ cmake_version }} run: - cudatoolkit {{ cuda_spec }} + - fmt {{ fmt_version }} - spdlog {{ spdlog_version }} test: commands: diff --git a/dependencies.yaml b/dependencies.yaml index 4c7a1f524..879892ba3 100644 --- a/dependencies.yaml +++ b/dependencies.yaml @@ -49,7 +49,8 @@ dependencies: - scikit-build>=0.13.1 - output_types: conda packages: - - spdlog>=1.8.5,<1.9 + - fmt>=9.1.0,<10 + - spdlog>=1.11.0,<1.12 checks: common: - output_types: [conda, requirements] diff --git a/include/rmm/logger.hpp b/include/rmm/logger.hpp index 8109eb888..318535a4e 100644 --- a/include/rmm/logger.hpp +++ b/include/rmm/logger.hpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2021, NVIDIA CORPORATION. + * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,12 +16,8 @@ #pragma once -// If using GCC, temporary workaround for older libcudacxx defining _LIBCPP_VERSION -// undefine it before including spdlog, due to fmtlib checking if it is defined -// TODO: remove once libcudacxx is on Github and RAPIDS depends on it -#ifdef __GNUG__ -#undef _LIBCPP_VERSION -#endif +#include +#include #include #include @@ -115,3 +111,7 @@ inline spdlog::logger& logger() #define RMM_LOG_CRITICAL(...) SPDLOG_LOGGER_CRITICAL(&rmm::logger(), __VA_ARGS__) } // namespace rmm + +template <> +struct fmt::formatter : fmt::ostream_formatter { +}; diff --git a/include/rmm/mr/device/arena_memory_resource.hpp b/include/rmm/mr/device/arena_memory_resource.hpp index 1b1043b4a..b007d8f54 100644 --- a/include/rmm/mr/device/arena_memory_resource.hpp +++ b/include/rmm/mr/device/arena_memory_resource.hpp @@ -23,7 +23,6 @@ #include #include -#include #include #include diff --git a/include/rmm/mr/device/detail/arena.hpp b/include/rmm/mr/device/detail/arena.hpp index c0e5df377..493fb6cb1 100644 --- a/include/rmm/mr/device/detail/arena.hpp +++ b/include/rmm/mr/device/detail/arena.hpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019-2022, NVIDIA CORPORATION. + * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,8 +24,8 @@ #include +#include #include -#include #include #include diff --git a/include/rmm/mr/device/detail/coalescing_free_list.hpp b/include/rmm/mr/device/detail/coalescing_free_list.hpp index 8c5db7b02..d98ef7968 100644 --- a/include/rmm/mr/device/detail/coalescing_free_list.hpp +++ b/include/rmm/mr/device/detail/coalescing_free_list.hpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019-2021, NVIDIA CORPORATION. + * Copyright (c) 2019-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,14 +16,16 @@ #pragma once -#include #include #include +#include + #include #include #include #include +#include #include namespace rmm::mr::detail { diff --git a/include/rmm/mr/device/detail/stream_ordered_memory_resource.hpp b/include/rmm/mr/device/detail/stream_ordered_memory_resource.hpp index 50495cb17..fa53e480a 100644 --- a/include/rmm/mr/device/detail/stream_ordered_memory_resource.hpp +++ b/include/rmm/mr/device/detail/stream_ordered_memory_resource.hpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2021, NVIDIA CORPORATION. + * Copyright (c) 2020-2023, NVIDIA CORPORATION. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,6 +20,8 @@ #include #include +#include + #include #include diff --git a/include/rmm/mr/device/logging_resource_adaptor.hpp b/include/rmm/mr/device/logging_resource_adaptor.hpp index 60fd0d366..ee887f5b9 100644 --- a/include/rmm/mr/device/logging_resource_adaptor.hpp +++ b/include/rmm/mr/device/logging_resource_adaptor.hpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2021, NVIDIA CORPORATION. + * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,6 +19,7 @@ #include #include +#include #include #include #include diff --git a/include/rmm/mr/device/pool_memory_resource.hpp b/include/rmm/mr/device/pool_memory_resource.hpp index 60ab60cf4..297b3f864 100644 --- a/include/rmm/mr/device/pool_memory_resource.hpp +++ b/include/rmm/mr/device/pool_memory_resource.hpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2021, NVIDIA CORPORATION. + * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -29,6 +29,8 @@ #include #include +#include + #include #include diff --git a/include/rmm/mr/device/tracking_resource_adaptor.hpp b/include/rmm/mr/device/tracking_resource_adaptor.hpp index fbcb44898..f1996ba01 100644 --- a/include/rmm/mr/device/tracking_resource_adaptor.hpp +++ b/include/rmm/mr/device/tracking_resource_adaptor.hpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2021, NVIDIA CORPORATION. + * Copyright (c) 2020-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,6 +19,8 @@ #include #include +#include + #include #include #include diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 705f40ac7..8bea660d8 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -16,6 +16,10 @@ cmake_minimum_required(VERSION 3.23.1 FATAL_ERROR) set(rmm_version 23.04.00) +# TODO(keith): REMOVE BEFORE MERGING +set(rapids-cmake-repo "kkraus14/rapids-cmake") +set(rapids-cmake-branch "spdlog_1.11_fmt") + file(DOWNLOAD https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-23.04/RAPIDS.cmake ${CMAKE_BINARY_DIR}/RAPIDS.cmake) include(${CMAKE_BINARY_DIR}/RAPIDS.cmake) diff --git a/tests/mr/device/callback_mr_tests.cpp b/tests/mr/device/callback_mr_tests.cpp index 101a75fc8..95dac93ec 100644 --- a/tests/mr/device/callback_mr_tests.cpp +++ b/tests/mr/device/callback_mr_tests.cpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022, NVIDIA CORPORATION. + * Copyright (c) 2022-2023, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,7 +24,7 @@ #include -#include +#include #include #include From c9c83abb3c5b8285de98aad9c0ebe4f988015029 Mon Sep 17 00:00:00 2001 From: Bradley Dice Date: Mon, 13 Feb 2023 12:49:04 -0600 Subject: [PATCH 06/25] Revert changes overriding rapids-cmake repo. (#1209) PR #1177 was merged a little too early when CI passed due to the presence of a `/merge` comment and sufficient approvals. This reverts a temporary change to the rapids-cmake repo that is no longer needed because https://github.com/rapidsai/rapids-cmake/pull/368 has been merged. 
Authors: - Bradley Dice (https://github.com/bdice) Approvers: - Robert Maynard (https://github.com/robertmaynard) URL: https://github.com/rapidsai/rmm/pull/1209 --- CMakeLists.txt | 4 ---- python/CMakeLists.txt | 4 ---- 2 files changed, 8 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index b33ef5ad2..fcc0f5c90 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -14,10 +14,6 @@ cmake_minimum_required(VERSION 3.23.1 FATAL_ERROR) -# TODO(keith): REMOVE BEFORE MERGING -set(rapids-cmake-repo "kkraus14/rapids-cmake") -set(rapids-cmake-branch "spdlog_1.11_fmt") - if(NOT EXISTS ${CMAKE_CURRENT_BINARY_DIR}/RMM_RAPIDS.cmake) file(DOWNLOAD https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-23.04/RAPIDS.cmake ${CMAKE_CURRENT_BINARY_DIR}/RMM_RAPIDS.cmake) diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 8bea660d8..705f40ac7 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -16,10 +16,6 @@ cmake_minimum_required(VERSION 3.23.1 FATAL_ERROR) set(rmm_version 23.04.00) -# TODO(keith): REMOVE BEFORE MERGING -set(rapids-cmake-repo "kkraus14/rapids-cmake") -set(rapids-cmake-branch "spdlog_1.11_fmt") - file(DOWNLOAD https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-23.04/RAPIDS.cmake ${CMAKE_BINARY_DIR}/RAPIDS.cmake) include(${CMAKE_BINARY_DIR}/RAPIDS.cmake) From 82e184fe21c6134a05961d6d9e560b65efefd8c6 Mon Sep 17 00:00:00 2001 From: Vyas Ramasubramani Date: Thu, 16 Feb 2023 12:10:12 -0800 Subject: [PATCH 07/25] Stop using versioneer to manage versions (#1190) This PR replaces usage of versioneer with hard-coded version numbers in setup.py and __init__.py. Since rmm needs to manage versions across a wide range of file types (CMake, C++, Sphinx and doxygen docs, etc), versioneer cannot be relied on as a single source of truth and therefore does not allow us to single-source our versioning to the Git repo as is intended. Additionally, since the primary means of installing rmm is via conda packages (or now, pip packages), information from the package manager tends to be far more informative than the version strings for troubleshooting and debugging purposes. Conversely, the nonstandard version strings that it produces tend to be problematic for other tools, which at best will ignore such versions but at worst will simply fail. 
Relies on https://github.com/rapidsai/shared-action-workflows/pull/38 Authors: - Vyas Ramasubramani (https://github.com/vyasr) Approvers: - Bradley Dice (https://github.com/bdice) - Sevag H (https://github.com/sevagh) URL: https://github.com/rapidsai/rmm/pull/1190 --- .gitattributes | 1 - .github/workflows/build.yaml | 1 + .github/workflows/pr.yaml | 1 + ci/check_style.sh | 2 +- ci/release/apply_wheel_modifications.sh | 9 + ci/release/update-version.sh | 2 + python/rmm/__init__.py | 5 +- python/rmm/_version.py | 566 ------- python/setup.cfg | 12 - python/setup.py | 16 +- python/versioneer.py | 1904 ----------------------- 11 files changed, 16 insertions(+), 2503 deletions(-) delete mode 100644 .gitattributes create mode 100755 ci/release/apply_wheel_modifications.sh delete mode 100644 python/rmm/_version.py delete mode 100644 python/versioneer.py diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index 723c5b8b1..000000000 --- a/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -python/rmm/_version.py export-subst diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index ef346a186..31a1bd35f 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -74,6 +74,7 @@ jobs: package-name: rmm package-dir: python skbuild-configure-options: "-DRMM_BUILD_WHEELS=ON" + uses-versioneer: false wheel-publish: needs: wheel-build secrets: inherit diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index d8f0b0d3c..858c02691 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -68,6 +68,7 @@ jobs: package-dir: python package-name: rmm skbuild-configure-options: "-DRMM_BUILD_WHEELS=ON" + uses-versioneer: false wheel-tests: needs: wheel-build secrets: inherit diff --git a/ci/check_style.sh b/ci/check_style.sh index 82715f5ff..f073ff969 100755 --- a/ci/check_style.sh +++ b/ci/check_style.sh @@ -14,7 +14,7 @@ rapids-dependency-file-generator \ rapids-mamba-retry env create --force -f env.yaml -n checks conda activate checks -FORMAT_FILE_URL=https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-23.02/cmake-format-rapids-cmake.json +FORMAT_FILE_URL=https://raw.githubusercontent.com/rapidsai/rapids-cmake/branch-23.04/cmake-format-rapids-cmake.json export RAPIDS_CMAKE_FORMAT_FILE=/tmp/rapids_cmake_ci/cmake-formats-rapids-cmake.json mkdir -p $(dirname ${RAPIDS_CMAKE_FORMAT_FILE}) wget -O ${RAPIDS_CMAKE_FORMAT_FILE} ${FORMAT_FILE_URL} diff --git a/ci/release/apply_wheel_modifications.sh b/ci/release/apply_wheel_modifications.sh new file mode 100755 index 000000000..aad6ae84f --- /dev/null +++ b/ci/release/apply_wheel_modifications.sh @@ -0,0 +1,9 @@ +#!/bin/bash +# Copyright (c) 2023, NVIDIA CORPORATION. 
+# +# Usage: bash apply_wheel_modifications.sh + +VERSION=${1} + +sed -i "s/__version__ = .*/__version__ = \"${VERSION}\"/g" python/rmm/__init__.py +sed -i "s/version=.*,/version=\"${VERSION}\",/g" python/setup.py diff --git a/ci/release/update-version.sh b/ci/release/update-version.sh index 257a013b9..c028a8eaf 100755 --- a/ci/release/update-version.sh +++ b/ci/release/update-version.sh @@ -36,6 +36,8 @@ sed_runner 's/'"branch-.*\/RAPIDS.cmake"'/'"branch-${NEXT_SHORT_TAG}\/RAPIDS.cma # Python update sed_runner 's/'"rmm_version .*)"'/'"rmm_version ${NEXT_FULL_TAG})"'/g' python/CMakeLists.txt sed_runner 's/'"branch-.*\/RAPIDS.cmake"'/'"branch-${NEXT_SHORT_TAG}\/RAPIDS.cmake"'/g' python/CMakeLists.txt +sed_runner "s/__version__ = .*/__version__ = \"${NEXT_FULL_TAG}\"/g" python/rmm/__init__.py +sed_runner "s/version=.*,/version=\"${NEXT_FULL_TAG}\",/g" python/setup.py # cmake-format rapids-cmake definitions sed_runner 's/'"branch-.*\/cmake-format-rapids-cmake.json"'/'"branch-${NEXT_SHORT_TAG}\/cmake-format-rapids-cmake.json"'/g' ci/check_style.sh diff --git a/python/rmm/__init__.py b/python/rmm/__init__.py index 9fb13fe73..e2d8f57d7 100644 --- a/python/rmm/__init__.py +++ b/python/rmm/__init__.py @@ -11,11 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import weakref from rmm import mr from rmm._lib.device_buffer import DeviceBuffer -from rmm._version import get_versions from rmm.mr import disable_logging, enable_logging, get_log_filenames from rmm.rmm import ( RMMError, @@ -36,7 +34,6 @@ "disable_logging", "enable_logging", "get_log_filenames", - "get_versions", "is_initialized", "mr", "register_reinitialize_hook", @@ -45,4 +42,4 @@ "unregister_reinitialize_hook", ] -__version__ = get_versions()["version"] +__version__ = "23.04.00" diff --git a/python/rmm/_version.py b/python/rmm/_version.py deleted file mode 100644 index 0dd28467b..000000000 --- a/python/rmm/_version.py +++ /dev/null @@ -1,566 +0,0 @@ -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "pep440" - cfg.tag_prefix = "v" - cfg.parentdir_prefix = "rmm-" - cfg.versionfile_source = "rmm/_version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - - return decorate - - -def run_command( - commands, args, cwd=None, verbose=False, hide_stderr=False, env=None -): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - ) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print( - "Tried directories %s but none started with prefix %s" - % (str(rootdirs), parentdir_prefix) - ) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] - if verbose: - print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command( - GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True - ) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - "%s*" % tag_prefix, - ], - cwd=root, - ) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = ( - "unable to parse git-describe output: '%s'" % describe_out - ) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command( - GITS, ["rev-list", "HEAD", "--count"], cwd=root - ) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ - 0 - ].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Eexceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords( - get_keywords(), cfg.tag_prefix, verbose - ) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. - for i in cfg.versionfile_source.split("/"): - root = os.path.dirname(root) - except NameError: - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None, - } - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } diff --git a/python/setup.cfg b/python/setup.cfg index 7a1f5c819..d5b0510b1 100644 --- a/python/setup.cfg +++ b/python/setup.cfg @@ -1,17 +1,5 @@ # Copyright (c) 2018-2021, NVIDIA CORPORATION. -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. 
- - -[versioneer] -VCS = git -style = pep440 -versionfile_source = rmm/_version.py -versionfile_build = rmm/_version.py -tag_prefix = v -parentdir_prefix = rmm- [flake8] filename = *.py, *.pyx, *.pxd, *.pxi diff --git a/python/setup.py b/python/setup.py index 2adcf53b1..a60f87e75 100644 --- a/python/setup.py +++ b/python/setup.py @@ -2,25 +2,12 @@ import os -import versioneer from setuptools import find_packages from skbuild import setup -if "RAPIDS_PY_WHEEL_VERSIONEER_OVERRIDE" in os.environ: - orig_get_versions = versioneer.get_versions - - version_override = os.environ["RAPIDS_PY_WHEEL_VERSIONEER_OVERRIDE"] - - def get_versions(): - data = orig_get_versions() - data["version"] = version_override - return data - - versioneer.get_versions = get_versions - setup( name="rmm" + os.getenv("RAPIDS_PY_WHEEL_CUDA_SUFFIX", default=""), - version=versioneer.get_version(), + version="23.04.00", description="rmm - RAPIDS Memory Manager", url="https://github.com/rapidsai/rmm", author="NVIDIA Corporation", @@ -49,7 +36,6 @@ def get_versions(): include=["rmm._lib", "rmm._lib.includes", "rmm._cuda*"] ) }, - cmdclass=versioneer.get_cmdclass(), install_requires=[ "cuda-python>=11.7.1,<12.0", "numpy>=1.19", diff --git a/python/versioneer.py b/python/versioneer.py deleted file mode 100644 index 72e29eaae..000000000 --- a/python/versioneer.py +++ /dev/null @@ -1,1904 +0,0 @@ -# Version: 0.18 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/warner/python-versioneer -* Brian Warner -* License: Public Domain -* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy -* [![Latest Version] -(https://pypip.in/version/versioneer/badge.svg?style=flat) -](https://pypi.python.org/pypi/versioneer/) -* [![Build Status] -(https://travis-ci.org/warner/python-versioneer.png?branch=master) -](https://travis-ci.org/warner/python-versioneer) - -This is a tool for managing a recorded version number in distutils-based -python projects. The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. - - -## Quick Install - -* `pip install versioneer` to somewhere to your $PATH -* add a `[versioneer]` section to your setup.cfg (see below) -* run `versioneer install` in your source tree, commit the results - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. 
"myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes. - -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. - -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. - -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. 
Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from cudf._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/warner/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other langauges) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking -this issue. 
The discussion in -[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. - -[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes -this one, but upgrading to a newer version of setuptools should probably -resolve it. - -### Unicode version strings - -While Versioneer works (and is continually tested) with both Python 2 and -Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. -Newer releases probably generate unicode version strings on py2. It's not -clear that this is wrong, but it may be surprising for applications when then -write these strings to a network connection or include them in bytes-oriented -APIs like cryptographic checksums. - -[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates -this question. - - -## Updating Versioneer - -To upgrade your project to a new release of Versioneer, do the following: - -* install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg`, if necessary, to include any new configuration settings - indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. -* re-run `versioneer install` in your source tree, to replace - `SRC/_version.py` -* commit any changed files - -## Future Directions - -This tool is designed to make it easily extended to other version-control -systems: all VCS-specific components are in separate directories like -src/git/ . The top-level `versioneer.py` script is assembled from these -components by running make-versioneer.py . In the future, make-versioneer.py -will take a VCS name as an argument, and will construct a version of -`versioneer.py` that is specific to the given VCS. It might also take the -configuration arguments that are currently provided manually during -installation by editing setup.py . Alternatively, it might go the other -direction and include code from all supported VCS systems, reducing the -number of intermediate scripts. - - -## License - -To make Versioneer easier to embed, all its code is dedicated to the public -domain. The `_version.py` that it creates is also in the public domain. 
-Specifically, both are released under the Creative Commons "Public Domain -Dedication" license (CC0-1.0), as described in -https://creativecommons.org/publicdomain/zero/1.0/ . - -""" - -from __future__ import print_function - -import errno -import json -import os -import re -import subprocess -import sys - -try: - import configparser -except ImportError: - import ConfigParser as configparser - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . - """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ( - "Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND')." - ) - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. - me = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(me)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir: - print( - "Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(me), versioneer_py) - ) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise EnvironmentError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg . 
- setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.SafeConfigParser() - with open(setup_cfg, "r") as f: - parser.readfp(f) - VCS = parser.get("versioneer", "VCS") # mandatory - - def get(parser, name): - if parser.has_option("versioneer", name): - return parser.get("versioneer", name) - return None - - cfg = VersioneerConfig() - cfg.VCS = VCS - cfg.style = get(parser, "style") or "" - cfg.versionfile_source = get(parser, "versionfile_source") - cfg.versionfile_build = get(parser, "versionfile_build") - cfg.tag_prefix = get(parser, "tag_prefix") - if cfg.tag_prefix in ("''", '""'): - cfg.tag_prefix = "" - cfg.parentdir_prefix = get(parser, "parentdir_prefix") - cfg.verbose = get(parser, "verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - - return decorate - - -def run_command( - commands, args, cwd=None, verbose=False, hide_stderr=False, env=None -): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - ) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode - - -LONG_VERSION_PY[ - "git" -] = r''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - p = None - for c in commands: - try: - dispcmd = str([c] + args) - # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen([c] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, p.returncode - return stdout, p.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%%s*" %% tag_prefix], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], - cwd=root)[0].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%%d" %% pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Eexceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. - for i in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") - date = keywords.get("date") - if date is not None: - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] - if verbose: - print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - out, rc = run_command( - GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True - ) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - "%s*" % tag_prefix, - ], - cwd=root, - ) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) - if not mo: - # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = ( - "unable to parse git-describe output: '%s'" % describe_out - ) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = run_command( - GITS, ["rev-list", "HEAD", "--count"], cwd=root - ) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ - 0 - ].strip() - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(manifest_in, versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [manifest_in, versionfile_source] - if ipy: - files.append(ipy) - try: - me = __file__ - if me.endswith(".pyc") or me.endswith(".pyo"): - me = os.path.splitext(me)[0] + ".py" - versioneer_file = os.path.relpath(me) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - f = open(".gitattributes", "r") - for line in f.readlines(): - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - f.close() - except EnvironmentError: - pass - if not present: - f = open(".gitattributes", "a+") - f.write("%s export-subst\n" % versionfile_source) - f.close() - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for i in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print( - "Tried directories %s but none started with prefix %s" - % (str(rootdirs), parentdir_prefix) - ) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.18) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. - -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except EnvironmentError: - raise NotThisMethod("unable to read _version.py") - mo = re.search( - r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, - re.M | re.S, - ) - if not mo: - mo = re.search( - r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, - re.M | re.S, - ) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps( - versions, sort_keys=True, indent=1, separators=(",", ": ") - ) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] - else: - # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Eexceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. - """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert ( - cfg.versionfile_source is not None - ), "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. 
- - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - print("unable to compute version") - - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(): - """Get the custom setuptools/distutils subclasses used by Versioneer.""" - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to it's pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. - # Also see https://github.com/warner/python-versioneer/issues/52 - - cmds = {} - - # we add "version" to both distutils and setuptools - from distutils.core import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - - cmds["version"] = cmd_version - - # we override "build_py" in both distutils and setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? 
- # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? - - # we override different "build_py" commands for both environments - if "setuptools" in sys.modules: - from setuptools.command.build_py import build_py as _build_py - else: - from distutils.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join( - self.build_lib, cfg.versionfile_build - ) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - cmds["build_py"] = cmd_build_py - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? - from cx_Freeze.dist import build_exe as _build_exe - - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. - # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if "py2exe" in sys.modules: # py2exe enabled? 
- try: - from py2exe.distutils_buildexe import py2exe as _py2exe # py3 - except ImportError: - from py2exe.build_exe import py2exe as _py2exe # py2 - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - cmds["py2exe"] = cmd_py2exe - - # we override different "sdist" commands for both environments - if "setuptools" in sys.modules: - from setuptools.command.sdist import sdist as _sdist - else: - from distutils.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file( - target_versionfile, self._versioneer_generated_versions - ) - - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. 
- -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -INIT_PY_SNIPPET = """ -from cudf._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - - -def do_setup(): - """Main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except ( - EnvironmentError, - configparser.NoSectionError, - configparser.NoOptionError, - ) as e: - if isinstance(e, (EnvironmentError, configparser.NoSectionError)): - print( - "Adding sample versioneer config to setup.cfg", file=sys.stderr - ) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except EnvironmentError: - old = "" - if INIT_PY_SNIPPET not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(INIT_PY_SNIPPET) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make sure both the top-level "versioneer.py" and versionfile_source - # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so - # they'll be copied into source distributions. Pip won't be able to - # install the package without this. - manifest_in = os.path.join(root, "MANIFEST.in") - simple_includes = set() - try: - with open(manifest_in, "r") as f: - for line in f: - if line.startswith("include "): - for include in line.split()[1:]: - simple_includes.add(include) - except EnvironmentError: - pass - # That doesn't cover everything MANIFEST.in can do - # (http://docs.python.org/2/distutils/sourcedist.html#commands), so - # it might give some false negatives. Appending redundant 'include' - # lines is safe, though. - if "versioneer.py" not in simple_includes: - print(" appending 'versioneer.py' to MANIFEST.in") - with open(manifest_in, "a") as f: - f.write("include versioneer.py\n") - else: - print(" 'versioneer.py' already in MANIFEST.in") - if cfg.versionfile_source not in simple_includes: - print( - " appending versionfile_source ('%s') to MANIFEST.in" - % cfg.versionfile_source - ) - with open(manifest_in, "a") as f: - f.write("include %s\n" % cfg.versionfile_source) - else: - print(" versionfile_source already in MANIFEST.in") - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
- do_vcs_install(manifest_in, cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - errors = do_setup() - errors += scan_setup_py() - if errors: - sys.exit(1) From 0cdd0ba2af8f6f5c7093588aeb8bb45288cb5c0b Mon Sep 17 00:00:00 2001 From: Jake Awe <50372925+AyodeAwe@users.noreply.github.com> Date: Fri, 17 Feb 2023 09:54:40 -0600 Subject: [PATCH 08/25] Skip docs job in nightly runs (#1215) This PR configures the branch workflow to skip the docs job during nightly runs. Authors: - Jake Awe (https://github.com/AyodeAwe) Approvers: - AJ Schmidt (https://github.com/ajschmidt8) URL: https://github.com/rapidsai/rmm/pull/1215 --- .github/workflows/build.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 31a1bd35f..37e928e01 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -53,7 +53,7 @@ jobs: date: ${{ inputs.date }} sha: ${{ inputs.sha }} docs-build: - if: ${{ startsWith(github.ref, 'refs/heads/branch-') }} + if: github.ref_type == 'branch' && github.event_name == 'push' needs: python-build secrets: inherit uses: rapidsai/shared-action-workflows/.github/workflows/custom-job.yaml@branch-23.04 From 0fd9626b00a666725b0b5310e53e07ff5bf4bf77 Mon Sep 17 00:00:00 2001 From: Vyas Ramasubramani Date: Fri, 17 Feb 2023 10:49:58 -0800 Subject: [PATCH 09/25] Use script rather than environment variable to modify package names (#1212) The package name defined in setup.py needs to be modified for wheels to reflect the CUDA version that the wheel was built for. Currently that modification is done via an environment variable that is pulled in setup.py code. This changeset replaces that approach with a direct modification using a script (similar to what is done for versions in #1190) to facilitate moving towards static project metadata specification via pyproject.toml. This PR depends on https://github.com/rapidsai/shared-action-workflows/pull/45. 
Authors: - Vyas Ramasubramani (https://github.com/vyasr) Approvers: - Sevag H (https://github.com/sevagh) - Ashwin Srinath (https://github.com/shwina) URL: https://github.com/rapidsai/rmm/pull/1212 --- .github/workflows/build.yaml | 2 +- .github/workflows/pr.yaml | 2 +- ci/release/apply_wheel_modifications.sh | 3 +++ python/setup.py | 4 +--- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 37e928e01..e5256cfe0 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -74,7 +74,7 @@ jobs: package-name: rmm package-dir: python skbuild-configure-options: "-DRMM_BUILD_WHEELS=ON" - uses-versioneer: false + uses-setup-env-vars: false wheel-publish: needs: wheel-build secrets: inherit diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 858c02691..240344bc1 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -68,7 +68,7 @@ jobs: package-dir: python package-name: rmm skbuild-configure-options: "-DRMM_BUILD_WHEELS=ON" - uses-versioneer: false + uses-setup-env-vars: false wheel-tests: needs: wheel-build secrets: inherit diff --git a/ci/release/apply_wheel_modifications.sh b/ci/release/apply_wheel_modifications.sh index aad6ae84f..c981cd162 100755 --- a/ci/release/apply_wheel_modifications.sh +++ b/ci/release/apply_wheel_modifications.sh @@ -4,6 +4,9 @@ # Usage: bash apply_wheel_modifications.sh VERSION=${1} +CUDA_SUFFIX=${2} sed -i "s/__version__ = .*/__version__ = \"${VERSION}\"/g" python/rmm/__init__.py sed -i "s/version=.*,/version=\"${VERSION}\",/g" python/setup.py + +sed -i "s/name=\"rmm\",/name=\"rmm${CUDA_SUFFIX}\",/g" python/setup.py diff --git a/python/setup.py b/python/setup.py index a60f87e75..221908184 100644 --- a/python/setup.py +++ b/python/setup.py @@ -1,12 +1,10 @@ # Copyright (c) 2019-2022, NVIDIA CORPORATION. -import os - from setuptools import find_packages from skbuild import setup setup( - name="rmm" + os.getenv("RAPIDS_PY_WHEEL_CUDA_SUFFIX", default=""), + name="rmm", version="23.04.00", description="rmm - RAPIDS Memory Manager", url="https://github.com/rapidsai/rmm", From f3113d9eb13d0795937dd3dd33a64c794f1a3661 Mon Sep 17 00:00:00 2001 From: Carl Simon Adorf Date: Tue, 21 Feb 2023 19:15:07 +0100 Subject: [PATCH 10/25] CI: Remove specification of manual stage for check_style.sh script. (#1214) Do not explicitly specify to run the "manual" stage when running pre-commits as part of the ci/check_style.sh script. 
Authors: - Carl Simon Adorf (https://github.com/csadorf) Approvers: - Vyas Ramasubramani (https://github.com/vyasr) - AJ Schmidt (https://github.com/ajschmidt8) URL: https://github.com/rapidsai/rmm/pull/1214 --- ci/check_style.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/check_style.sh b/ci/check_style.sh index f073ff969..f9bfea7b4 100755 --- a/ci/check_style.sh +++ b/ci/check_style.sh @@ -20,4 +20,4 @@ mkdir -p $(dirname ${RAPIDS_CMAKE_FORMAT_FILE}) wget -O ${RAPIDS_CMAKE_FORMAT_FILE} ${FORMAT_FILE_URL} # Run pre-commit checks -pre-commit run --hook-stage manual --all-files --show-diff-on-failure +pre-commit run --all-files --show-diff-on-failure From ebf591ffc2fd7b83605f5725ec6e8ba425858c02 Mon Sep 17 00:00:00 2001 From: jakirkham Date: Wed, 22 Feb 2023 10:50:56 -0800 Subject: [PATCH 11/25] Migrate as much as possible to `pyproject.toml` (#1151) * Adds `tomli` dependency for Python pre-3.11 * Moves package data handling to `MANIFEST.in` * Symlinks and packages README * Specifies build backend & moves to newer `setuptools` backend * Pulls all metadata into `pyproject.toml` * Migrates `setup.cfg` content to `pyproject.toml` * Simplifies `setup.py` to the minimal amount of logic necessary Authors: - https://github.com/jakirkham - Vyas Ramasubramani (https://github.com/vyasr) Approvers: - Bradley Dice (https://github.com/bdice) - AJ Schmidt (https://github.com/ajschmidt8) URL: https://github.com/rapidsai/rmm/pull/1151 --- .pre-commit-config.yaml | 4 +- ci/release/apply_wheel_modifications.sh | 6 +- .../all_cuda-118_arch-x86_64.yaml | 1 + conda/recipes/rmm/meta.yaml | 3 +- dependencies.yaml | 1 + python/{setup.cfg => .flake8} | 38 +-------- python/MANIFEST.in | 13 ++++ python/README.md | 1 + python/pyproject.toml | 78 ++++++++++++++++++- python/setup.py | 35 --------- 10 files changed, 99 insertions(+), 81 deletions(-) rename python/{setup.cfg => .flake8} (65%) create mode 100644 python/MANIFEST.in create mode 120000 python/README.md diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9d60da84c..55bdfd884 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ repos: rev: 5.12.0 hooks: - id: isort - args: ["--config-root=python/", "--resolve-all-configs"] + args: ["--settings-path=python/pyproject.toml"] files: python/.* types_or: [python, cython, pyi] - repo: https://github.com/ambv/black @@ -21,7 +21,7 @@ repos: rev: 5.0.4 hooks: - id: flake8 - args: ["--config=python/setup.cfg"] + args: ["--config=python/.flake8"] files: python/.*$ types: [file] types_or: [python, cython] diff --git a/ci/release/apply_wheel_modifications.sh b/ci/release/apply_wheel_modifications.sh index c981cd162..90472d869 100755 --- a/ci/release/apply_wheel_modifications.sh +++ b/ci/release/apply_wheel_modifications.sh @@ -1,12 +1,12 @@ #!/bin/bash # Copyright (c) 2023, NVIDIA CORPORATION. 
# -# Usage: bash apply_wheel_modifications.sh +# Usage: bash apply_wheel_modifications.sh VERSION=${1} CUDA_SUFFIX=${2} sed -i "s/__version__ = .*/__version__ = \"${VERSION}\"/g" python/rmm/__init__.py -sed -i "s/version=.*,/version=\"${VERSION}\",/g" python/setup.py +sed -i "s/^version = .*/version = \"${VERSION}\"/g" python/pyproject.toml -sed -i "s/name=\"rmm\",/name=\"rmm${CUDA_SUFFIX}\",/g" python/setup.py +sed -i "s/^name = \"rmm\"/name = \"rmm${CUDA_SUFFIX}\"/g" python/pyproject.toml diff --git a/conda/environments/all_cuda-118_arch-x86_64.yaml b/conda/environments/all_cuda-118_arch-x86_64.yaml index 499f5dbec..a001756a2 100644 --- a/conda/environments/all_cuda-118_arch-x86_64.yaml +++ b/conda/environments/all_cuda-118_arch-x86_64.yaml @@ -19,4 +19,5 @@ dependencies: - python>=3.8,<3.11 - scikit-build>=0.13.1 - spdlog>=1.11.0,<1.12 +- tomli name: all_cuda-118_arch-x86_64 diff --git a/conda/recipes/rmm/meta.yaml b/conda/recipes/rmm/meta.yaml index 7cf116dc0..b816e24c5 100644 --- a/conda/recipes/rmm/meta.yaml +++ b/conda/recipes/rmm/meta.yaml @@ -46,7 +46,8 @@ requirements: - librmm ={{ version }} - python - scikit-build >=0.13.1 - - setuptools + - setuptools >=61.0.0 + - tomli # [py<311] run: - cuda-python >=11.7.1,<12.0 - numba >=0.49 diff --git a/dependencies.yaml b/dependencies.yaml index 879892ba3..e6fde1b98 100644 --- a/dependencies.yaml +++ b/dependencies.yaml @@ -47,6 +47,7 @@ dependencies: - ninja - python>=3.8,<3.11 - scikit-build>=0.13.1 + - tomli - output_types: conda packages: - fmt>=9.1.0,<10 diff --git a/python/setup.cfg b/python/.flake8 similarity index 65% rename from python/setup.cfg rename to python/.flake8 index d5b0510b1..4d610ba8e 100644 --- a/python/setup.cfg +++ b/python/.flake8 @@ -1,5 +1,4 @@ -# Copyright (c) 2018-2021, NVIDIA CORPORATION. - +# Copyright (c) 2023, NVIDIA CORPORATION. [flake8] filename = *.py, *.pyx, *.pxd, *.pxi @@ -23,38 +22,3 @@ per-file-ignores = *.pyx: E211, E225, E226, E227, E275, E402, E999, W504 *.pxd: E211, E225, E226, E227, E275, E402, E999, W504 *.pxi: E211, E225, E226, E227, E275, E402, E999, W504 - -[isort] -line_length=79 -multi_line_output=3 -include_trailing_comma=True -force_grid_wrap=0 -combine_as_imports=True -order_by_type=True -known_dask= - dask - distributed - dask_cuda -known_rapids= - nvtext - cudf - cuml - cugraph - dask_cudf -known_first_party= - rmm -default_section=THIRDPARTY -sections=FUTURE,STDLIB,THIRDPARTY,DASK,RAPIDS,FIRSTPARTY,LOCALFOLDER -skip= - thirdparty - .eggs - .git - .hg - .mypy_cache - .tox - .venv - _build - buck-out - build - dist - __init__.py diff --git a/python/MANIFEST.in b/python/MANIFEST.in new file mode 100644 index 000000000..708e097ed --- /dev/null +++ b/python/MANIFEST.in @@ -0,0 +1,13 @@ +# Cython files +recursive-include rmm *.pxd +recursive-include rmm *.pyx + +# Build files. Don't use a recursive include on '.' in case the repo is dirty +include . CMakeLists.txt +recursive-include rmm CMakeLists.txt + +# License +include LICENSE + +# README +include README diff --git a/python/README.md b/python/README.md new file mode 120000 index 000000000..32d46ee88 --- /dev/null +++ b/python/README.md @@ -0,0 +1 @@ +../README.md \ No newline at end of file diff --git a/python/pyproject.toml b/python/pyproject.toml index 0d4cfea09..e05a2e73d 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -13,17 +13,52 @@ # limitations under the License. 
[build-system] - +build-backend = "setuptools.build_meta" requires = [ "wheel", - "setuptools", + "setuptools>=61.0.0", "cython>=0.29,<0.30", "scikit-build>=0.13.1", "cmake>=3.23.1,!=3.25.0", "ninja", - "cuda-python>=11.7.1,<12.0" + "cuda-python>=11.7.1,<12.0", + "tomli; python_version < '3.11'", +] + +[project] +name = "rmm" +version = "23.04.00" +description = "rmm - RAPIDS Memory Manager" +readme = { file = "README.md", content-type = "text/markdown" } +authors = [ + { name = "NVIDIA Corporation" }, +] +license = { text = "Apache 2.0" } +requires-python = ">=3.8" +dependencies = [ + "cuda-python>=11.7.1,<12.0", + "numpy>=1.19", + "numba>=0.49", +] +classifiers = [ + "Intended Audience :: Developers", + "Topic :: Database", + "Topic :: Scientific/Engineering", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.10", +] + +[project.optional-dependencies] +test = [ + "pytest", + "pytest-xdist", ] +[project.urls] +Homepage = "https://github.com/rapidsai/rmm" + [tool.black] line-length = 79 target-version = ["py38"] @@ -43,3 +78,40 @@ exclude = ''' dist )/ ''' + +[tool.isort] +line_length = 79 +multi_line_output = 3 +include_trailing_comma = true +force_grid_wrap = 0 +combine_as_imports = true +order_by_type = true +known_first_party = [ + "rmm", +] +default_section = "THIRDPARTY" +sections = [ + "FUTURE", + "STDLIB", + "THIRDPARTY", + "FIRSTPARTY", + "LOCALFOLDER", +] +skip = [ + "thirdparty", + ".eggs", + ".git", + ".hg", + ".mypy_cache", + ".tox", + ".venv", + "_build", + "buck-out", + "build", + "dist", + "__init__.py", +] + +[tool.setuptools] +license-files = ["LICENSE"] +zip-safe = false diff --git a/python/setup.py b/python/setup.py index 221908184..bf60268a6 100644 --- a/python/setup.py +++ b/python/setup.py @@ -4,40 +4,5 @@ from skbuild import setup setup( - name="rmm", - version="23.04.00", - description="rmm - RAPIDS Memory Manager", - url="https://github.com/rapidsai/rmm", - author="NVIDIA Corporation", - license="Apache 2.0", - classifiers=[ - "Intended Audience :: Developers", - "Topic :: Database", - "Topic :: Scientific/Engineering", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - ], - # Include the separately-compiled shared library - extras_require={"test": ["pytest"]}, packages=find_packages(include=["rmm", "rmm.*"]), - include_package_data=True, - python_requires=">=3.8", - package_data={ - # Note: A dict comprehension with an explicit copy is necessary (rather - # than something simpler like a dict.fromkeys) because otherwise every - # package will refer to the same list and skbuild modifies it in place. - key: ["*.hpp", "*.pxd"] - for key in find_packages( - include=["rmm._lib", "rmm._lib.includes", "rmm._cuda*"] - ) - }, - install_requires=[ - "cuda-python>=11.7.1,<12.0", - "numpy>=1.19", - "numba>=0.49", - ], - zip_safe=False, ) From c085cdb7b61a2fb88657303bc2e7f509b235a3ec Mon Sep 17 00:00:00 2001 From: Vyas Ramasubramani Date: Wed, 22 Feb 2023 14:07:25 -0800 Subject: [PATCH 12/25] Specify include_package_data to setup (#1218) For pure setuptools-based builds, the `include_package_data` option historically defaulted to False. With newer projects, i.e. with projects containing a pyproject.toml file, it defaults to True. 
However, for projects using scikit-build this setting must be provided explicitly to the `setup` function (not in the config file) to allow scikit-build to intercept the argument and use it in the preprocessing it does before invoking `setuptools.setup`. Authors: - Vyas Ramasubramani (https://github.com/vyasr) Approvers: - https://github.com/jakirkham URL: https://github.com/rapidsai/rmm/pull/1218 --- python/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/python/setup.py b/python/setup.py index bf60268a6..1572a4894 100644 --- a/python/setup.py +++ b/python/setup.py @@ -5,4 +5,5 @@ setup( packages=find_packages(include=["rmm", "rmm.*"]), + include_package_data=True, ) From fd6cddef368427e0d7e69f1384968b18fc77ffdc Mon Sep 17 00:00:00 2001 From: Vyas Ramasubramani Date: Fri, 24 Feb 2023 14:58:24 -0800 Subject: [PATCH 13/25] Fix some minor oversights in the conversion to pyproject.toml (#1226) There were a couple of issues that accidentally made it through in #1151. Authors: - Vyas Ramasubramani (https://github.com/vyasr) Approvers: - Bradley Dice (https://github.com/bdice) URL: https://github.com/rapidsai/rmm/pull/1226 --- python/MANIFEST.in | 6 ------ python/pyproject.toml | 1 - python/setup.py | 1 + 3 files changed, 1 insertion(+), 7 deletions(-) diff --git a/python/MANIFEST.in b/python/MANIFEST.in index 708e097ed..0a521fd9e 100644 --- a/python/MANIFEST.in +++ b/python/MANIFEST.in @@ -5,9 +5,3 @@ recursive-include rmm *.pyx # Build files. Don't use a recursive include on '.' in case the repo is dirty include . CMakeLists.txt recursive-include rmm CMakeLists.txt - -# License -include LICENSE - -# README -include README diff --git a/python/pyproject.toml b/python/pyproject.toml index e05a2e73d..873e21445 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -114,4 +114,3 @@ skip = [ [tool.setuptools] license-files = ["LICENSE"] -zip-safe = false diff --git a/python/setup.py b/python/setup.py index 1572a4894..64d8524e3 100644 --- a/python/setup.py +++ b/python/setup.py @@ -6,4 +6,5 @@ setup( packages=find_packages(include=["rmm", "rmm.*"]), include_package_data=True, + zip_safe=False, ) From ef997db9193d6a9d5b9583f123451d44e3340169 Mon Sep 17 00:00:00 2001 From: Vyas Ramasubramani Date: Fri, 24 Feb 2023 15:29:56 -0800 Subject: [PATCH 14/25] Run rapids-dependency-file-generator via pre-commit (#1217) This feature is enabled by the most recent release of dfg. This changes also allows us to disable the dfg CI check as it is now redundant. Authors: - Vyas Ramasubramani (https://github.com/vyasr) Approvers: - AJ Schmidt (https://github.com/ajschmidt8) URL: https://github.com/rapidsai/rmm/pull/1217 --- .github/workflows/pr.yaml | 2 ++ .pre-commit-config.yaml | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 240344bc1..eb3607c8c 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -25,6 +25,8 @@ jobs: checks: secrets: inherit uses: rapidsai/shared-action-workflows/.github/workflows/checks.yaml@branch-23.04 + with: + enable_check_generated_files: false conda-cpp-build: needs: checks secrets: inherit diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 55bdfd884..261c57b27 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,10 +1,15 @@ -# Copyright (c) 2022, NVIDIA CORPORATION. +# Copyright (c) 2022-2023, NVIDIA CORPORATION. 
repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.3.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer + - repo: https://github.com/rapidsai/dependency-file-generator + rev: v1.4.0 + hooks: + - id: rapids-dependency-file-generator + args: ["--clean"] - repo: https://github.com/PyCQA/isort rev: 5.12.0 hooks: From 1a75350e04649edd9b7a8186f5f600c7a66b4972 Mon Sep 17 00:00:00 2001 From: Lawrence Mitchell Date: Mon, 27 Feb 2023 20:21:32 +0000 Subject: [PATCH 15/25] Move external allocators into rmm.allocators module to defer imports (#1221) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit RMM provides callbacks to configure third-party libraries to use RMM for memory allocation. Previously, these were defined in the top-level package, but that requires (potentially expensive) import of the package we're providing a hook for, since typically we must import that package to define the callback. This makes importing RMM expensive. To avoid this, move the callbacks into (not imported by default) sub-modules in `rmm.allocators`. So, if we want to configure the CuPy allocator, we now import `rmm_cupy_allocator` from `rmm.allocators.cupy`, and don't pay the price of importing pytorch. This change **deprecates** the use of the allocator callbacks in the top-level `rmm` module in favour of explicit imports from the relevant `rmm.allocators.XXX` sub-module. Before these changes, a sampling trace of `import rmm` with pyinstrument shows: $ pyinstrument -i 0.01 importrmm.py _ ._ __/__ _ _ _ _ _/_ Recorded: 10:19:56 Samples: 67 /_//_/// /_\ / //_// / //_'/ // Duration: 0.839 CPU time: 0.837 / _/ v4.4.0 Program: importrmm.py 0.839 importrmm.py:1 └─ 0.839 rmm/__init__.py:1 ├─ 0.315 rmm/allocators/torch.py:1 │ └─ 0.315 torch/__init__.py:1 │ [96 frames hidden] torch, , enum, inspect, tok... ├─ 0.297 rmm/mr.py:1 │ └─ 0.297 rmm/_lib/__init__.py:1 │ ├─ 0.216 numba/__init__.py:1 │ │ [140 frames hidden] numba, abc, , importlib, em... │ ├─ 0.040 numba/cuda/__init__.py:1 │ │ [34 frames hidden] numba, asyncio, ssl, , re, ... │ ├─ 0.030 __new__ enum.py:180 │ │ [5 frames hidden] enum, │ └─ 0.011 [self] None └─ 0.227 rmm/allocators/cupy.py:1 └─ 0.227 cupy/__init__.py:1 [123 frames hidden] cupy, pytest, _pytest, attr, importrmm.py:1 └─ 0.099 rmm/__init__.py:1 └─ 0.099 rmm/mr.py:1 └─ 0.099 rmm/_lib/__init__.py:1 ├─ 0.059 numpy/__init__.py:1 │ [31 frames hidden] numpy, re, sre_compile, , s... ├─ 0.020 __new__ enum.py:180 │ [2 frames hidden] enum ├─ 0.010 ctypes/__init__.py:1 │ [3 frames hidden] ctypes, └─ 0.010 _EnumDict.__setitem__ enum.py:89 [3 frames hidden] enum Closes #1211. 
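As a minimal before/after sketch of the deprecation described above (drawn from the diff below, not an addition to it; only the CuPy hook is shown, the Numba and PyTorch hooks move the same way):

    # Deprecated spelling -- still works, but now goes through the
    # __getattr__ shim added below and emits a FutureWarning.
    import cupy
    import rmm
    cupy.cuda.set_allocator(rmm.rmm_cupy_allocator)

    # Preferred spelling -- import the callback from its sub-module, so
    # `import rmm` no longer pays for importing CuPy/Numba/PyTorch.
    from rmm.allocators.cupy import rmm_cupy_allocator
    cupy.cuda.set_allocator(rmm_cupy_allocator)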
Authors: - Lawrence Mitchell (https://github.com/wence-) - Bradley Dice (https://github.com/bdice) Approvers: - Vyas Ramasubramani (https://github.com/vyasr) - Bradley Dice (https://github.com/bdice) URL: https://github.com/rapidsai/rmm/pull/1221 --- README.md | 18 +-- python/docs/api.rst | 18 +++ python/docs/basics.md | 38 +++++-- python/rmm/__init__.py | 34 +++++- python/rmm/_cuda/gpu.py | 3 +- python/rmm/_cuda/stream.pyx | 22 ++-- python/rmm/allocators/__init__.py | 0 python/rmm/allocators/cupy.py | 44 ++++++++ python/rmm/allocators/numba.py | 125 +++++++++++++++++++++ python/rmm/allocators/torch.py | 26 +++++ python/rmm/rmm.py | 162 +-------------------------- python/rmm/tests/test_rmm.py | 18 +-- python/rmm/tests/test_rmm_pytorch.py | 4 +- 13 files changed, 311 insertions(+), 201 deletions(-) create mode 100644 python/rmm/allocators/__init__.py create mode 100644 python/rmm/allocators/cupy.py create mode 100644 python/rmm/allocators/numba.py create mode 100644 python/rmm/allocators/torch.py diff --git a/README.md b/README.md index 2f6d54a64..a05ffa5e5 100644 --- a/README.md +++ b/README.md @@ -691,6 +691,8 @@ resources MemoryResources are highly configurable and can be composed together in different ways. See `help(rmm.mr)` for more information. +## Using RMM with third-party libraries + ### Using RMM with CuPy You can configure [CuPy](https://cupy.dev/) to use RMM for memory @@ -698,9 +700,9 @@ allocations by setting the CuPy CUDA allocator to `rmm_cupy_allocator`: ```python ->>> import rmm +>>> from rmm.allocators.cupy import rmm_cupy_allocator >>> import cupy ->>> cupy.cuda.set_allocator(rmm.rmm_cupy_allocator) +>>> cupy.cuda.set_allocator(rmm_cupy_allocator) ``` @@ -718,15 +720,15 @@ This can be done in two ways: 1. Setting the environment variable `NUMBA_CUDA_MEMORY_MANAGER`: ```python - $ NUMBA_CUDA_MEMORY_MANAGER=rmm python (args) + $ NUMBA_CUDA_MEMORY_MANAGER=rmm.allocators.numba python (args) ``` 2. Using the `set_memory_manager()` function provided by Numba: ```python >>> from numba import cuda - >>> import rmm - >>> cuda.set_memory_manager(rmm.RMMNumbaManager) + >>> from rmm.allocators.numba import RMMNumbaManager + >>> cuda.set_memory_manager(RMMNumbaManager) ``` **Note:** This only configures Numba to use the current RMM resource for allocations. @@ -741,10 +743,11 @@ RMM-managed pool: ```python import rmm +from rmm.allocators.torch import rmm_torch_allocator import torch rmm.reinitialize(pool_allocator=True) -torch.cuda.memory.change_current_allocator(rmm.rmm_torch_allocator) +torch.cuda.memory.change_current_allocator(rmm_torch_allocator) ``` PyTorch and RMM will now share the same memory pool. @@ -753,13 +756,14 @@ You can, of course, use a custom memory resource with PyTorch as well: ```python import rmm +from rmm.allocators.torch import rmm_torch_allocator import torch # note that you can configure PyTorch to use RMM either before or # after changing RMM's memory resource. PyTorch will use whatever # memory resource is configured to be the "current" memory resource at # the time of allocation. 
-torch.cuda.change_current_allocator(rmm.rmm_torch_allocator) +torch.cuda.change_current_allocator(rmm_torch_allocator) # configure RMM to use a managed memory resource, wrapped with a # statistics resource adaptor that can report information about the diff --git a/python/docs/api.rst b/python/docs/api.rst index ebc68c354..73cd5dd81 100644 --- a/python/docs/api.rst +++ b/python/docs/api.rst @@ -17,3 +17,21 @@ Memory Resources :members: :undoc-members: :show-inheritance: + +Memory Allocators +----------------- + +.. automodule:: rmm.allocators.cupy + :members: + :undoc-members: + :show-inheritance: + +.. automodule:: rmm.allocators.numba + :members: + :undoc-members: + :show-inheritance: + +.. automodule:: rmm.allocators.torch + :members: + :undoc-members: + :show-inheritance: diff --git a/python/docs/basics.md b/python/docs/basics.md index 368145b3d..0c47073c1 100644 --- a/python/docs/basics.md +++ b/python/docs/basics.md @@ -131,21 +131,31 @@ resources MemoryResources are highly configurable and can be composed together in different ways. See `help(rmm.mr)` for more information. +## Using RMM with third-party libraries + +A number of libraries provide hooks to control their device +allocations. RMM provides implementations of these for +[CuPy](https://cupy.dev), +[numba](https://numba.readthedocs.io/en/stable/), and [PyTorch](https://pytorch.org) in the +`rmm.allocators` submodule. All these approaches configure the library +to use the _current_ RMM memory resource for device +allocations. + ### Using RMM with CuPy You can configure [CuPy](https://cupy.dev/) to use RMM for memory allocations by setting the CuPy CUDA allocator to -`rmm_cupy_allocator`: +`rmm.allocators.cupy.rmm_cupy_allocator`: ```python ->>> import rmm +>>> from rmm.allocators.cupy import rmm_cupy_allocator >>> import cupy ->>> cupy.cuda.set_allocator(rmm.rmm_cupy_allocator) +>>> cupy.cuda.set_allocator(rmm_cupy_allocator) ``` ### Using RMM with Numba -You can configure Numba to use RMM for memory allocations using the +You can configure [Numba](https://numba.readthedocs.io/en/stable/) to use RMM for memory allocations using the Numba [EMM Plugin](https://numba.readthedocs.io/en/stable/cuda/external-memory.html#setting-emm-plugin). This can be done in two ways: @@ -153,13 +163,27 @@ This can be done in two ways: 1. Setting the environment variable `NUMBA_CUDA_MEMORY_MANAGER`: ```bash - $ NUMBA_CUDA_MEMORY_MANAGER=rmm python (args) + $ NUMBA_CUDA_MEMORY_MANAGER=rmm.allocators.numba python (args) ``` 2. Using the `set_memory_manager()` function provided by Numba: ```python >>> from numba import cuda - >>> import rmm - >>> cuda.set_memory_manager(rmm.RMMNumbaManager) + >>> from rmm.allocators.numba import RMMNumbaManager + >>> cuda.set_memory_manager(RMMNumbaManager) ``` + +### Using RMM with PyTorch + +You can configure +[PyTorch](https://pytorch.org/docs/stable/notes/cuda.html) to use RMM +for memory allocations using their by configuring the current +allocator. 
+ +```python +from rmm.allocators.torch import rmm_torch_allocator +import torch + +torch.cuda.memory.change_current_allocator(rmm_torch_allocator) +``` diff --git a/python/rmm/__init__.py b/python/rmm/__init__.py index e2d8f57d7..d9e86c13e 100644 --- a/python/rmm/__init__.py +++ b/python/rmm/__init__.py @@ -17,20 +17,15 @@ from rmm.mr import disable_logging, enable_logging, get_log_filenames from rmm.rmm import ( RMMError, - RMMNumbaManager, - _numba_memory_manager, is_initialized, register_reinitialize_hook, reinitialize, - rmm_cupy_allocator, - rmm_torch_allocator, unregister_reinitialize_hook, ) __all__ = [ "DeviceBuffer", "RMMError", - "RMMNumbaManager", "disable_logging", "enable_logging", "get_log_filenames", @@ -38,8 +33,35 @@ "mr", "register_reinitialize_hook", "reinitialize", - "rmm_cupy_allocator", "unregister_reinitialize_hook", ] __version__ = "23.04.00" + + +_deprecated_names = { + "rmm_cupy_allocator": "cupy", + "rmm_torch_allocator": "torch", + "RMMNumbaManager": "numba", + "_numba_memory_manager": "numba", +} + + +def __getattr__(name): + if name in _deprecated_names: + import importlib + import warnings + + package = _deprecated_names[name] + warnings.warn( + f"Use of 'rmm.{name}' is deprecated and will be removed. " + f"'{name}' now lives in the 'rmm.allocators.{package}' sub-module, " + "please update your imports.", + FutureWarning, + ) + module = importlib.import_module( + f".allocators.{package}", package=__name__ + ) + return getattr(module, name) + else: + raise AttributeError(f"Module '{__name__}' has no attribute '{name}'") diff --git a/python/rmm/_cuda/gpu.py b/python/rmm/_cuda/gpu.py index e7f768349..2a23b41e6 100644 --- a/python/rmm/_cuda/gpu.py +++ b/python/rmm/_cuda/gpu.py @@ -1,6 +1,5 @@ # Copyright (c) 2020, NVIDIA CORPORATION. -import numba.cuda from cuda import cuda, cudart @@ -84,6 +83,8 @@ def runtimeGetVersion(): """ # TODO: Replace this with `cuda.cudart.cudaRuntimeGetVersion()` when the # limitation is fixed. 
+ import numba.cuda + major, minor = numba.cuda.runtime.get_version() return major * 1000 + minor * 10 diff --git a/python/rmm/_cuda/stream.pyx b/python/rmm/_cuda/stream.pyx index 4f2ce26d0..d60dde4e1 100644 --- a/python/rmm/_cuda/stream.pyx +++ b/python/rmm/_cuda/stream.pyx @@ -16,6 +16,7 @@ from cuda.ccudart cimport cudaStream_t from libc.stdint cimport uintptr_t from libcpp cimport bool +from rmm._lib.cuda_stream cimport CudaStream from rmm._lib.cuda_stream_view cimport ( cuda_stream_default, cuda_stream_legacy, @@ -23,12 +24,6 @@ from rmm._lib.cuda_stream_view cimport ( cuda_stream_view, ) -from numba import cuda - -from rmm._lib.cuda_stream cimport CudaStream - -from rmm._lib.cuda_stream import CudaStream - cdef class Stream: def __init__(self, obj=None): @@ -46,10 +41,11 @@ cdef class Stream: self._init_with_new_cuda_stream() elif isinstance(obj, Stream): self._init_from_stream(obj) - elif isinstance(obj, cuda.cudadrv.driver.Stream): - self._init_from_numba_stream(obj) else: - self._init_from_cupy_stream(obj) + try: + self._init_from_numba_stream(obj) + except TypeError: + self._init_from_cupy_stream(obj) @staticmethod cdef Stream _from_cudaStream_t(cudaStream_t s, object owner=None): @@ -94,8 +90,12 @@ cdef class Stream: return self.c_is_default() def _init_from_numba_stream(self, obj): - self._cuda_stream = (int(obj)) - self._owner = obj + from numba import cuda + if isinstance(obj, cuda.cudadrv.driver.Stream): + self._cuda_stream = (int(obj)) + self._owner = obj + else: + raise TypeError(f"Cannot create stream from {type(obj)}") def _init_from_cupy_stream(self, obj): try: diff --git a/python/rmm/allocators/__init__.py b/python/rmm/allocators/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/python/rmm/allocators/cupy.py b/python/rmm/allocators/cupy.py new file mode 100644 index 000000000..89947c46b --- /dev/null +++ b/python/rmm/allocators/cupy.py @@ -0,0 +1,44 @@ +# Copyright (c) 2023, NVIDIA CORPORATION. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from rmm import _lib as librmm +from rmm._cuda.stream import Stream + +try: + import cupy +except ImportError: + cupy = None + + +def rmm_cupy_allocator(nbytes): + """ + A CuPy allocator that makes use of RMM. + + Examples + -------- + >>> from rmm.allocators.cupy import rmm_cupy_allocator + >>> import cupy + >>> cupy.cuda.set_allocator(rmm_cupy_allocator) + """ + if cupy is None: + raise ModuleNotFoundError("No module named 'cupy'") + + stream = Stream(obj=cupy.cuda.get_current_stream()) + buf = librmm.device_buffer.DeviceBuffer(size=nbytes, stream=stream) + dev_id = -1 if buf.ptr else cupy.cuda.device.get_device_id() + mem = cupy.cuda.UnownedMemory( + ptr=buf.ptr, size=buf.size, owner=buf, device_id=dev_id + ) + ptr = cupy.cuda.memory.MemoryPointer(mem, 0) + + return ptr diff --git a/python/rmm/allocators/numba.py b/python/rmm/allocators/numba.py new file mode 100644 index 000000000..18a010e1c --- /dev/null +++ b/python/rmm/allocators/numba.py @@ -0,0 +1,125 @@ +# Copyright (c) 2023, NVIDIA CORPORATION. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import ctypes + +from cuda.cuda import CUdeviceptr, cuIpcGetMemHandle +from numba import config, cuda +from numba.cuda import HostOnlyCUDAMemoryManager, IpcHandle, MemoryPointer + +from rmm import _lib as librmm + + +def _make_emm_plugin_finalizer(handle, allocations): + """ + Factory to make the finalizer function. + We need to bind *handle* and *allocations* into the actual finalizer, which + takes no args. + """ + + def finalizer(): + """ + Invoked when the MemoryPointer is freed + """ + # At exit time (particularly in the Numba test suite) allocations may + # have already been cleaned up by a call to Context.reset() for the + # context, even if there are some DeviceNDArrays and their underlying + # allocations lying around. Finalizers then get called by weakref's + # atexit finalizer, at which point allocations[handle] no longer + # exists. This is harmless, except that a traceback is printed just + # prior to exit (without abnormally terminating the program), but is + # worrying for the user. To avoid the traceback, we check if + # allocations is already empty. + # + # In the case where allocations is not empty, but handle is not in + # allocations, then something has gone wrong - so we only guard against + # allocations being completely empty, rather than handle not being in + # allocations. + if allocations: + del allocations[handle] + + return finalizer + + +class RMMNumbaManager(HostOnlyCUDAMemoryManager): + """ + External Memory Management Plugin implementation for Numba. Provides + on-device allocation only. + + See https://numba.readthedocs.io/en/stable/cuda/external-memory.html for + details of the interface being implemented here. + """ + + def initialize(self): + # No special initialization needed to use RMM within a given context. + pass + + def memalloc(self, size): + """ + Allocate an on-device array from the RMM pool. + """ + buf = librmm.DeviceBuffer(size=size) + ctx = self.context + + if config.CUDA_USE_NVIDIA_BINDING: + ptr = CUdeviceptr(int(buf.ptr)) + else: + # expect ctypes bindings in numba + ptr = ctypes.c_uint64(int(buf.ptr)) + + finalizer = _make_emm_plugin_finalizer(int(buf.ptr), self.allocations) + + # self.allocations is initialized by the parent, HostOnlyCUDAManager, + # and cleared upon context reset, so although we insert into it here + # and delete from it in the finalizer, we need not do any other + # housekeeping elsewhere. + self.allocations[int(buf.ptr)] = buf + + return MemoryPointer(ctx, ptr, size, finalizer=finalizer) + + def get_ipc_handle(self, memory): + """ + Get an IPC handle for the MemoryPointer memory with offset modified by + the RMM memory pool. 
+ """ + start, end = cuda.cudadrv.driver.device_extents(memory) + + if config.CUDA_USE_NVIDIA_BINDING: + _, ipc_handle = cuIpcGetMemHandle(start) + offset = int(memory.handle) - int(start) + else: + ipc_handle = (ctypes.c_byte * 64)() # IPC handle is 64 bytes + cuda.cudadrv.driver.driver.cuIpcGetMemHandle( + ctypes.byref(ipc_handle), + start, + ) + offset = memory.handle.value - start + source_info = cuda.current_context().device.get_device_identity() + + return IpcHandle( + memory, ipc_handle, memory.size, source_info, offset=offset + ) + + def get_memory_info(self): + raise NotImplementedError() + + @property + def interface_version(self): + return 1 + + +# Enables the use of RMM for Numba via an environment variable setting, +# NUMBA_CUDA_MEMORY_MANAGER=rmm. See: +# https://numba.readthedocs.io/en/stable/cuda/external-memory.html#environment-variable +_numba_memory_manager = RMMNumbaManager diff --git a/python/rmm/allocators/torch.py b/python/rmm/allocators/torch.py new file mode 100644 index 000000000..65b310a89 --- /dev/null +++ b/python/rmm/allocators/torch.py @@ -0,0 +1,26 @@ +# Copyright (c) 2023, NVIDIA CORPORATION. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +try: + from torch.cuda.memory import CUDAPluggableAllocator +except ImportError: + rmm_torch_allocator = None +else: + import rmm._lib.torch_allocator + + _alloc_free_lib_path = rmm._lib.torch_allocator.__file__ + rmm_torch_allocator = CUDAPluggableAllocator( + _alloc_free_lib_path, + alloc_fn_name="allocate", + free_fn_name="deallocate", + ) diff --git a/python/rmm/rmm.py b/python/rmm/rmm.py index cae9971dc..e5290905c 100644 --- a/python/rmm/rmm.py +++ b/python/rmm/rmm.py @@ -11,15 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import ctypes - -from cuda.cuda import CUdeviceptr, cuIpcGetMemHandle -from numba import config, cuda -from numba.cuda import HostOnlyCUDAMemoryManager, IpcHandle, MemoryPointer - -import rmm -from rmm import _lib as librmm -from rmm._cuda.stream import Stream +from rmm import mr # Utility Functions @@ -86,7 +78,7 @@ def reinitialize( for func, args, kwargs in reversed(_reinitialize_hooks): func(*args, **kwargs) - rmm.mr._initialize( + mr._initialize( pool_allocator=pool_allocator, managed_memory=managed_memory, initial_pool_size=initial_pool_size, @@ -101,155 +93,7 @@ def is_initialized(): """ Returns True if RMM has been initialized, False otherwise. """ - return rmm.mr.is_initialized() - - -class RMMNumbaManager(HostOnlyCUDAMemoryManager): - """ - External Memory Management Plugin implementation for Numba. Provides - on-device allocation only. - - See https://numba.readthedocs.io/en/stable/cuda/external-memory.html for - details of the interface being implemented here. - """ - - def initialize(self): - # No special initialization needed to use RMM within a given context. 
- pass - - def memalloc(self, size): - """ - Allocate an on-device array from the RMM pool. - """ - buf = librmm.DeviceBuffer(size=size) - ctx = self.context - - if config.CUDA_USE_NVIDIA_BINDING: - ptr = CUdeviceptr(int(buf.ptr)) - else: - # expect ctypes bindings in numba - ptr = ctypes.c_uint64(int(buf.ptr)) - - finalizer = _make_emm_plugin_finalizer(int(buf.ptr), self.allocations) - - # self.allocations is initialized by the parent, HostOnlyCUDAManager, - # and cleared upon context reset, so although we insert into it here - # and delete from it in the finalizer, we need not do any other - # housekeeping elsewhere. - self.allocations[int(buf.ptr)] = buf - - return MemoryPointer(ctx, ptr, size, finalizer=finalizer) - - def get_ipc_handle(self, memory): - """ - Get an IPC handle for the MemoryPointer memory with offset modified by - the RMM memory pool. - """ - start, end = cuda.cudadrv.driver.device_extents(memory) - - if config.CUDA_USE_NVIDIA_BINDING: - _, ipchandle = cuIpcGetMemHandle(start) - offset = int(memory.handle) - int(start) - else: - ipchandle = (ctypes.c_byte * 64)() # IPC handle is 64 bytes - cuda.cudadrv.driver.driver.cuIpcGetMemHandle( - ctypes.byref(ipchandle), - start, - ) - offset = memory.handle.value - start - source_info = cuda.current_context().device.get_device_identity() - - return IpcHandle( - memory, ipchandle, memory.size, source_info, offset=offset - ) - - def get_memory_info(self): - raise NotImplementedError() - - @property - def interface_version(self): - return 1 - - -def _make_emm_plugin_finalizer(handle, allocations): - """ - Factory to make the finalizer function. - We need to bind *handle* and *allocations* into the actual finalizer, which - takes no args. - """ - - def finalizer(): - """ - Invoked when the MemoryPointer is freed - """ - # At exit time (particularly in the Numba test suite) allocations may - # have already been cleaned up by a call to Context.reset() for the - # context, even if there are some DeviceNDArrays and their underlying - # allocations lying around. Finalizers then get called by weakref's - # atexit finalizer, at which point allocations[handle] no longer - # exists. This is harmless, except that a traceback is printed just - # prior to exit (without abnormally terminating the program), but is - # worrying for the user. To avoid the traceback, we check if - # allocations is already empty. - # - # In the case where allocations is not empty, but handle is not in - # allocations, then something has gone wrong - so we only guard against - # allocations being completely empty, rather than handle not being in - # allocations. - if allocations: - del allocations[handle] - - return finalizer - - -# Enables the use of RMM for Numba via an environment variable setting, -# NUMBA_CUDA_MEMORY_MANAGER=rmm. See: -# https://numba.readthedocs.io/en/stable/cuda/external-memory.html#environment-variable -_numba_memory_manager = RMMNumbaManager - -try: - import cupy -except Exception: - cupy = None - - -def rmm_cupy_allocator(nbytes): - """ - A CuPy allocator that makes use of RMM. 
- - Examples - -------- - >>> import rmm - >>> import cupy - >>> cupy.cuda.set_allocator(rmm.rmm_cupy_allocator) - """ - if cupy is None: - raise ModuleNotFoundError("No module named 'cupy'") - - stream = Stream(obj=cupy.cuda.get_current_stream()) - buf = librmm.device_buffer.DeviceBuffer(size=nbytes, stream=stream) - dev_id = -1 if buf.ptr else cupy.cuda.device.get_device_id() - mem = cupy.cuda.UnownedMemory( - ptr=buf.ptr, size=buf.size, owner=buf, device_id=dev_id - ) - ptr = cupy.cuda.memory.MemoryPointer(mem, 0) - - return ptr - - -try: - from torch.cuda.memory import CUDAPluggableAllocator -except ImportError: - rmm_torch_allocator = None -else: - import rmm._lib.torch_allocator - - _alloc_free_lib_path = rmm._lib.torch_allocator.__file__ - rmm_torch_allocator = CUDAPluggableAllocator( - _alloc_free_lib_path, - alloc_fn_name="allocate", - free_fn_name="deallocate", - ) + return mr.is_initialized() def register_reinitialize_hook(func, *args, **kwargs): diff --git a/python/rmm/tests/test_rmm.py b/python/rmm/tests/test_rmm.py index f79c60b43..95afc8db3 100644 --- a/python/rmm/tests/test_rmm.py +++ b/python/rmm/tests/test_rmm.py @@ -24,6 +24,8 @@ import rmm import rmm._cuda.stream +from rmm.allocators.cupy import rmm_cupy_allocator +from rmm.allocators.numba import RMMNumbaManager if sys.version_info < (3, 8): try: @@ -33,7 +35,7 @@ else: import pickle -cuda.set_memory_manager(rmm.RMMNumbaManager) +cuda.set_memory_manager(RMMNumbaManager) _driver_version = rmm._cuda.gpu.driverGetVersion() _runtime_version = rmm._cuda.gpu.runtimeGetVersion() @@ -303,17 +305,17 @@ def test_rmm_pool_numba_stream(stream): def test_rmm_cupy_allocator(): cupy = pytest.importorskip("cupy") - m = rmm.rmm_cupy_allocator(42) + m = rmm_cupy_allocator(42) assert m.mem.size == 42 assert m.mem.ptr != 0 assert isinstance(m.mem._owner, rmm.DeviceBuffer) - m = rmm.rmm_cupy_allocator(0) + m = rmm_cupy_allocator(0) assert m.mem.size == 0 assert m.mem.ptr == 0 assert isinstance(m.mem._owner, rmm.DeviceBuffer) - cupy.cuda.set_allocator(rmm.rmm_cupy_allocator) + cupy.cuda.set_allocator(rmm_cupy_allocator) a = cupy.arange(10) assert isinstance(a.data.mem._owner, rmm.DeviceBuffer) @@ -323,7 +325,7 @@ def test_rmm_pool_cupy_allocator_with_stream(stream): cupy = pytest.importorskip("cupy") rmm.reinitialize(pool_allocator=True) - cupy.cuda.set_allocator(rmm.rmm_cupy_allocator) + cupy.cuda.set_allocator(rmm_cupy_allocator) if stream == "null": stream = cupy.cuda.stream.Stream.null @@ -331,12 +333,12 @@ def test_rmm_pool_cupy_allocator_with_stream(stream): stream = cupy.cuda.stream.Stream() with stream: - m = rmm.rmm_cupy_allocator(42) + m = rmm_cupy_allocator(42) assert m.mem.size == 42 assert m.mem.ptr != 0 assert isinstance(m.mem._owner, rmm.DeviceBuffer) - m = rmm.rmm_cupy_allocator(0) + m = rmm_cupy_allocator(0) assert m.mem.size == 0 assert m.mem.ptr == 0 assert isinstance(m.mem._owner, rmm.DeviceBuffer) @@ -355,7 +357,7 @@ def test_rmm_pool_cupy_allocator_stream_lifetime(): cupy = pytest.importorskip("cupy") rmm.reinitialize(pool_allocator=True) - cupy.cuda.set_allocator(rmm.rmm_cupy_allocator) + cupy.cuda.set_allocator(rmm_cupy_allocator) stream = cupy.cuda.stream.Stream() diff --git a/python/rmm/tests/test_rmm_pytorch.py b/python/rmm/tests/test_rmm_pytorch.py index eaa40c0ed..065507b61 100644 --- a/python/rmm/tests/test_rmm_pytorch.py +++ b/python/rmm/tests/test_rmm_pytorch.py @@ -2,7 +2,7 @@ import pytest -import rmm +from rmm.allocators.torch import rmm_torch_allocator torch = pytest.importorskip("torch") @@ -13,7 +13,7 
@@ def torch_allocator(): from torch.cuda.memory import change_current_allocator except ImportError: pytest.skip("pytorch pluggable allocator not available") - change_current_allocator(rmm.rmm_torch_allocator) + change_current_allocator(rmm_torch_allocator) def test_rmm_torch_allocator(torch_allocator, stats_mr): From 3f30f3b14a34bce20a41938377e662e406436243 Mon Sep 17 00:00:00 2001 From: Bradley Dice Date: Mon, 27 Feb 2023 15:02:48 -0600 Subject: [PATCH 16/25] Remove pickle compatibility layer in tests for Python < 3.8. (#1224) This is a small cleanup PR to remove a pickle compatibility layer for Python < 3.8. Authors: - Bradley Dice (https://github.com/bdice) - https://github.com/jakirkham Approvers: - https://github.com/jakirkham URL: https://github.com/rapidsai/rmm/pull/1224 --- python/rmm/tests/test_rmm.py | 31 +++++++++++-------------------- 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/python/rmm/tests/test_rmm.py b/python/rmm/tests/test_rmm.py index 95afc8db3..70aafe601 100644 --- a/python/rmm/tests/test_rmm.py +++ b/python/rmm/tests/test_rmm.py @@ -15,7 +15,7 @@ import copy import gc import os -import sys +import pickle from itertools import product import numpy as np @@ -27,14 +27,6 @@ from rmm.allocators.cupy import rmm_cupy_allocator from rmm.allocators.numba import RMMNumbaManager -if sys.version_info < (3, 8): - try: - import pickle5 as pickle - except ImportError: - import pickle -else: - import pickle - cuda.set_memory_manager(RMMNumbaManager) _driver_version = rmm._cuda.gpu.driverGetVersion() @@ -278,17 +270,16 @@ def test_rmm_device_buffer_pickle_roundtrip(hb): hb2 = db2.tobytes() assert hb == hb2 # out-of-band - if pickle.HIGHEST_PROTOCOL >= 5: - db = rmm.DeviceBuffer.to_device(hb) - buffers = [] - pb2 = pickle.dumps(db, protocol=5, buffer_callback=buffers.append) - del db - assert len(buffers) == 1 - assert isinstance(buffers[0], pickle.PickleBuffer) - assert bytes(buffers[0]) == hb - db3 = pickle.loads(pb2, buffers=buffers) - hb3 = db3.tobytes() - assert hb3 == hb + db = rmm.DeviceBuffer.to_device(hb) + buffers = [] + pb2 = pickle.dumps(db, protocol=5, buffer_callback=buffers.append) + del db + assert len(buffers) == 1 + assert isinstance(buffers[0], pickle.PickleBuffer) + assert bytes(buffers[0]) == hb + db3 = pickle.loads(pb2, buffers=buffers) + hb3 = db3.tobytes() + assert hb3 == hb @pytest.mark.parametrize("stream", [cuda.default_stream(), cuda.stream()]) From 83c40c82da5aae01553b3e9ac0404200c01b1e8d Mon Sep 17 00:00:00 2001 From: Vyas Ramasubramani Date: Tue, 28 Feb 2023 06:33:24 -0800 Subject: [PATCH 17/25] Fix update-version.sh. (#1227) We need to update the version regex in update-version.sh for the pyproject.toml changes in #1151. 
Authors: - Vyas Ramasubramani (https://github.com/vyasr) Approvers: - Bradley Dice (https://github.com/bdice) - Ray Douglass (https://github.com/raydouglass) URL: https://github.com/rapidsai/rmm/pull/1227 --- ci/release/update-version.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/release/update-version.sh b/ci/release/update-version.sh index c028a8eaf..05218e811 100755 --- a/ci/release/update-version.sh +++ b/ci/release/update-version.sh @@ -37,7 +37,7 @@ sed_runner 's/'"branch-.*\/RAPIDS.cmake"'/'"branch-${NEXT_SHORT_TAG}\/RAPIDS.cma sed_runner 's/'"rmm_version .*)"'/'"rmm_version ${NEXT_FULL_TAG})"'/g' python/CMakeLists.txt sed_runner 's/'"branch-.*\/RAPIDS.cmake"'/'"branch-${NEXT_SHORT_TAG}\/RAPIDS.cmake"'/g' python/CMakeLists.txt sed_runner "s/__version__ = .*/__version__ = \"${NEXT_FULL_TAG}\"/g" python/rmm/__init__.py -sed_runner "s/version=.*,/version=\"${NEXT_FULL_TAG}\",/g" python/setup.py +sed_runner "s/^version = .*/version = \"${NEXT_FULL_TAG}\"/g" python/pyproject.toml # cmake-format rapids-cmake definitions sed_runner 's/'"branch-.*\/cmake-format-rapids-cmake.json"'/'"branch-${NEXT_SHORT_TAG}\/cmake-format-rapids-cmake.json"'/g' ci/check_style.sh From 3bdadd6357fa302667309fe9075f294e40f06bf8 Mon Sep 17 00:00:00 2001 From: Ashwin Srinath <3190405+shwina@users.noreply.github.com> Date: Tue, 28 Feb 2023 14:03:25 -0500 Subject: [PATCH 18/25] Synchronize stream in `DeviceBuffer.c_from_unique_ptr` constructor (#1100) The `DeviceBuffer.__cinit__` method syncs the default stream to ensure that access to the `.ptr` and `.size` attributes of the underlying `rmm::device_buffer` is safe. However, the staticmethod `DeviceBuffer.c_from_unique_ptr` does not. This PR adds a stream sync to it. Authors: - Ashwin Srinath (https://github.com/shwina) - https://github.com/jakirkham Approvers: - Mark Harris (https://github.com/harrism) - https://github.com/jakirkham URL: https://github.com/rapidsai/rmm/pull/1100 --- python/rmm/_lib/device_buffer.pxd | 5 ++++- python/rmm/_lib/device_buffer.pyx | 9 ++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/python/rmm/_lib/device_buffer.pxd b/python/rmm/_lib/device_buffer.pxd index 42c2b4fef..364dbb2c0 100644 --- a/python/rmm/_lib/device_buffer.pxd +++ b/python/rmm/_lib/device_buffer.pxd @@ -49,7 +49,10 @@ cdef class DeviceBuffer: cdef Stream stream @staticmethod - cdef DeviceBuffer c_from_unique_ptr(unique_ptr[device_buffer] ptr) + cdef DeviceBuffer c_from_unique_ptr( + unique_ptr[device_buffer] ptr, + Stream stream=* + ) @staticmethod cdef DeviceBuffer c_to_device(const unsigned char[::1] b, diff --git a/python/rmm/_lib/device_buffer.pyx b/python/rmm/_lib/device_buffer.pyx index 43ebf41ac..c2bfd1459 100644 --- a/python/rmm/_lib/device_buffer.pyx +++ b/python/rmm/_lib/device_buffer.pyx @@ -158,9 +158,16 @@ cdef class DeviceBuffer: return self.copy() @staticmethod - cdef DeviceBuffer c_from_unique_ptr(unique_ptr[device_buffer] ptr): + cdef DeviceBuffer c_from_unique_ptr( + unique_ptr[device_buffer] ptr, + Stream stream=DEFAULT_STREAM + ): cdef DeviceBuffer buf = DeviceBuffer.__new__(DeviceBuffer) + if stream.c_is_default(): + stream.c_synchronize() buf.c_obj = move(ptr) + buf.mr = get_current_device_resource() + buf.stream = stream return buf @staticmethod From 8958356177e9a378d539f37cc70ae88425c478e2 Mon Sep 17 00:00:00 2001 From: Bradley Dice Date: Mon, 6 Mar 2023 12:21:04 -0800 Subject: [PATCH 19/25] Update to GCC 11 (#1228) This PR updates builds to use GCC 11. 
Authors: - Bradley Dice (https://github.com/bdice) Approvers: - AJ Schmidt (https://github.com/ajschmidt8) - Mark Harris (https://github.com/harrism) URL: https://github.com/rapidsai/rmm/pull/1228 --- conda/recipes/librmm/conda_build_config.yaml | 4 ++-- conda/recipes/rmm/conda_build_config.yaml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/conda/recipes/librmm/conda_build_config.yaml b/conda/recipes/librmm/conda_build_config.yaml index 39dec9e5c..be4758d36 100644 --- a/conda/recipes/librmm/conda_build_config.yaml +++ b/conda/recipes/librmm/conda_build_config.yaml @@ -1,8 +1,8 @@ c_compiler_version: - - 9 + - 11 cxx_compiler_version: - - 9 + - 11 cuda_compiler: - nvcc diff --git a/conda/recipes/rmm/conda_build_config.yaml b/conda/recipes/rmm/conda_build_config.yaml index 322fe6faa..ad733ac69 100644 --- a/conda/recipes/rmm/conda_build_config.yaml +++ b/conda/recipes/rmm/conda_build_config.yaml @@ -1,8 +1,8 @@ c_compiler_version: - - 9 + - 11 cxx_compiler_version: - - 9 + - 11 cuda_compiler: - nvcc From 39f6ca57ccbfb696533c320b6745a71ed9ee7716 Mon Sep 17 00:00:00 2001 From: AJ Schmidt Date: Wed, 8 Mar 2023 17:19:00 -0500 Subject: [PATCH 20/25] Pass `AWS_SESSION_TOKEN` and `SCCACHE_S3_USE_SSL` vars to conda build (#1230) This PR ensures that the `AWS_SESSION_TOKEN` and `SCCACHE_S3_USE_SSL` environment variables are passed to our conda build process. `AWS_SESSION_TOKEN` is necessary in order to support using temporary credentials via AWS STS (we recently adopted this method in CI). `SCCACHE_S3_USE_SSL` has been reported to increase cache performance for S3. This PR also alphabetizes the `script_env` lists. Authors: - AJ Schmidt (https://github.com/ajschmidt8) Approvers: - Ray Douglass (https://github.com/raydouglass) URL: https://github.com/rapidsai/rmm/pull/1230 --- conda/recipes/librmm/meta.yaml | 18 ++++++++++-------- conda/recipes/rmm/meta.yaml | 16 +++++++++------- 2 files changed, 19 insertions(+), 15 deletions(-) diff --git a/conda/recipes/librmm/meta.yaml b/conda/recipes/librmm/meta.yaml index dd3e58b62..5513d5059 100644 --- a/conda/recipes/librmm/meta.yaml +++ b/conda/recipes/librmm/meta.yaml @@ -32,18 +32,20 @@ requirements: build: script_env: - - PARALLEL_LEVEL - - CMAKE_GENERATOR + - AWS_ACCESS_KEY_ID + - AWS_SECRET_ACCESS_KEY + - AWS_SESSION_TOKEN - CMAKE_C_COMPILER_LAUNCHER - - CMAKE_CXX_COMPILER_LAUNCHER - CMAKE_CUDA_COMPILER_LAUNCHER - - SCCACHE_S3_KEY_PREFIX=librmm-aarch64 # [aarch64] - - SCCACHE_S3_KEY_PREFIX=librmm-linux64 # [linux64] + - CMAKE_CXX_COMPILER_LAUNCHER + - CMAKE_GENERATOR + - PARALLEL_LEVEL - SCCACHE_BUCKET - - SCCACHE_REGION - SCCACHE_IDLE_TIMEOUT - - AWS_ACCESS_KEY_ID - - AWS_SECRET_ACCESS_KEY + - SCCACHE_REGION + - SCCACHE_S3_KEY_PREFIX=librmm-aarch64 # [aarch64] + - SCCACHE_S3_KEY_PREFIX=librmm-linux64 # [linux64] + - SCCACHE_S3_USE_SSL outputs: - name: librmm diff --git a/conda/recipes/rmm/meta.yaml b/conda/recipes/rmm/meta.yaml index b816e24c5..2a4d668ef 100644 --- a/conda/recipes/rmm/meta.yaml +++ b/conda/recipes/rmm/meta.yaml @@ -17,17 +17,19 @@ build: number: {{ GIT_DESCRIBE_NUMBER }} string: cuda{{ cuda_major }}_py{{ py_version }}_{{ date_string }}_{{ GIT_DESCRIBE_HASH }}_{{ GIT_DESCRIBE_NUMBER }} script_env: - - CMAKE_GENERATOR + - AWS_ACCESS_KEY_ID + - AWS_SECRET_ACCESS_KEY + - AWS_SESSION_TOKEN - CMAKE_C_COMPILER_LAUNCHER - - CMAKE_CXX_COMPILER_LAUNCHER - CMAKE_CUDA_COMPILER_LAUNCHER - - SCCACHE_S3_KEY_PREFIX=rmm-aarch64 # [aarch64] - - SCCACHE_S3_KEY_PREFIX=rmm-linux64 # [linux64] + - CMAKE_CXX_COMPILER_LAUNCHER + - 
CMAKE_GENERATOR - SCCACHE_BUCKET - - SCCACHE_REGION - SCCACHE_IDLE_TIMEOUT - - AWS_ACCESS_KEY_ID - - AWS_SECRET_ACCESS_KEY + - SCCACHE_REGION + - SCCACHE_S3_KEY_PREFIX=rmm-aarch64 # [aarch64] + - SCCACHE_S3_KEY_PREFIX=rmm-linux64 # [linux64] + - SCCACHE_S3_USE_SSL ignore_run_exports_from: - {{ compiler('cuda') }} From 663a9b0fb022864ed89f247e25465132a4eca589 Mon Sep 17 00:00:00 2001 From: Bradley Dice Date: Tue, 14 Mar 2023 16:29:37 -0500 Subject: [PATCH 21/25] Add codespell as a linter (#1231) Following the example of https://github.com/rapidsai/cudf/pull/12097, this PR adds [codespell](https://github.com/codespell-project/codespell) as a linter for rmm. Note: I have not included a section in the CONTRIBUTING.md about how to use this (as was done in cudf's PR) because I plan to overhaul the contributing guides for all RAPIDS repos in the near term, and have a single source in docs.rapids.ai with common information about linters used in RAPIDS. Authors: - Bradley Dice (https://github.com/bdice) Approvers: - Rong Ou (https://github.com/rongou) - Ben Frederickson (https://github.com/benfred) - Mark Harris (https://github.com/harrism) URL: https://github.com/rapidsai/rmm/pull/1231 --- .pre-commit-config.yaml | 9 +++++++++ benchmarks/synchronization/synchronization.hpp | 2 +- include/rmm/detail/stack_trace.hpp | 2 +- .../mr/device/detail/stream_ordered_memory_resource.hpp | 2 +- pyproject.toml | 9 +++++++++ tests/device_buffer_tests.cu | 2 +- tests/device_scalar_tests.cpp | 2 +- 7 files changed, 23 insertions(+), 5 deletions(-) create mode 100644 pyproject.toml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 261c57b27..bcbf17018 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -49,6 +49,15 @@ repos: (?x)^( ^benchmarks/utilities/cxxopts.hpp ) + - repo: https://github.com/codespell-project/codespell + rev: v2.2.4 + hooks: + - id: codespell + exclude: | + (?x)^( + pyproject.toml| + benchmarks/utilities/cxxopts.hpp + ) - repo: local hooks: - id: cmake-format diff --git a/benchmarks/synchronization/synchronization.hpp b/benchmarks/synchronization/synchronization.hpp index b0007d9b2..e9a3cd34a 100644 --- a/benchmarks/synchronization/synchronization.hpp +++ b/benchmarks/synchronization/synchronization.hpp @@ -50,7 +50,7 @@ } // Register the function as a benchmark. You will need to set the `UseManualTime()` - // flag in order to use the timer embeded in this class. + // flag in order to use the timer embedded in this class. BENCHMARK(sample_cuda_benchmark)->UseManualTime(); diff --git a/include/rmm/detail/stack_trace.hpp b/include/rmm/detail/stack_trace.hpp index 082cef02e..f658fe7ad 100644 --- a/include/rmm/detail/stack_trace.hpp +++ b/include/rmm/detail/stack_trace.hpp @@ -39,7 +39,7 @@ namespace rmm::detail { /** - * @brief stack_trace is a class that will capture a stack on instatiation for output later. + * @brief stack_trace is a class that will capture a stack on instantiation for output later. * It can then be used in an output stream to display stack information. 
* * rmm::detail::stack_trace saved_stack; diff --git a/include/rmm/mr/device/detail/stream_ordered_memory_resource.hpp b/include/rmm/mr/device/detail/stream_ordered_memory_resource.hpp index fa53e480a..ddd3cb728 100644 --- a/include/rmm/mr/device/detail/stream_ordered_memory_resource.hpp +++ b/include/rmm/mr/device/detail/stream_ordered_memory_resource.hpp @@ -58,7 +58,7 @@ struct crtp { * This base class uses CRTP (https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern) * to provide static polymorphism to enable defining suballocator resources that maintain separate * pools per stream. All of the stream-ordering logic is contained in this class, but the logic - * to determine how memory pools are managed and the type of allocation is implented in a derived + * to determine how memory pools are managed and the type of allocation is implemented in a derived * class and in a free list class. * * For example, a coalescing pool memory resource uses a coalescing_free_list and maintains data diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..8a412773e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,9 @@ +[tool.codespell] +# note: pre-commit passes explicit lists of files here, which this skip file list doesn't override - +# this is only to allow you to run codespell interactively +skip = "./pyproject.toml,./.git,./.github,./cpp/build,.*egg-info.*,./.mypy_cache,./benchmarks/utilities/cxxopts.hpp" +# ignore short words, and typename parameters like OffsetT +ignore-regex = "\\b(.{1,4}|[A-Z]\\w*T)\\b" +ignore-words-list = "inout" +builtin = "clear" +quiet-level = 3 diff --git a/tests/device_buffer_tests.cu b/tests/device_buffer_tests.cu index fadae7bb2..d4c34385e 100644 --- a/tests/device_buffer_tests.cu +++ b/tests/device_buffer_tests.cu @@ -438,7 +438,7 @@ TYPED_TEST(DeviceBufferTest, ResizeSmaller) buff.shrink_to_fit(rmm::cuda_stream_default); EXPECT_NE(nullptr, buff.data()); - // A reallocation should have occured + // A reallocation should have occurred EXPECT_NE(old_data, buff.data()); EXPECT_EQ(new_size, buff.size()); EXPECT_EQ(buff.capacity(), buff.size()); diff --git a/tests/device_scalar_tests.cpp b/tests/device_scalar_tests.cpp index fd92f7d6e..7fbdaec29 100644 --- a/tests/device_scalar_tests.cpp +++ b/tests/device_scalar_tests.cpp @@ -71,7 +71,7 @@ using Types = ::testing::Types scalar{this->stream, this->mr}; EXPECT_NE(nullptr, scalar.data()); From d26ebfc983eef5ef7fd680a7b5b512eee367f191 Mon Sep 17 00:00:00 2001 From: Vyas Ramasubramani Date: Thu, 16 Mar 2023 17:04:54 -0400 Subject: [PATCH 22/25] Remove MANIFEST.in use auto-generated one for sdists and package_data for wheels (#1233) Using MANIFEST.in currently runs into a pretty nasty scikit-build bug (https://github.com/scikit-build/scikit-build/issues/886) that results in any file included by the manifest being copied from the install tree back into the source tree whenever an in place build occurs after an install, overwriting any local changes. We need an alternative approach to ensure that all necessary files are included in built packages. There are two types: - sdists: scikit-build automatically generates a manifest during sdist generation if we don't provide one, and that manifest is reliably complete. It contains all files needed for a source build up to the rmm C++ code (which has always been true and is something we can come back to improving later if desired). 
- wheels: The autogenerated manifest is not used during wheel generation because the manifest generation hook is not invoked during wheel builds, so to include data in the wheels we must provide the `package_data` argument to `setup`. In this case we do not need to include CMake or pyx files because the result does not need to be possible to build from, it just needs pxd files for other packages to cimport if desired. Authors: - Vyas Ramasubramani (https://github.com/vyasr) Approvers: - Bradley Dice (https://github.com/bdice) URL: https://github.com/rapidsai/rmm/pull/1233 --- python/MANIFEST.in | 7 ------- python/setup.py | 7 ++++--- 2 files changed, 4 insertions(+), 10 deletions(-) delete mode 100644 python/MANIFEST.in diff --git a/python/MANIFEST.in b/python/MANIFEST.in deleted file mode 100644 index 0a521fd9e..000000000 --- a/python/MANIFEST.in +++ /dev/null @@ -1,7 +0,0 @@ -# Cython files -recursive-include rmm *.pxd -recursive-include rmm *.pyx - -# Build files. Don't use a recursive include on '.' in case the repo is dirty -include . CMakeLists.txt -recursive-include rmm CMakeLists.txt diff --git a/python/setup.py b/python/setup.py index 64d8524e3..48b35d1bb 100644 --- a/python/setup.py +++ b/python/setup.py @@ -1,10 +1,11 @@ -# Copyright (c) 2019-2022, NVIDIA CORPORATION. +# Copyright (c) 2019-2023, NVIDIA CORPORATION. from setuptools import find_packages from skbuild import setup +packages = find_packages(include=["rmm*"]) setup( - packages=find_packages(include=["rmm", "rmm.*"]), - include_package_data=True, + packages=packages, + package_data={key: ["*.pxd"] for key in packages}, zip_safe=False, ) From 591726f2c4c0178daf7d9f2ca605cdf03f5137ba Mon Sep 17 00:00:00 2001 From: Robert Maynard Date: Mon, 20 Mar 2023 10:32:24 -0400 Subject: [PATCH 23/25] Use rapids-cmake parallel testing feature (#1183) Converts librmm over to use `rapids-cmake` new GPU aware parallel testing feature, which allows tests to run across all the GPUs on a machine without oversubscription. This will allow developers to run `ctest -j` and ctest will figure out given the current machine how many tests it can run in parallel given the current GPU set ( currently 4 tests per GPU ). 
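For illustration, a minimal invocation (assuming the test binaries were already built and installed, as in the ci/test_cpp.sh change below) would be:

    # Run the registered tests in parallel; ctest packs tests onto the available
    # GPUs using the GPUS/PERCENT requirements each test declares in CMake.
    ctest -j20 --output-on-failure
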
Authors: - Robert Maynard (https://github.com/robertmaynard) Approvers: - Vyas Ramasubramani (https://github.com/vyasr) - Mark Harris (https://github.com/harrism) - Ray Douglass (https://github.com/raydouglass) URL: https://github.com/rapidsai/rmm/pull/1183 --- ci/test_cpp.sh | 6 ++--- dependencies.yaml | 8 ++++++- tests/CMakeLists.txt | 57 +++++++++++++++++++++++++++++++------------- 3 files changed, 51 insertions(+), 20 deletions(-) diff --git a/ci/test_cpp.sh b/ci/test_cpp.sh index 46022f8f2..12b3ab100 100755 --- a/ci/test_cpp.sh +++ b/ci/test_cpp.sh @@ -32,9 +32,9 @@ trap "EXITCODE=1" ERR set +e rapids-logger "Running googletests" -for gt in "$CONDA_PREFIX/bin/gtests/librmm/"* ; do - ${gt} --gtest_output=xml:${RAPIDS_TESTS_DIR}/ -done +cd $CONDA_PREFIX/bin/gtests/librmm/ +export GTEST_OUTPUT=xml:${RAPIDS_TESTS_DIR}/ +ctest -j20 --output-on-failure rapids-logger "Test script exiting with value: $EXITCODE" exit ${EXITCODE} diff --git a/dependencies.yaml b/dependencies.yaml index e6fde1b98..37885a940 100644 --- a/dependencies.yaml +++ b/dependencies.yaml @@ -22,6 +22,7 @@ files: output: none includes: - cudatoolkit + - test_cpp checks: output: none includes: @@ -41,7 +42,7 @@ dependencies: common: - output_types: [conda, requirements] packages: - - cmake>=3.23.1,!=3.25.0 + - &cmake_ver cmake>=3.23.1,!=3.25.0 - cuda-python>=11.7.1,<12.0 - cython>=0.29,<0.30 - ninja @@ -121,6 +122,11 @@ dependencies: packages: - numba>=0.49 - numpy>=1.19 + test_cpp: + common: + - output_types: conda + packages: + - *cmake_ver test_python: common: - output_types: [conda, requirements] diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index ecb44a49c..24e9d240d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -16,6 +16,9 @@ option(DISABLE_DEPRECATION_WARNING "Disable warnings generated from deprecated declarations." OFF) option(CODE_COVERAGE "Enable generating code coverage with gcov." OFF) +include(rapids-test) +rapids_test_init() + # This function takes in a test name and test source and handles setting all of the associated # properties and linking to build the test function(ConfigureTestInternal TEST_NAME) @@ -68,48 +71,68 @@ function(ConfigureTestInternal TEST_NAME) PROPERTY ADDITIONAL_CLEAN_FILES ${COVERAGE_CLEAN_FILES}) endif() - add_test(NAME ${TEST_NAME} COMMAND ${TEST_NAME}) - - install( - TARGETS ${TEST_NAME} - COMPONENT testing - DESTINATION bin/gtests/librmm - EXCLUDE_FROM_ALL) endfunction() # Wrapper around `ConfigureTestInternal` that builds tests both with and without per thread default # stream function(ConfigureTest TEST_NAME) + + set(options) + set(one_value GPUS PERCENT) + set(multi_value) + cmake_parse_arguments(_RMM_TEST "${options}" "${one_value}" "${multi_value}" ${ARGN}) + if(NOT DEFINED _RMM_TEST_GPUS AND NOT DEFINED _RMM_TEST_PERCENT) + set(_RMM_TEST_GPUS 1) + set(_RMM_TEST_PERCENT 5) + endif() + if(NOT DEFINED _RMM_TEST_GPUS) + set(_RMM_TEST_GPUS 1) + endif() + if(NOT DEFINED _RMM_TEST_PERCENT) + set(_RMM_TEST_PERCENT 100) + endif() + # Test with legacy default stream. - ConfigureTestInternal(${TEST_NAME} ${ARGN}) + ConfigureTestInternal(${TEST_NAME} ${_RMM_TEST_UNPARSED_ARGUMENTS}) # Test with per-thread default stream. 
string(REGEX REPLACE "_TEST$" "_PTDS_TEST" PTDS_TEST_NAME "${TEST_NAME}") - ConfigureTestInternal("${PTDS_TEST_NAME}" ${ARGN}) + ConfigureTestInternal("${PTDS_TEST_NAME}" ${_RMM_TEST_UNPARSED_ARGUMENTS}) target_compile_definitions("${PTDS_TEST_NAME}" PUBLIC CUDA_API_PER_THREAD_DEFAULT_STREAM) # Test with custom thrust namespace string(REGEX REPLACE "_TEST$" "_NAMESPACE_TEST" NS_TEST_NAME "${TEST_NAME}") - ConfigureTestInternal("${NS_TEST_NAME}" ${ARGN}) + ConfigureTestInternal("${NS_TEST_NAME}" ${_RMM_TEST_UNPARSED_ARGUMENTS}) target_compile_definitions("${NS_TEST_NAME}" PUBLIC THRUST_WRAPPED_NAMESPACE=rmm_thrust) + + foreach(name ${TEST_NAME} ${PTDS_TEST_NAME} ${NS_TEST_NAME}) + rapids_test_add( + NAME ${name} + COMMAND ${TEST_NAME} + GPUS ${_RMM_TEST_GPUS} + PERCENT ${_RMM_TEST_PERCENT} + INSTALL_COMPONENT_SET testing) + endforeach() + endfunction() # test sources # device mr tests -ConfigureTest(DEVICE_MR_TEST mr/device/mr_tests.cpp mr/device/mr_multithreaded_tests.cpp) +ConfigureTest(DEVICE_MR_TEST mr/device/mr_tests.cpp mr/device/mr_multithreaded_tests.cpp GPUS 1 + PERCENT 90) # general adaptor tests ConfigureTest(ADAPTOR_TEST mr/device/adaptor_tests.cpp) # pool mr tests -ConfigureTest(POOL_MR_TEST mr/device/pool_mr_tests.cpp) +ConfigureTest(POOL_MR_TEST mr/device/pool_mr_tests.cpp GPUS 1 PERCENT 60) # cuda_async mr tests -ConfigureTest(CUDA_ASYNC_MR_TEST mr/device/cuda_async_mr_tests.cpp) +ConfigureTest(CUDA_ASYNC_MR_TEST mr/device/cuda_async_mr_tests.cpp GPUS 1 PERCENT 60) # thrust allocator tests -ConfigureTest(THRUST_ALLOCATOR_TEST mr/device/thrust_allocator_tests.cu) +ConfigureTest(THRUST_ALLOCATOR_TEST mr/device/thrust_allocator_tests.cu GPUS 1 PERCENT 60) # polymorphic allocator tests ConfigureTest(POLYMORPHIC_ALLOCATOR_TEST mr/device/polymorphic_allocator_tests.cpp) @@ -148,13 +171,15 @@ ConfigureTest(DEVICE_SCALAR_TEST device_scalar_tests.cpp) ConfigureTest(LOGGER_TEST logger_tests.cpp) # uvector tests -ConfigureTest(DEVICE_UVECTOR_TEST device_uvector_tests.cpp) +ConfigureTest(DEVICE_UVECTOR_TEST device_uvector_tests.cpp GPUS 1 PERCENT 60) # arena MR tests -ConfigureTest(ARENA_MR_TEST mr/device/arena_mr_tests.cpp) +ConfigureTest(ARENA_MR_TEST mr/device/arena_mr_tests.cpp GPUS 1 PERCENT 60) # binning MR tests ConfigureTest(BINNING_MR_TEST mr/device/binning_mr_tests.cpp) # callback memory resource tests ConfigureTest(CALLBACK_MR_TEST mr/device/callback_mr_tests.cpp) + +rapids_test_install_relocatable(INSTALL_COMPONENT_SET testing DESTINATION bin/gtests/librmm) From d93f011336e37f482b86288310c990de423636ba Mon Sep 17 00:00:00 2001 From: Vyas Ramasubramani Date: Tue, 21 Mar 2023 16:23:08 -0400 Subject: [PATCH 24/25] Stop setting package version attribute in wheels (#1236) This PR removes modification of the `__init__.py::version` attribute that occurs during the wheel build process. See https://github.com/rapidsai/ops/issues/2592 for more information. 
Authors: - Vyas Ramasubramani (https://github.com/vyasr) Approvers: - Sevag H (https://github.com/sevagh) URL: https://github.com/rapidsai/rmm/pull/1236 --- ci/release/apply_wheel_modifications.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/ci/release/apply_wheel_modifications.sh b/ci/release/apply_wheel_modifications.sh index 90472d869..6889520a9 100755 --- a/ci/release/apply_wheel_modifications.sh +++ b/ci/release/apply_wheel_modifications.sh @@ -6,7 +6,6 @@ VERSION=${1} CUDA_SUFFIX=${2} -sed -i "s/__version__ = .*/__version__ = \"${VERSION}\"/g" python/rmm/__init__.py sed -i "s/^version = .*/version = \"${VERSION}\"/g" python/pyproject.toml sed -i "s/^name = \"rmm\"/name = \"rmm${CUDA_SUFFIX}\"/g" python/pyproject.toml From e8fbd06e98c22d91f2b453f4b475fe9c4a93958a Mon Sep 17 00:00:00 2001 From: Vyas Ramasubramani Date: Tue, 21 Mar 2023 16:35:26 -0400 Subject: [PATCH 25/25] Generate pyproject.toml dependencies using dfg (#1219) This PR updates dependencies.yaml to also generates the relevant dependency sections of pyproject.toml. Authors: - Vyas Ramasubramani (https://github.com/vyasr) Approvers: - Bradley Dice (https://github.com/bdice) URL: https://github.com/rapidsai/rmm/pull/1219 --- .pre-commit-config.yaml | 2 +- dependencies.yaml | 34 +++++++++++++++++++++++++++++----- python/pyproject.toml | 22 +++++++++++----------- 3 files changed, 41 insertions(+), 17 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bcbf17018..b230cafb7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,7 @@ repos: - id: trailing-whitespace - id: end-of-file-fixer - repo: https://github.com/rapidsai/dependency-file-generator - rev: v1.4.0 + rev: v1.5.1 hooks: - id: rapids-dependency-file-generator args: ["--clean"] diff --git a/dependencies.yaml b/dependencies.yaml index 37885a940..f39f62803 100644 --- a/dependencies.yaml +++ b/dependencies.yaml @@ -34,25 +34,48 @@ files: - cudatoolkit - docs - py_version + py_build: + output: pyproject + extras: + table: build-system + includes: + - build + py_run: + output: pyproject + extras: + table: project + includes: + - run + py_optional_test: + output: pyproject + extras: + table: project.optional-dependencies + key: test + includes: + - test_python channels: - rapidsai - conda-forge dependencies: build: common: - - output_types: [conda, requirements] + - output_types: [conda, requirements, pyproject] packages: - &cmake_ver cmake>=3.23.1,!=3.25.0 - - cuda-python>=11.7.1,<12.0 + - &cuda_python cuda-python>=11.7.1,<12.0 - cython>=0.29,<0.30 - ninja - - python>=3.8,<3.11 - scikit-build>=0.13.1 - tomli - output_types: conda packages: - fmt>=9.1.0,<10 - spdlog>=1.11.0,<1.12 + - python>=3.8,<3.11 + - output_types: pyproject + packages: + - wheel + - setuptools>=61.0.0 checks: common: - output_types: [conda, requirements] @@ -118,10 +141,11 @@ dependencies: - python=3.10 run: common: - - output_types: [conda, requirements] + - output_types: [conda, requirements, pyproject] packages: - numba>=0.49 - numpy>=1.19 + - *cuda_python test_cpp: common: - output_types: conda @@ -129,7 +153,7 @@ dependencies: - *cmake_ver test_python: common: - - output_types: [conda, requirements] + - output_types: [conda, requirements, pyproject] packages: - pytest - pytest-cov diff --git a/python/pyproject.toml b/python/pyproject.toml index 873e21445..a462c341f 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -15,15 +15,15 @@ [build-system] build-backend = "setuptools.build_meta" requires = [ - "wheel", - 
"setuptools>=61.0.0", - "cython>=0.29,<0.30", - "scikit-build>=0.13.1", "cmake>=3.23.1,!=3.25.0", - "ninja", "cuda-python>=11.7.1,<12.0", - "tomli; python_version < '3.11'", -] + "cython>=0.29,<0.30", + "ninja", + "scikit-build>=0.13.1", + "setuptools>=61.0.0", + "tomli", + "wheel", +] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../dependencies.yaml and run `rapids-dependency-file-generator`. [project] name = "rmm" @@ -37,9 +37,9 @@ license = { text = "Apache 2.0" } requires-python = ">=3.8" dependencies = [ "cuda-python>=11.7.1,<12.0", - "numpy>=1.19", "numba>=0.49", -] + "numpy>=1.19", +] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../dependencies.yaml and run `rapids-dependency-file-generator`. classifiers = [ "Intended Audience :: Developers", "Topic :: Database", @@ -53,8 +53,8 @@ classifiers = [ [project.optional-dependencies] test = [ "pytest", - "pytest-xdist", -] + "pytest-cov", +] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../dependencies.yaml and run `rapids-dependency-file-generator`. [project.urls] Homepage = "https://github.com/rapidsai/rmm"