Remove padding from device_memory_resource (#1278)

This PR removes the `align_up` call that enforces 8-byte padding on all RMM device memory allocations. Padding, where needed, should be the responsibility of callers, possibly with the help of a padding adapter (see the sketches below); it should not be enforced by RMM.
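For callers that do want the old sizes, a minimal sketch of caller-side padding follows. It reuses `rmm::detail::align_up`, the same helper this PR removes from `allocate()`/`deallocate()`; note that `detail` headers are internal, so copying the round-up arithmetic locally is the more conservative option. The resource and sizes here are illustrative, not taken from this PR.

```cpp
#include <cstddef>

#include <rmm/detail/aligned.hpp>  // rmm::detail::align_up (internal helper)
#include <rmm/mr/device/cuda_memory_resource.hpp>

int main()
{
  rmm::mr::cuda_memory_resource mr;

  std::size_t const bytes  = 5;
  // Round up to a multiple of 8, as device_memory_resource used to do.
  std::size_t const padded = rmm::detail::align_up(bytes, std::size_t{8});  // 5 -> 8

  void* ptr = mr.allocate(padded);  // allocate() no longer pads for you
  mr.deallocate(ptr, padded);       // pass the same padded size back
  return 0;
}
```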

I looked at all the other `align_up` calls in the library and didn't find any others that needed removing: they all appear to guarantee intentional alignment requirements of specific memory resources or adapters. I would appreciate verification from reviewers, though.

I've labeled this PR as breaking because it could break consumers that were implicitly relying on the current padding behavior.
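For consumers that cannot audit every call site, the padding behavior could be restored wholesale with an adapter in the spirit of the one mentioned above. The following is a hypothetical sketch, not something added by this PR; the overridden hooks follow the `device_memory_resource` interface as of this commit (including the `supports_streams`/`get_mem_info` pair, which still exists here), so verify against the header before relying on it.

```cpp
#include <cstddef>
#include <utility>

#include <rmm/cuda_stream_view.hpp>
#include <rmm/mr/device/device_memory_resource.hpp>

// Hypothetical adapter: rounds every allocation size up to a multiple of `pad`
// bytes before forwarding to the upstream resource. Not part of RMM.
template <typename Upstream>
class padding_adaptor final : public rmm::mr::device_memory_resource {
 public:
  padding_adaptor(Upstream* upstream, std::size_t pad) : upstream_{upstream}, pad_{pad} {}

  bool supports_streams() const noexcept override { return upstream_->supports_streams(); }
  bool supports_get_mem_info() const noexcept override
  {
    return upstream_->supports_get_mem_info();
  }

 private:
  // Round `bytes` up to the next multiple of pad_ (matches align_up for power-of-two pads).
  std::size_t pad(std::size_t bytes) const noexcept { return (bytes + pad_ - 1) / pad_ * pad_; }

  void* do_allocate(std::size_t bytes, rmm::cuda_stream_view stream) override
  {
    return upstream_->allocate(pad(bytes), stream);
  }

  void do_deallocate(void* ptr, std::size_t bytes, rmm::cuda_stream_view stream) override
  {
    upstream_->deallocate(ptr, pad(bytes), stream);
  }

  bool do_is_equal(device_memory_resource const& other) const noexcept override
  {
    return this == &other;
  }

  std::pair<std::size_t, std::size_t> do_get_mem_info(rmm::cuda_stream_view stream) const override
  {
    return upstream_->get_mem_info(stream);
  }

  Upstream* upstream_;
  std::size_t pad_;
};
```

Wrapping the default resource as `padding_adaptor<rmm::mr::cuda_memory_resource> padded{&upstream, 8};` would let downstream code keep seeing pre-#1278 sizes without call-site changes.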

Resolves #865
Resolves #1156

Authors:
  - Vyas Ramasubramani (https://github.com/vyasr)

Approvers:
  - Rong Ou (https://github.com/rongou)
  - Mark Harris (https://github.com/harrism)

URL: #1278
vyasr authored Jun 2, 2023
1 parent 8b3e2c4 commit 64da5fe
Showing 3 changed files with 5 additions and 10 deletions.
7 changes: 2 additions & 5 deletions include/rmm/mr/device/device_memory_resource.hpp

@@ -105,7 +105,7 @@ class device_memory_resource {
    */
   void* allocate(std::size_t bytes, cuda_stream_view stream = cuda_stream_view{})
   {
-    return do_allocate(rmm::detail::align_up(bytes, allocation_size_alignment), stream);
+    return do_allocate(bytes, stream);
   }
 
   /**
@@ -128,7 +128,7 @@
    */
   void deallocate(void* ptr, std::size_t bytes, cuda_stream_view stream = cuda_stream_view{})
   {
-    do_deallocate(ptr, rmm::detail::align_up(bytes, allocation_size_alignment), stream);
+    do_deallocate(ptr, bytes, stream);
   }
 
   /**
@@ -178,9 +178,6 @@
   }
 
  private:
-  // All allocations are padded to a multiple of allocation_size_alignment bytes.
-  static constexpr auto allocation_size_alignment = std::size_t{8};
-
   /**
    * @brief Allocates memory of size at least \p bytes.
    *
2 changes: 1 addition & 1 deletion tests/logger_tests.cpp

@@ -113,7 +113,7 @@ void expect_log_events(std::string const& filename,
       // EXPECT_EQ(expected.stream, actual.stream);
       EXPECT_EQ(expected.act, actual.act);
       // device_memory_resource automatically pads an allocation to a multiple of 8 bytes
-      EXPECT_EQ(rmm::detail::align_up(expected.size, 8), actual.size);
+      EXPECT_EQ(expected.size, actual.size);
       EXPECT_EQ(expected.pointer, actual.pointer);
       return true;
     });
6 changes: 2 additions & 4 deletions tests/mr/device/aligned_mr_tests.cpp

@@ -87,9 +87,8 @@ TEST(AlignedTest, DefaultAllocationAlignmentPassthrough)
   cuda_stream_view stream;
   void* const pointer = int_to_address(123);
 
-  // device_memory_resource aligns to 8.
   {
-    auto const size{8};
+    auto const size{5};
     EXPECT_CALL(mock, do_allocate(size, stream)).WillOnce(Return(pointer));
     EXPECT_CALL(mock, do_deallocate(pointer, size, stream)).Times(1);
   }
@@ -110,9 +109,8 @@ TEST(AlignedTest, BelowAlignmentThresholdPassthrough)
 
   cuda_stream_view stream;
   void* const pointer = int_to_address(123);
-  // device_memory_resource aligns to 8.
   {
-    auto const size{8};
+    auto const size{3};
     EXPECT_CALL(mock, do_allocate(size, stream)).WillOnce(Return(pointer));
     EXPECT_CALL(mock, do_deallocate(pointer, size, stream)).Times(1);
   }
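Note that the new test sizes (5 and 3) are deliberately not multiples of 8: under the old behavior a request for either would have reached the mock as 8, so the original `size{8}` expectations could never have detected padding. A standalone illustration of the removed round-up arithmetic (the function name and constant here are mine, not from the test file):

```cpp
#include <cassert>
#include <cstddef>

// The 8-byte round-up formerly applied by device_memory_resource::allocate.
constexpr std::size_t align_up_8(std::size_t v) { return (v + 7) & ~std::size_t{7}; }

int main()
{
  assert(align_up_8(5) == 8);  // old behavior: the mock saw 8, not 5
  assert(align_up_8(3) == 8);  // likewise for the below-threshold test
  assert(align_up_8(8) == 8);  // a multiple of 8 passes through either way
  return 0;
}
```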
