From 5384ed85c8d83fd9393b5d6ddcb447b74b95a98c Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Fri, 18 Sep 2020 12:51:49 +0800
Subject: [PATCH] Use caching allocator from RMM, when RMM is enabled (#6131)

---
 src/common/device_helpers.cuh | 7 ++++++-
 src/metric/rank_metric.cu     | 7 ++++++-
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/src/common/device_helpers.cuh b/src/common/device_helpers.cuh
index b3c05ce23e3d..70b3895fc7fe 100644
--- a/src/common/device_helpers.cuh
+++ b/src/common/device_helpers.cuh
@@ -406,10 +406,14 @@ struct XGBDefaultDeviceAllocatorImpl : XGBBaseDeviceAllocator<T> {
   }
 #if defined(XGBOOST_USE_RMM) && XGBOOST_USE_RMM == 1
   XGBDefaultDeviceAllocatorImpl()
-    : SuperT(rmm::mr::get_current_device_resource(), cudaStream_t{0}) {}
+    : SuperT(rmm::mr::get_current_device_resource(), cudaStream_t{nullptr}) {}
 #endif  // defined(XGBOOST_USE_RMM) && XGBOOST_USE_RMM == 1
 };
 
+#if defined(XGBOOST_USE_RMM) && XGBOOST_USE_RMM == 1
+template <typename T>
+using XGBCachingDeviceAllocatorImpl = XGBDefaultDeviceAllocatorImpl<T>;
+#else
 /**
  * \brief Caching memory allocator, uses cub::CachingDeviceAllocator as a back-end and logs
  * allocations if verbose. Does not initialise memory on construction.
@@ -448,6 +452,7 @@ struct XGBCachingDeviceAllocatorImpl : thrust::device_malloc_allocator<T> {
     // no-op
   }
 };
+#endif  // defined(XGBOOST_USE_RMM) && XGBOOST_USE_RMM == 1
 }  // namespace detail
 
 // Declare xgboost allocators
diff --git a/src/metric/rank_metric.cu b/src/metric/rank_metric.cu
index 290b1fb023c5..fbb8e5f854ba 100644
--- a/src/metric/rank_metric.cu
+++ b/src/metric/rank_metric.cu
@@ -12,6 +12,7 @@
 
 #include <xgboost/metric.h>
 #include <xgboost/host_device_vector.h>
+#include <array>
 #include <cmath>
 
 #include "metric_common.h"
@@ -379,7 +380,11 @@ struct EvalAucGpu : public Metric {
       }
     });
 
-    auto nunique_preds = seg_idx.back();
+    std::array<uint32_t, 1> h_nunique_preds;
+    dh::safe_cuda(cudaMemcpyAsync(h_nunique_preds.data(),
+                                  seg_idx.data().get() + seg_idx.size() - 1,
+                                  sizeof(uint32_t), cudaMemcpyDeviceToHost));
+    auto nunique_preds = h_nunique_preds.back();
     ReleaseMemory(seg_idx);
 
     // Next, accumulate the positive and negative precisions for every prediction group