Rename IterativeDMatrix. (#8045)
trivialfis committed Jul 4, 2022
1 parent f24bfc7 commit 8746f9c
Showing 5 changed files with 29 additions and 32 deletions.
4 changes: 2 additions & 2 deletions src/data/data.cc
@@ -26,7 +26,7 @@
#include "../common/group_data.h"
#include "../common/threading_utils.h"
#include "../data/adapter.h"
#include "../data/iterative_device_dmatrix.h"
#include "../data/iterative_dmatrix.h"
#include "file_iterator.h"

#include "validation.h"
@@ -938,7 +938,7 @@ DMatrix *DMatrix::Create(DataIterHandle iter, DMatrixHandle proxy,
XGDMatrixCallbackNext *next, float missing,
int nthread,
int max_bin) {
-return new data::IterativeDeviceDMatrix(iter, proxy, reset, next, missing,
+return new data::IterativeDMatrix(iter, proxy, reset, next, missing,
nthread, max_bin);
}
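For orientation, this factory is what the callback-based (external data iterator) DMatrix construction path lands on; after the rename it simply forwards the opaque handles to data::IterativeDMatrix. A minimal sketch of an equivalent wrapper is shown below — the handle typedefs and include paths are assumptions based on XGBoost's C API, not something taken from this commit:

// Sketch only: forwards the C-API callback handles to the renamed class,
// mirroring the factory body above.  The handle typedefs (DataIterHandle,
// DMatrixHandle, DataIterResetCallback, XGDMatrixCallbackNext) are assumed
// to come from xgboost/c_api.h.
#include <limits>

#include "xgboost/c_api.h"
#include "../data/iterative_dmatrix.h"

xgboost::DMatrix *MakeIterativeDMatrix(DataIterHandle iter, DMatrixHandle proxy,
                                       DataIterResetCallback *reset,
                                       XGDMatrixCallbackNext *next, float missing) {
  // nthread = 0 lets XGBoost pick the thread count; 256 bins mirrors the tests below.
  return new xgboost::data::IterativeDMatrix(iter, proxy, reset, next, missing,
                                              /*nthread=*/0, /*max_bin=*/256);
}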

@@ -1,13 +1,13 @@
/*!
- * Copyright 2020 XGBoost contributors
+ * Copyright 2020-2022 XGBoost contributors
*/
#include <memory>
#include <type_traits>
#include <algorithm>

#include "../common/hist_util.cuh"
#include "simple_batch_iterator.h"
#include "iterative_device_dmatrix.h"
#include "iterative_dmatrix.h"
#include "sparse_page_source.h"
#include "ellpack_page.cuh"
#include "proxy_dmatrix.h"
@@ -16,7 +16,7 @@

namespace xgboost {
namespace data {
-void IterativeDeviceDMatrix::Initialize(DataIterHandle iter_handle, float missing) {
+void IterativeDMatrix::InitFromCUDA(DataIterHandle iter_handle, float missing) {
// A handle passed to external iterator.
DMatrixProxy* proxy = MakeProxy(proxy_);
CHECK(proxy);
@@ -160,7 +160,7 @@ void IterativeDeviceDMatrix::Initialize(DataIterHandle iter_handle, float missing)
rabit::Allreduce<rabit::op::Max>(&info_.num_col_, 1);
}

-BatchSet<EllpackPage> IterativeDeviceDMatrix::GetEllpackBatches(const BatchParam& param) {
+BatchSet<EllpackPage> IterativeDMatrix::GetEllpackBatches(const BatchParam& param) {
CHECK(page_);
// FIXME(Jiamingy): https://github.com/dmlc/xgboost/issues/7976
if (param.max_bin != batch_param_.max_bin) {
@@ -1,9 +1,9 @@
/*!
- * Copyright 2020 by Contributors
- * \file iterative_device_dmatrix.h
+ * Copyright 2020-2022 by Contributors
+ * \file iterative_dmatrix.h
*/
-#ifndef XGBOOST_DATA_ITERATIVE_DEVICE_DMATRIX_H_
-#define XGBOOST_DATA_ITERATIVE_DEVICE_DMATRIX_H_
+#ifndef XGBOOST_DATA_ITERATIVE_DMATRIX_H_
+#define XGBOOST_DATA_ITERATIVE_DMATRIX_H_

#include <vector>
#include <string>
@@ -19,7 +19,7 @@
namespace xgboost {
namespace data {

-class IterativeDeviceDMatrix : public DMatrix {
+class IterativeDMatrix : public DMatrix {
MetaInfo info_;
Context ctx_;
BatchParam batch_param_;
@@ -30,18 +30,17 @@ class IterativeDeviceDMatrix : public DMatrix {
XGDMatrixCallbackNext *next_;

public:
-void Initialize(DataIterHandle iter, float missing);
+void InitFromCUDA(DataIterHandle iter, float missing);

public:
-explicit IterativeDeviceDMatrix(DataIterHandle iter, DMatrixHandle proxy,
-DataIterResetCallback *reset, XGDMatrixCallbackNext *next,
-float missing, int nthread, int max_bin)
+explicit IterativeDMatrix(DataIterHandle iter, DMatrixHandle proxy, DataIterResetCallback *reset,
+XGDMatrixCallbackNext *next, float missing, int nthread, int max_bin)
: proxy_{proxy}, reset_{reset}, next_{next} {
-batch_param_ = BatchParam{0, max_bin};
+batch_param_ = BatchParam{MakeProxy(proxy_)->DeviceIdx(), max_bin};
ctx_.UpdateAllowUnknown(Args{{"nthread", std::to_string(nthread)}});
-this->Initialize(iter, missing);
+this->InitFromCUDA(iter, missing);
}
-~IterativeDeviceDMatrix() override = default;
+~IterativeDMatrix() override = default;

bool EllpackExists() const override { return true; }
bool SparsePageExists() const override { return false; }
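Beyond the rename, two behavioral details are visible in this hunk: the initializer is now explicitly called InitFromCUDA (presumably leaving room for a CPU counterpart later), and batch_param_ records the device index of the incoming proxy data instead of a hard-coded 0. A short usage sketch follows, modeled on the tests further down in this commit; CudaArrayIterForTest, Reset and Next are existing test helpers from tests/cpp/helpers.*, so this only compiles inside the XGBoost test tree, and the max_bin passed to GetBatches must match the one given at construction, per the check in GetEllpackBatches above:

// Sketch modeled on this commit's tests, not a standalone program.
#include <limits>

#include "../../../src/data/iterative_dmatrix.h"
#include "../../../src/data/ellpack_page.cuh"
#include "../helpers.h"

namespace xgboost {
void ExampleUsage() {
  CudaArrayIterForTest iter{/*sparsity=*/0.0f};
  data::IterativeDMatrix m(&iter, iter.Proxy(), Reset, Next,
                           std::numeric_limits<float>::quiet_NaN(),
                           /*nthread=*/0, /*max_bin=*/256);
  // Request the single in-memory ELLPACK page; {device, max_bin} must be
  // consistent with the parameters used at construction time.
  for (auto const &page : m.GetBatches<EllpackPage>(BatchParam{0, 256})) {
    (void)page;
  }
}
}  // namespace xgboost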
@@ -77,14 +76,14 @@ class IterativeDeviceDMatrix : public DMatrix {
};

#if !defined(XGBOOST_USE_CUDA)
-inline void IterativeDeviceDMatrix::Initialize(DataIterHandle iter, float missing) {
+inline void IterativeDMatrix::InitFromCUDA(DataIterHandle iter, float missing) {
// silent the warning about unused variables.
(void)(proxy_);
(void)(reset_);
(void)(next_);
common::AssertGPUSupport();
}
-inline BatchSet<EllpackPage> IterativeDeviceDMatrix::GetEllpackBatches(const BatchParam& param) {
+inline BatchSet<EllpackPage> IterativeDMatrix::GetEllpackBatches(const BatchParam& param) {
common::AssertGPUSupport();
auto begin_iter =
BatchIterator<EllpackPage>(new SimpleBatchIteratorImpl<EllpackPage>(page_));
@@ -94,4 +93,4 @@ inline BatchSet<EllpackPage> IterativeDeviceDMatrix::GetEllpackBatches(const BatchParam& param)
} // namespace data
} // namespace xgboost

-#endif // XGBOOST_DATA_ITERATIVE_DEVICE_DMATRIX_H_
+#endif // XGBOOST_DATA_ITERATIVE_DMATRIX_H_
@@ -1,10 +1,10 @@
/*!
- * Copyright 2020 XGBoost contributors
+ * Copyright 2020-2022 XGBoost contributors
*/
#include <gtest/gtest.h>

#include "../helpers.h"
#include "../../../src/data/iterative_device_dmatrix.h"
#include "../../../src/data/iterative_dmatrix.h"
#include "../../../src/data/ellpack_page.cuh"
#include "../../../src/data/device_adapter.cuh"

@@ -13,7 +13,7 @@ namespace data {

void TestEquivalent(float sparsity) {
CudaArrayIterForTest iter{sparsity};
-IterativeDeviceDMatrix m(
+IterativeDMatrix m(
&iter, iter.Proxy(), Reset, Next, std::numeric_limits<float>::quiet_NaN(),
0, 256);
size_t offset = 0;
@@ -88,7 +88,7 @@ TEST(IterativeDeviceDMatrix, Basic) {

TEST(IterativeDeviceDMatrix, RowMajor) {
CudaArrayIterForTest iter(0.0f);
-IterativeDeviceDMatrix m(
+IterativeDMatrix m(
&iter, iter.Proxy(), Reset, Next, std::numeric_limits<float>::quiet_NaN(),
0, 256);
size_t n_batches = 0;
@@ -139,7 +139,7 @@ TEST(IterativeDeviceDMatrix, RowMajorMissing) {
reinterpret_cast<float *>(get<Integer>(j_interface["data"][0])));
thrust::copy(h_data.cbegin(), h_data.cend(), ptr);

-IterativeDeviceDMatrix m(
+IterativeDMatrix m(
&iter, iter.Proxy(), Reset, Next, std::numeric_limits<float>::quiet_NaN(),
0, 256);
auto &ellpack = *m.GetBatches<EllpackPage>({0, 256}).begin();
@@ -159,7 +159,7 @@ TEST(IterativeDeviceDMatrix, IsDense) {
int num_bins = 16;
auto test = [num_bins] (float sparsity) {
CudaArrayIterForTest iter(sparsity);
-IterativeDeviceDMatrix m(
+IterativeDMatrix m(
&iter, iter.Proxy(), Reset, Next, std::numeric_limits<float>::quiet_NaN(),
0, 256);
if (sparsity == 0.0) {
8 changes: 3 additions & 5 deletions tests/cpp/helpers.cu
@@ -2,7 +2,7 @@

#include "helpers.h"
#include "../../src/data/device_adapter.cuh"
#include "../../src/data/iterative_device_dmatrix.h"
#include "../../src/data/iterative_dmatrix.h"

namespace xgboost {

@@ -28,12 +28,10 @@ int CudaArrayIterForTest::Next() {
return 1;
}


std::shared_ptr<DMatrix> RandomDataGenerator::GenerateDeviceDMatrix() {
CudaArrayIterForTest iter{this->sparsity_, this->rows_, this->cols_, 1};
-auto m = std::make_shared<data::IterativeDeviceDMatrix>(
-&iter, iter.Proxy(), Reset, Next, std::numeric_limits<float>::quiet_NaN(),
-0, bins_);
+auto m = std::make_shared<data::IterativeDMatrix>(
+&iter, iter.Proxy(), Reset, Next, std::numeric_limits<float>::quiet_NaN(), 0, bins_);
return m;
}
} // namespace xgboost
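The updated helper gives the C++ tests a quick way to build an iterator-backed DMatrix from random data. A hypothetical call site is sketched below; the RandomDataGenerator constructor arguments and the Bins() setter are assumed from the existing test helpers, not introduced by this commit:

// Hypothetical usage of RandomDataGenerator::GenerateDeviceDMatrix(); relies
// on tests/cpp/helpers.h, and the Bins() setter is an assumption.
#include <gtest/gtest.h>

#include "../helpers.h"

namespace xgboost {
TEST(IterativeDMatrix, FromRandomData) {
  std::shared_ptr<DMatrix> p_fmat =
      RandomDataGenerator{/*rows=*/100, /*cols=*/10, /*sparsity=*/0.5}
          .Bins(256)
          .GenerateDeviceDMatrix();
  ASSERT_EQ(p_fmat->Info().num_row_, 100);
  ASSERT_EQ(p_fmat->Info().num_col_, 10);
}
}  // namespace xgboost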
