Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[IR] Support SelectedRowsType #55041

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions paddle/fluid/ir/dialect/op_generator/op_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -483,6 +483,7 @@ def parse_output_type_list(self):
output_type_map = {
'Tensor': 'paddle::dialect::DenseTensorType',
'Tensor[]': 'ir::VectorType<paddle::dialect::DenseTensorType>',
'SelectedRows': 'paddle::dialect::SelectedRowsType',
}
type_list = []
for output_info in self.op_yaml_item['outputs']:
Expand Down
1 change: 1 addition & 0 deletions paddle/fluid/ir/dialect/pd_dialect.cc
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@ PaddleDialect::PaddleDialect(ir::IrContext *context)

void PaddleDialect::initialize() {
RegisterTypes<paddle::dialect::DenseTensorType>();
RegisterTypes<paddle::dialect::SelectedRowsType>();

RegisterAttributes<paddle::dialect::IntArrayAttribute,
paddle::dialect::DataTypeAttribute,
Expand Down
42 changes: 42 additions & 0 deletions paddle/fluid/ir/dialect/pd_op.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -161,3 +161,45 @@
- {typename: 'Tensor', name: out, optional: false, intermediate: false}
no_need_buffer: null
data_transform: null

- name: embedding_grad_sparse
inputs:
- typename: Tensor
name: x
optional: false
no_need_buffer: false
data_transform: {}
- typename: Tensor
name: weight
optional: false
no_need_buffer: false
data_transform: {}
- typename: Tensor
name: out_grad
optional: false
no_need_buffer: false
data_transform: {}
attrs:
- {typename: int64_t, name: padding_idx, default_value: '-1'}
- {typename: bool, name: sparse, default_value: 'false'}
outputs:
- {typename: SelectedRows, name: weight_grad, optional: false, intermediate: false}
no_need_buffer: null
data_transform: null
infer_meta:
func: UnchangedInferMeta
param: [weight]
kernel:
func: [embedding_grad_sparse]
param: [x, weight, out_grad, padding_idx, sparse]
backend: null
layout: null
data_type:
ordered: false
candidates: [weight]
to_complex_flag: [false]
dispatch: {embedding_grad_sparse: null}
force_backend: null
inplace: null
view: null
backward: null
13 changes: 13 additions & 0 deletions paddle/fluid/ir/dialect/pd_type.cc
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,20 @@ const phi::LoD& DenseTensorType::lod() const { return storage()->lod_; }

const size_t& DenseTensorType::offset() const { return storage()->offset_; }

const ir::Type& SelectedRowsType::dtype() const { return storage()->dtype_; }

const phi::DDim& SelectedRowsType::dims() const { return storage()->dims_; }

const phi::DataLayout& SelectedRowsType::data_layout() const {
return storage()->layout_;
}

const phi::LoD& SelectedRowsType::lod() const { return storage()->lod_; }

const size_t& SelectedRowsType::offset() const { return storage()->offset_; }

} // namespace dialect
} // namespace paddle

IR_DEFINE_EXPLICIT_TYPE_ID(paddle::dialect::DenseTensorType)
IR_DEFINE_EXPLICIT_TYPE_ID(paddle::dialect::SelectedRowsType)
18 changes: 18 additions & 0 deletions paddle/fluid/ir/dialect/pd_type.h
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,25 @@ class DenseTensorType : public ir::Type {
const size_t &offset() const;
};

class SelectedRowsType : public ir::Type {
public:
using Type::Type;

DECLARE_TYPE_UTILITY_FUNCTOR(SelectedRowsType, SelectedRowsTypeStorage);

const ir::Type &dtype() const;

const phi::DDim &dims() const;

const phi::DataLayout &data_layout() const;

const phi::LoD &lod() const;

const size_t &offset() const;
};

} // namespace dialect
} // namespace paddle

IR_DECLARE_EXPLICIT_TYPE_ID(paddle::dialect::DenseTensorType)
IR_DECLARE_EXPLICIT_TYPE_ID(paddle::dialect::SelectedRowsType)
81 changes: 81 additions & 0 deletions paddle/fluid/ir/dialect/pd_type_storage.h
Original file line number Diff line number Diff line change
Expand Up @@ -128,5 +128,86 @@ struct DenseTensorTypeStorage : public ir::TypeStorage {
size_t offset_;
};

struct SelectedRowsTypeStorage : public ir::TypeStorage {
using DataLayout = phi::DataLayout;
using Dim = phi::DDim;
using LoD = std::vector<std::vector<size_t>>;
///
/// \brief Declare ParamKey according to parameter type.
///
using ParamKey =
std::tuple<ir::Type, phi::DDim, phi::DataLayout, phi::LoD, size_t>;

SelectedRowsTypeStorage(const ir::Type& dtype,
const phi::DDim& dims,
const phi::DataLayout& layout,
const phi::LoD& lod,
size_t offset)
: dtype_(dtype),
dims_(dims),
layout_(layout),
lod_(lod),
offset_(offset) {}

///
/// \brief Each derived TypeStorage must define a Construct method, which
/// StorageManager uses to construct a derived TypeStorage.
///
static SelectedRowsTypeStorage* Construct(const ParamKey& key) {
return new SelectedRowsTypeStorage(std::get<0>(key),
std::get<1>(key),
std::get<2>(key),
std::get<3>(key),
std::get<4>(key));
}

///
/// \brief Each derived TypeStorage must provide a HashValue method.
///
static std::size_t HashValue(const ParamKey& key) {
std::size_t hash_value = 317;
// hash dtype
hash_value =
ir::hash_combine(hash_value, std::hash<ir::Type>()(std::get<0>(key)));
// hash dims
hash_value =
ir::hash_combine(hash_value, std::hash<phi::DDim>()(std::get<1>(key)));
// hash layout
hash_value = ir::hash_combine(
hash_value,
std::hash<std::underlying_type<phi::DataLayout>::type>()(
static_cast<std::underlying_type<phi::DataLayout>::type>(
std::get<2>(key))));
// hash lod
hash_value =
ir::hash_combine(hash_value, std::hash<phi::LoD>()(std::get<3>(key)));
// hash offset
hash_value =
ir::hash_combine(hash_value, std::hash<size_t>()(std::get<4>(key)));
return hash_value;
}

///
/// \brief Each derived TypeStorage needs to overload operator==.
///
bool operator==(const ParamKey& key) const {
return ParamKey(dtype_, dims_, layout_, lod_, offset_) == key;
}

ParamKey GetAsKey() const {
return ParamKey(dtype_, dims_, layout_, lod_, offset_);
}

///
/// \brief DenseTensorTypeStorage include five parameters: dims, dtype,
/// layout, lod, offset.
///
ir::Type dtype_;
phi::DDim dims_;
phi::DataLayout layout_;
phi::LoD lod_;
size_t offset_;
};

} // namespace dialect
} // namespace paddle
18 changes: 18 additions & 0 deletions test/cpp/ir/core/type_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
#include <gtest/gtest.h>
#include <unordered_map>

#include "paddle/fluid/ir/dialect/pd_type.h"
#include "paddle/ir/core/builtin_dialect.h"
#include "paddle/ir/core/builtin_type.h"
#include "paddle/ir/core/dialect.h"
Expand Down Expand Up @@ -229,6 +230,23 @@ TEST(type_test, custom_type_dialect) {
EXPECT_EQ(dialect_integer1, dialect_integer2);
}

TEST(type_test, pd_dialect) {
ir::IrContext *ctx = ir::IrContext::Instance();
ir::Type fp32_dtype = ir::Float32Type::get(ctx);
phi::DDim dims = {2, 2};
phi::DataLayout data_layout = phi::DataLayout::NCHW;
phi::LoD lod = {{0, 1, 2}};
size_t offset = 0;
paddle::dialect::SelectedRowsType select_rows_dtype =
paddle::dialect::SelectedRowsType::get(
ctx, fp32_dtype, dims, data_layout, lod, offset);
EXPECT_EQ(select_rows_dtype.dtype().isa<ir::Float32Type>(), true);
EXPECT_EQ(select_rows_dtype.dims(), dims);
EXPECT_EQ(select_rows_dtype.data_layout(), data_layout);
EXPECT_EQ(select_rows_dtype.lod(), lod);
EXPECT_EQ(select_rows_dtype.offset(), offset);
}

namespace TestNamespace {
class TestClass {};
} // namespace TestNamespace
Expand Down