[Eager] Add hierarchical_sigmoid yaml (#44638)
ZHUI committed Jul 27, 2022
1 parent ae25ab5 commit ea91ca2
Showing 5 changed files with 68 additions and 10 deletions.
2 changes: 1 addition & 1 deletion paddle/phi/api/yaml/generator/api_base.py
@@ -135,7 +135,7 @@ def parse_input_and_attr(self, api_name, args_config, optional_vars=[]):
             'double': 'double',
             'bool': 'bool',
             'str': 'const std::string&',
-            'str[] ': 'const std::vector<std::string>&',
+            'str[]': 'const std::vector<std::string>&',
             'Place': 'const Place&',
             'DataLayout': 'DataLayout',
             'DataType': 'DataType',
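Note: the one-character change above is load-bearing: the old key 'str[] ' (trailing space) meant the generator's type lookup could never match a str[] attribute, such as epmap and table_names in the new hierarchical_sigmoid entry below. A minimal sketch of the lookup (map name abbreviated here; the entries are excerpted from the hunk above):

    # excerpt of the attr-type map inside parse_input_and_attr (trimmed)
    attr_types_map = {
        'str': 'const std::string&',
        'str[]': 'const std::vector<std::string>&',
    }

    # with the old 'str[] ' key, a lookup for 'str[]' had no match
    print(attr_types_map['str[]'])  # -> const std::vector<std::string>&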
12 changes: 12 additions & 0 deletions paddle/phi/api/yaml/legacy_api.yaml
@@ -1038,6 +1038,18 @@
     func : hard_swish
   backward : hard_swish_grad

+# hierarchical_sigmoid
+- api : hierarchical_sigmoid
+  args : (Tensor x, Tensor w, Tensor label, Tensor path, Tensor code, Tensor bias, int num_classes, bool remote_prefetch, int trainer_id, int64_t[] height_sections, str[] epmap, str[] table_names, bool is_sparse)
+  output : Tensor(out), Tensor(pre_out), Tensor(w_out)
+  infer_meta :
+    func : HierarchicalSigmoidInferMeta
+  optional: path, code, bias
+  kernel :
+    func : hierarchical_sigmoid
+    data_type : x
+  backward : hierarchical_sigmoid_grad
+
 # histogram
 - api : histogram
   args : (Tensor x, int64_t bins, int min, int max)
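Note: the public entry point backed by this new api entry is paddle.nn.functional.hsigmoid_loss, and the tensors marked optional above (path, code, bias) default to None there. A minimal usage sketch, with made-up shapes following the hsigmoid_loss docs (weight is (num_classes - 1, feature_size)):

    import paddle
    import paddle.nn.functional as F

    paddle.seed(2022)
    x = paddle.randn([4, 3])                     # (batch_size, feature_size)
    label = paddle.to_tensor([0, 1, 4, 5])       # class ids in [0, num_classes)
    num_classes = 6
    weight = paddle.randn([num_classes - 1, 3])  # one row per internal tree node

    # path/code/bias are omitted, matching the optional entries above
    out = F.hsigmoid_loss(x, label, num_classes, weight)
    print(out.shape)  # expect [4, 1]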
13 changes: 12 additions & 1 deletion paddle/phi/api/yaml/legacy_backward.yaml
@@ -935,6 +935,17 @@
     func : hard_swish_grad
   inplace : (out_grad -> x_grad)

+- backward_api : hierarchical_sigmoid_grad
+  forward : hierarchical_sigmoid (Tensor x, Tensor w, Tensor label, Tensor path, Tensor code, Tensor bias, int num_classes, bool remote_prefetch, int trainer_id, int64_t[] height_sections, str[] epmap, str[] table_names, bool is_sparse) -> Tensor(out), Tensor(pre_out), Tensor(w_out)
+  args : (Tensor x, Tensor w, Tensor label, Tensor path, Tensor code, Tensor bias, Tensor pre_out, Tensor out_grad, int num_classes, bool remote_prefetch, int trainer_id, int64_t[] height_sections, str[] epmap, str[] table_names, bool is_sparse)
+  output : Tensor(x_grad), Tensor(w_grad), Tensor(bias_grad)
+  infer_meta :
+    func : GeneralTernaryGradInferMeta
+    param : [x ,w, bias]
+  optional: path, code, bias
+  kernel :
+    func : hierarchical_sigmoid_grad
+
 - backward_api : huber_loss_grad
   forward : huber_loss (Tensor input, Tensor label, float delta) -> Tensor(out), Tensor(residual)
   args : (Tensor residual, Tensor out_grad, float delta)

@@ -2396,7 +2407,7 @@
   backward : unsqueeze_double_grad

 - backward_api : warpctc_grad
-  forward : warpctc (Tensor logits, Tensor label, Tensor logits_length, Tensor labels_length, int blank, bool norm_by_times) -> Tensor(loss), Tensor(warpctcgrad)
+  forward : warpctc (Tensor logits, Tensor label, Tensor logits_length, Tensor labels_length, int blank, bool norm_by_times) -> Tensor(loss), Tensor(warpctcgrad)
   args : (Tensor logits, Tensor logits_length, Tensor warpctcgrad, Tensor loss_grad, int blank, bool norm_by_times)
   output : Tensor(logits_grad)
   infer_meta :
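Note: the backward entry reuses GeneralTernaryGradInferMeta with param [x, w, bias], so x_grad, w_grad and bias_grad simply inherit the metas (shape, dtype) of x, w and bias. A quick dygraph sketch that exercises the new hierarchical_sigmoid_grad kernel (shapes are illustrative assumptions, not taken from this commit):

    import paddle
    import paddle.nn.functional as F

    x = paddle.randn([4, 8])
    x.stop_gradient = False
    weight = paddle.randn([5, 8])           # (num_classes - 1, feature_size)
    weight.stop_gradient = False
    label = paddle.to_tensor([0, 1, 4, 5])

    loss = F.hsigmoid_loss(x, label, 6, weight)
    loss.sum().backward()                   # dispatches to hierarchical_sigmoid_grad
    print(x.grad.shape, weight.grad.shape)  # mirror x and w, per the infer_meta above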
45 changes: 38 additions & 7 deletions python/paddle/fluid/tests/unittests/test_hsigmoid_op.py
@@ -172,10 +172,30 @@ def hsigmoidWithCustomTree(x, w, path_table, path_code, label, bias,
     return pre_output, out


+def python_api(input,
+               weight,
+               label,
+               path_table=None,
+               path_code=None,
+               bias=None,
+               num_classes=-1,
+               is_sparse=False,
+               remote_prefetch=False):
+    assert is_sparse == remote_prefetch, "is_sparse must equal remote_prefetch in dygraph."
+    return paddle.nn.functional.hsigmoid_loss(input, label, num_classes, weight,
+                                               bias, path_table, path_code,
+                                               is_sparse)
+
+
+python_out_sig = ["Out"]
+
+
 class TestHSigmoidOp(OpTest):

     def setUp(self):
         self.op_type = "hierarchical_sigmoid"
+        self.python_api = python_api
+        self.python_out_sig = python_out_sig
         num_classes = 101
         feature_size = 5
         batch_size = 20
@@ -193,11 +213,12 @@ def setUp(self):
         self.user_grads = hsigmoid_grad(x, w, label, bias, num_classes)

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_eager=True)

     def test_check_grad(self):
         self.check_grad(['X', 'W', 'Bias'], ['Out'],
-                        user_defined_grads=self.user_grads)
+                        user_defined_grads=self.user_grads,
+                        check_eager=True)


 @skip_check_grad_ci(
@@ -208,6 +229,8 @@ class TestHSigmoidOpSparse(OpTest):

     def setUp(self):
         self.op_type = "hierarchical_sigmoid"
+        self.python_api = python_api
+        self.python_out_sig = python_out_sig
         num_classes = 6  # using 1, 2, 3, 4, 5, 6 to build a Huffman tree and select 1, 2, 5, 6 as samples
         feature_size = 8
         batch_size = 4
@@ -237,7 +260,7 @@ def setUp(self):
         self.outputs = {'PreOut': pre_output, 'Out': out}

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_eager=True)


 class TestHSigmoidOpWithSparseGrad(unittest.TestCase):
@@ -318,6 +341,8 @@ class TestHSigmoidOpWithCostumTree(OpTest):

     def setUp(self):
         self.op_type = "hierarchical_sigmoid"
+        self.python_api = python_api
+        self.python_out_sig = python_out_sig
         num_classes = 6  # using 1, 2, 3, 4, 5, 6 to build a Huffman tree and select 1, 2, 5, 6 as samples
         feature_size = 8
         batch_size = 4
@@ -347,10 +372,12 @@ def setUp(self):
         self.outputs = {'PreOut': pre_output, 'Out': out}

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_eager=True)

     def test_check_grad(self):
-        self.check_grad(['Bias', 'X', 'W'], ['Out'], no_grad_set=set('Label'))
+        self.check_grad(['Bias', 'X', 'W'], ['Out'],
+                        no_grad_set=set('Label'),
+                        check_eager=True)


 @skip_check_grad_ci(
@@ -361,6 +388,8 @@ class TestHSigmoidOpWithCostumTreeWithoutBias(OpTest):

     def setUp(self):
         self.op_type = "hierarchical_sigmoid"
+        self.python_api = python_api
+        self.python_out_sig = python_out_sig
         num_classes = 6  # using 1, 2, 3, 4, 5, 6 to build a Huffman tree and select 1, 2, 5, 6 as samples
         feature_size = 8
         batch_size = 4
@@ -394,10 +423,12 @@ def setUp(self):
         self.outputs = {'PreOut': pre_output, 'Out': out}

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_eager=True)

     def test_check_grad(self):
-        self.check_grad(['X', 'W'], ['Out'], no_grad_set=set('Label'))
+        self.check_grad(['X', 'W'], ['Out'],
+                        no_grad_set=set('Label'),
+                        check_eager=True)


 class TestHSigmoidLossAPI(unittest.TestCase):
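Note: the test changes all follow one pattern: each hierarchical_sigmoid OpTest now sets self.python_api / self.python_out_sig and passes check_eager=True, so every case also runs through the eager final-state API and is compared against the op output named in python_out_sig ("Out"). A standalone sketch of the argument mapping python_api performs, reusing the first test's dimensions (data made up):

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    batch_size, feature_size, num_classes = 20, 5, 101
    x = paddle.to_tensor(np.random.rand(batch_size, feature_size).astype('float64'))
    w = paddle.to_tensor(np.random.rand(num_classes - 1, feature_size).astype('float64'))
    label = paddle.to_tensor(np.random.randint(0, num_classes, (batch_size, 1)))

    # the same reordering python_api does: op-style (X, W, Label) -> functional API
    out = F.hsigmoid_loss(x, label, num_classes, w)
    print(out.shape)  # this is what check_eager=True compares against "Out"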
6 changes: 5 additions & 1 deletion python/paddle/nn/functional/loss.py
@@ -920,7 +920,11 @@ def hsigmoid_loss(input,
            #  [2.11009121]
            #  [1.92374969]]
     """
-
+    if in_dygraph_mode():
+        out, _, _ = _C_ops.final_state_hierarchical_sigmoid(
+            input, weight, label, path_table, path_code, bias, num_classes,
+            is_sparse, 0, [], [], [], is_sparse)
+        return out
     if _non_static_mode():
         out, _, _ = _C_ops.hierarchical_sigmoid(input, weight, label,
                                                 path_table, path_code, bias,
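Note: the new in_dygraph_mode() branch routes eager execution to the final-state op generated from the yaml entry; the distributed-only attributes get inert defaults, and remote_prefetch mirrors is_sparse (matching the assert in the test helper). A sketch of the same call made directly, assuming a build from this commit (_C_ops is internal API and these names may change between versions):

    import paddle
    from paddle import _C_ops

    paddle.disable_static()                # make in_dygraph_mode() true
    x = paddle.randn([4, 3])
    weight = paddle.randn([5, 3])
    label = paddle.to_tensor([0, 1, 4, 5])

    out, pre_out, w_out = _C_ops.final_state_hierarchical_sigmoid(
        x, weight, label,
        None, None, None,                  # path, code, bias (declared optional)
        6,                                 # num_classes
        False,                             # remote_prefetch, mirrors is_sparse
        0,                                 # trainer_id
        [], [], [],                        # height_sections, epmap, table_names
        False)                             # is_sparse
    print(out.shape)                       # hsigmoid_loss returns only `out`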
