[IPU] add more ipu UTs (#41176)
* add ipu uts

* fix ut

* split PR

* fix ut

* rm ut
gglin001 committed Apr 6, 2022
1 parent e9e68c3 commit 5b85f3d
Showing 7 changed files with 799 additions and 1 deletion.
99 changes: 99 additions & 0 deletions python/paddle/fluid/tests/unittests/ipu/test_cast_op_ipu.py
@@ -94,6 +94,105 @@ def test_base(self):
self.assertTrue(res0.shape == res1.shape)


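# Cast with IPU fp16 (half precision) enabled through
# IpuStrategy.set_precision_config(enable_fp16=True); the inherited
# test_base then checks the IPU result against the CPU run.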
class TestEnableFp16(TestBase):
def set_atol(self):
self.atol = 1e-10

def set_data_feed(self):
self.feed = {"x": np.array([1, 200, 3000, 40000]).astype('int32'), }

def set_op_attrs(self):
self.attrs = {}
self.attrs['dtype'] = 'float32'

def _test_base(self, run_ipu=True):
scope = paddle.static.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
main_prog.random_seed = self.SEED
startup_prog.random_seed = self.SEED

with paddle.static.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
out = paddle.cast(x, **self.attrs)
fetch_list = [out.name]

if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)

if run_ipu:
feed_list = self.feed_list
ipu_strategy = paddle.static.IpuStrategy()
ipu_strategy.set_graph_config(is_training=self.is_training)
ipu_strategy.set_precision_config(enable_fp16=True)
program = paddle.static.IpuCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog

result = exe.run(program, feed=self.feed, fetch_list=fetch_list)
return result[0]


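# Same setup as TestEnableFp16, but also sets the IpuStrategy option
# "transfer_cast_op" to False, which is assumed to keep cast ops from being
# rewritten when the graph is converted to fp16.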
class TestDisableTransferCast(TestEnableFp16):
def set_atol(self):
self.atol = 1e-10

def set_data_feed(self):
self.feed = {"x": np.array([1, 200, 3000, 40000]).astype('int32'), }

def set_op_attrs(self):
self.attrs = {}
self.attrs['dtype'] = 'float32'

def _test_base(self, run_ipu=True):
scope = paddle.static.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
main_prog.random_seed = self.SEED
startup_prog.random_seed = self.SEED

with paddle.static.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
out = paddle.cast(x, **self.attrs)
fetch_list = [out.name]

if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)

if run_ipu:
feed_list = self.feed_list
ipu_strategy = paddle.static.IpuStrategy()
ipu_strategy.set_graph_config(is_training=self.is_training)
ipu_strategy.set_precision_config(enable_fp16=True)
ipu_strategy.set_options({"transfer_cast_op": False})
program = paddle.static.IpuCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog

result = exe.run(program, feed=self.feed, fetch_list=fetch_list)
return result[0]


class TestCase2(TestBase):
def set_atol(self):
self.atol = 1e-10
126 changes: 126 additions & 0 deletions python/paddle/fluid/tests/unittests/ipu/test_eval_model_ipu.py
@@ -0,0 +1,126 @@
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np
import paddle
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest


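# Trains a small conv model and flips the "runtime_options.enable_eval"
# option at runtime: while it is True the session is expected to run in
# evaluation mode (no weight updates, so the loss stays constant); once it
# is set back to False, training resumes and should match the CPU baseline.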
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_data_feed()
self.set_feed_attr()
self.set_attrs()

def set_atol(self):
self.atol = 1e-4

def set_data_feed(self):
self.feed = {
"image": np.random.uniform(size=[1, 3, 10, 10]).astype('float32'),
}

def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed.values()]
self.feed_list = list(self.feed.keys())
self.feed_dtype = [x.dtype for x in self.feed.values()]

def set_attrs(self):
self.attrs = {
"optimizer": 'lamb',
"weight_decay": 2.0,
}

def _test_optimizer(self, run_ipu=True):
scope = paddle.static.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
main_prog.random_seed = self.SEED
startup_prog.random_seed = self.SEED
np.random.seed(self.SEED)

with paddle.static.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
image = paddle.static.data(
name='image', shape=[1, 3, 10, 10], dtype='float32')
conv1 = paddle.static.nn.conv2d(
image, num_filters=3, filter_size=3, bias_attr=False)
loss = paddle.mean(conv1)

weight_decay = self.attrs['weight_decay']
opt = paddle.optimizer.SGD(learning_rate=1e-1,
weight_decay=weight_decay)
if self.attrs['optimizer'] == 'adam':
opt = paddle.optimizer.Adam(
learning_rate=1e-1, weight_decay=weight_decay)
elif self.attrs['optimizer'] == 'lamb':
opt = paddle.optimizer.Lamb(
learning_rate=1e-1, lamb_weight_decay=weight_decay)
opt.minimize(loss)

if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)

if run_ipu:
feed_list = [image.name]
fetch_list = [loss.name]
ipu_strategy = paddle.static.IpuStrategy()
ipu_strategy.set_graph_config(is_training=True)
ipu_strategy.set_options({"runtime_options.enable_eval": True})
program = paddle.static.IpuCompiledProgram(
main_prog, ipu_strategy=ipu_strategy).compile(feed_list,
fetch_list)
else:
program = main_prog

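        # On IPU: 200 steps in total, the first 100 with enable_eval=True
        # (loss should not change), then switch back to training at step 100.
        # On CPU: 100 plain training steps used as the reference curve.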
result = []
if run_ipu:
for epoch in range(200):
if epoch == 100:
ipu_strategy.set_options({
"runtime_options.enable_eval": False
})
loss_res = exe.run(program,
feed=self.feed,
fetch_list=[loss])
result.append(loss_res)
else:
for epoch in range(100):
loss_res = exe.run(program,
feed=self.feed,
fetch_list=[loss])
result.append(loss_res)
return np.array(result)

def test(self):
        # cpu and ipu dimension mismatch, cpu:(100, 1, 1), ipu:(100, 1)
ipu_loss = self._test_optimizer(True).flatten()
cpu_loss = self._test_optimizer(False).flatten()
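        # The loss must stay constant while eval mode is on, and the last 100
        # IPU steps (training re-enabled) should track the CPU training loss.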
self.assertTrue(ipu_loss[0] == ipu_loss[99])
self.assertTrue(np.allclose(ipu_loss[100:], cpu_loss, atol=self.atol))


if __name__ == "__main__":
unittest.main()
