Skip to content

Commit

Permalink
[PDPD] support expand_as_v2 and less_equal (#28264)
Browse files Browse the repository at this point in the history
  • Loading branch information
ckl117 and yuxu42 authored Jan 9, 2025
1 parent e12a724 commit 7618310
Show file tree
Hide file tree
Showing 7 changed files with 121 additions and 1 deletion.
4 changes: 4 additions & 0 deletions src/frontends/paddle/src/op/elementwise_ops.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,10 @@ NamedOutputs not_equal(const NodeContext& node_context) {
return elementwise_ops<default_opset::NotEqual>(node_context);
}

// Converts the Paddle "less_equal" op (element-wise X <= Y) to the
// default-opset LessEqual node via the shared elementwise helper.
// Parameter renamed to node_context for consistency with the sibling
// element-wise converters in this file (e.g. not_equal above).
NamedOutputs less_equal(const NodeContext& node_context) {
    return elementwise_ops<default_opset::LessEqual>(node_context);
}

NamedOutputs elementwise_floordiv(const NodeContext& node_context) {
auto x = node_context.get_input("X");
auto y = node_context.get_input("Y");
Expand Down
33 changes: 33 additions & 0 deletions src/frontends/paddle/src/op/expand_as_v2.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
// Copyright (C) 2018-2024 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "default_opset.hpp"
#include "openvino/frontend/paddle/node_context.hpp"

namespace ov {
namespace frontend {
namespace paddle {
namespace op {
// Converts the Paddle "expand_as_v2" op: broadcasts input "X" to a target
// shape. The target shape comes either from the shape of input "Y" (the
// usual expand_as case) or, failing that, from the "target_shape" attribute.
// Throws std::runtime_error when neither source of the shape is present.
NamedOutputs expand_as_v2(const NodeContext& node) {
    using namespace default_opset;
    auto x = node.get_input("X");
    Output<Node> shape_expected_node;
    if (node.has_input("Y")) {
        // Dynamic case: derive the target shape from the "Y" tensor at runtime.
        shape_expected_node = std::make_shared<ShapeOf>(node.get_input("Y"), element::i32);
    } else {
        std::vector<int32_t> shape_expected;
        if (node.has_attribute("target_shape")) {
            shape_expected = node.get_attribute<std::vector<int32_t>>("target_shape");
        } else {
            // Fixed op name in the diagnostic ("expand" -> "expand_as_v2") so the
            // error points at the converter that actually failed.
            throw std::runtime_error("expand_as_v2: has no target_shape attribute");
        }
        shape_expected_node = Constant::create(element::i32, {shape_expected.size()}, shape_expected);
    }
    return node.default_single_output_mapping({std::make_shared<Broadcast>(x, shape_expected_node)}, {"Out"});
}

} // namespace op
} // namespace paddle
} // namespace frontend
} // namespace ov
2 changes: 1 addition & 1 deletion src/frontends/paddle/src/op/expand_v2.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -62,4 +62,4 @@ NamedOutputs expand_v2(const NodeContext& node) {
} // namespace op
} // namespace paddle
} // namespace frontend
} // namespace ov
} // namespace ov
4 changes: 4 additions & 0 deletions src/frontends/paddle/src/op_table.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ OP_CONVERTER(elu);
OP_CONVERTER(embedding);
OP_CONVERTER(exp);
OP_CONVERTER(expand_v2);
OP_CONVERTER(expand_as_v2);
OP_CONVERTER(eye);
OP_CONVERTER(flip);
OP_CONVERTER(flatten_contiguous_range);
Expand All @@ -62,6 +63,7 @@ OP_CONVERTER(index_select);
OP_CONVERTER(layer_norm);
OP_CONVERTER(leaky_relu);
OP_CONVERTER(less_than);
OP_CONVERTER(less_equal);
OP_CONVERTER(linear_interp_v2);
OP_CONVERTER(linspace);
OP_CONVERTER(lod_array_length);
Expand Down Expand Up @@ -179,6 +181,7 @@ std::map<std::string, CreatorFunction> get_supported_ops() {
{"equal", op::equal},
{"exp", op::exp},
{"expand_v2", op::expand_v2},
{"expand_as_v2", op::expand_as_v2},
{"eye", op::eye},
{"fill_any_like", op::fill_any_like},
{"fill_constant", op::fill_constant},
Expand All @@ -200,6 +203,7 @@ std::map<std::string, CreatorFunction> get_supported_ops() {
{"layer_norm", op::layer_norm},
{"leaky_relu", op::leaky_relu},
{"less_than", op::less_than},
{"less_equal", op::less_equal},
{"linear_interp_v2", op::linear_interp_v2},
{"linspace", op::linspace},
{"lod_array_length", op::lod_array_length},
Expand Down
5 changes: 5 additions & 0 deletions src/frontends/paddle/tests/op_fuzzy.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,8 @@ static const std::vector<std::string> models{
std::string("expand_v2_tensor/expand_v2_tensor.pdmodel"),
std::string("expand_v2_tensor_list/expand_v2_tensor_list.pdmodel"),
std::string("expand_v2_tensor_list2/expand_v2_tensor_list2.pdmodel"),
std::string("expand_as_v2_1/expand_as_v2_1.pdmodel"),
std::string("expand_as_v2_2/expand_as_v2_2.pdmodel"),
std::string("exp_test_float32/exp_test_float32.pdmodel"),
std::string("eye/eye.pdmodel"),
std::string("eye_int32/eye_int32.pdmodel"),
Expand Down Expand Up @@ -283,6 +285,9 @@ static const std::vector<std::string> models{
std::string("less_than_float32/less_than_float32.pdmodel"),
std::string("less_than_int32/less_than_int32.pdmodel"),
std::string("less_than_int64/less_than_int64.pdmodel"),
std::string("less_equal_float32/less_equal_float32.pdmodel"),
std::string("less_equal_int32/less_equal_int32.pdmodel"),
std::string("less_equal_int64/less_equal_int64.pdmodel"),
std::string("linear_downsample_false_0/linear_downsample_false_0.pdmodel"),
std::string("linear_downsample_false_1/linear_downsample_false_1.pdmodel"),
std::string("linear_downsample_true_0/linear_downsample_true_0.pdmodel"),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -284,6 +284,39 @@ def elementwise_floordiv(name: str, x, y, in_dtype, axis=-1):
return outs[0]


def elementwise_less_equal(name: str, x, y, in_dtype, cast_to_fp32=False):
    """Build, run and save a static paddle model computing less_equal(x, y).

    The boolean comparison result is cast to a numeric dtype (float32 when
    cast_to_fp32 is True, otherwise in_dtype) because the FuzzyTest framework
    cannot consume boolean outputs. Returns the computed output array.
    """
    paddle.enable_static()

    main_prog = paddle.static.Program()
    startup_prog = paddle.static.Program()
    with paddle.static.program_guard(main_prog, startup_prog):
        lhs = paddle.static.data(
            name='input_x', shape=x.shape, dtype=in_dtype)
        rhs = paddle.static.data(
            name='input_y', shape=y.shape, dtype=in_dtype)
        if paddle.__version__ >= '2.0.0':
            result = paddle.less_equal(x=lhs, y=rhs, name='less_equal')
        else:
            result = paddle.fluid.layers.less_equal(x=lhs, y=rhs, name='less_equal')

        # FuzzyTest framework doesn't support boolean so cast to fp32/int32
        out_dtype = "float32" if cast_to_fp32 else in_dtype
        result = paddle.cast(result, out_dtype)

        exe = paddle.static.Executor(paddle.static.cpu_places(1)[0])
        # startup program will call initializer to initialize the parameters.
        exe.run(paddle.static.default_startup_program())

        outs = exe.run(
            feed={'input_x': x, 'input_y': y},
            fetch_list=[result])

        saveModel(name, exe, feed_vars=[lhs, rhs], fetchlist=[result],
                  inputs=[x, y], outputs=[outs[0]], target_dir=sys.argv[1])

    return outs[0]


def elementwise_ops(name: str, data_x, data_y, in_dtype, axis=-1):
elementwise_add("elementwise_add" + name, data_x, data_y, in_dtype, axis)
elementwise_sub("elementwise_sub" + name, data_x, data_y, in_dtype, axis)
Expand Down Expand Up @@ -350,6 +383,20 @@ def main():
data_y = np.random.choice(sample_arr, size=(1, 3, 4))
elementwise_mul_bool("elementwise_mul_bool1", data_x, data_y)

test_cases = [
"float32",
"int32",
"int64"
]

for test in test_cases:
x = np.array([0, 1, 2, 3]).astype(test)
y = np.array([1, 0, 2, 4]).astype(test)
if ((test == "float64") or (test == "int64")):
elementwise_less_equal("less_equal_" + test, x, y, test, True)
else:
elementwise_less_equal("less_equal_" + test, x, y, test, False)


if __name__ == "__main__":
main()
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,27 @@ def expand_v2_tensor(name:str, x, out_shape, use_tensor_in_list):

return outs[0]

def expand_as_v2(name:str, x, y):
    """Build, run and save a static paddle model expanding x to y's shape.

    Uses paddle.expand_as to broadcast input x so it matches the shape of
    input y; both placeholders use the module-level data_type. Returns the
    computed output array.
    """
    paddle.enable_static()

    train_prog = paddle.static.Program()
    init_prog = paddle.static.Program()
    with paddle.static.program_guard(train_prog, init_prog):
        in_x = paddle.static.data(name='x', shape=x.shape, dtype=data_type)
        in_y = paddle.static.data(name='y', shape=y.shape, dtype=data_type)
        broadcasted = paddle.expand_as(in_x, in_y, name='expand_as_v2')

        exe = paddle.static.Executor(paddle.static.cpu_places(1)[0])
        # startup program will call initializer to initialize the parameters.
        exe.run(paddle.static.default_startup_program())

        outs = exe.run(
            feed={'x': x, 'y': y},
            fetch_list=[broadcasted])

        saveModel(name, exe, feed_vars=[in_x, in_y], fetchlist=[broadcasted],
                  inputs=[x, y], outputs=[outs[0]], target_dir=sys.argv[1])

    return outs[0]

def main():
data = np.random.rand(1, 1, 6).astype(data_type)
Expand All @@ -70,6 +91,12 @@ def main():
expand_v2_tensor("expand_v2_tensor_list", data, [2, 3, -1], True)
expand_v2_tensor("expand_v2_tensor_list2", data, [2, 2, 2, 3, -1], True)

# expand_as_v2
data_x = np.random.rand(1, 1, 6).astype(data_type)
data_y1 = np.random.rand(2, 3, 6).astype(data_type)
data_y2 = np.random.rand(4, 2, 3, 6).astype(data_type)
expand_as_v2("expand_as_v2_1", data_x, data_y1)
expand_as_v2("expand_as_v2_2", data_x, data_y2)

if __name__ == "__main__":
main()

0 comments on commit 7618310

Please sign in to comment.