Implement ReLU by WebNN API
Update dnn.cpp for better testing

Update elementwise_layers.cpp

Implement ReLU6

Update elementwise_layers.cpp

Implement SoftMax using WebNN API

Implement Reshape by WebNN API

Implement PermuteLayer by WebNN API

Implement PoolingLayer using WebNN API

Update pooling_layer.cpp

Update pooling_layer.cpp

Update pooling_layer.cpp

Update pooling_layer.cpp

Update pooling_layer.cpp

Update pooling_layer.cpp

Implement PoolingLayer by WebNN API and add more detailed logs

Update dnn.cpp

Update dnn.cpp

Remove redundant code and add more logs for the pooling layer

Add more logs in the pooling layer implementation

Fix the indentation issue and resolve the compilation issue

Fix the build problems

Fix the build issue

Fix the build issue

Update dnn.cpp

Update dnn.cpp
huningxin authored and Hanxi Guo committed Aug 2, 2021
1 parent eee8a28 commit f5083c8
Showing 11 changed files with 410 additions and 23 deletions.
2 changes: 1 addition & 1 deletion modules/dnn/include/opencv2/dnn/dnn.hpp
@@ -73,8 +73,8 @@ CV__DNN_INLINE_NS_BEGIN
         //!< @sa setInferenceEngineBackendType
         DNN_BACKEND_OPENCV,
         DNN_BACKEND_VKCOM,
-        DNN_BACKEND_WEBNN,
         DNN_BACKEND_CUDA,
+        DNN_BACKEND_WEBNN,
 #ifdef __OPENCV_BUILD
         DNN_BACKEND_INFERENCE_ENGINE_NGRAPH = 1000000, // internal - use DNN_BACKEND_INFERENCE_ENGINE + setInferenceEngineBackendType()
         DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019, // internal - use DNN_BACKEND_INFERENCE_ENGINE + setInferenceEngineBackendType()
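For context (not part of this commit's diff): a minimal usage sketch of selecting the backend value added above, assuming an OpenCV build with HAVE_WEBNN. The model path is a placeholder.

#include <opencv2/dnn.hpp>

int main()
{
    using namespace cv::dnn;
    Net net = readNet("model.onnx");  // placeholder model
    // Per the BackendRegistry change below, WebNN is currently
    // registered with the CPU target only.
    net.setPreferableBackend(DNN_BACKEND_WEBNN);
    net.setPreferableTarget(DNN_TARGET_CPU);
    return 0;
}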
14 changes: 10 additions & 4 deletions modules/dnn/src/dnn.cpp
@@ -236,7 +236,6 @@ class BackendRegistry
         if (haveWebnn())
         {
             backends.push_back(std::make_pair(DNN_BACKEND_WEBNN, DNN_TARGET_CPU));
-            backends.push_back(std::make_pair(DNN_BACKEND_WEBNN, DNN_TARGET_OPENCL));
         }
 #endif // HAVE_WEBNN

@@ -2453,6 +2452,11 @@ struct Net::Impl : public detail::NetImplBase
             Ptr<Layer> layer = ld.layerInstance;
             if (!fused && !layer->supportBackend(preferableBackend))
             {
+                // For test use. When not using WebNN, the test case will fail
+                // with the following code.
+
+                CV_LOG_WARNING(NULL, "Layer " + ld.type + " name " + ld.name + " is unsupported by WebNN backend.");
+
                 addWebnnOutputs(ld);
                 net = Ptr<WebnnNet>();
                 layer->preferableTarget = DNN_TARGET_CPU;
@@ -2469,7 +2473,7 @@ struct Net::Impl : public detail::NetImplBase
                 }
                 continue;
             }
-            ld.skip = true;  // Initially skip all WebNN supported layers.
+            ld.skip = true;  // Initially skip all WebNN supported layers.
 
             // Create a new network if one of inputs from different WebNN graph.
             std::vector<Ptr<BackendNode>> inputNodes;
@@ -2519,7 +2523,9 @@ struct Net::Impl : public detail::NetImplBase
 
                 auto inps = net->setInputs(inputs, inputNames);
                 for (auto& inp : inps) {
-                    inputNodes.emplace_back(Ptr<BackendNode>(new WebnnBackendNode(inp)));
+                    WebnnBackendNode* node = new WebnnBackendNode(inp);
+                    node->net = net;
+                    inputNodes.emplace_back(Ptr<BackendNode>(node));
                 }
             }
         }
@@ -2619,8 +2625,8 @@ struct Net::Impl : public detail::NetImplBase
                 ld.skip = false;
             }
         }
-#endif
     }
+#endif
 
     void initVkComBackend()
     {
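The hunks above rely on WebnnBackendNode and WebnnNet from op_webnn.hpp, which this page does not show. The sketch below is reconstructed purely from their usage in this commit (node->operand, node->net->builder, new WebnnBackendNode(op)); the member list and constructor form are assumptions, not the real header.

// Approximation of op_webnn.hpp, inferred from usage only.
class WebnnNet
{
public:
    ml::GraphBuilder builder;  // shared builder for one WebNN subgraph
    // graph compilation and setInputs()/addOutput() plumbing elided
};

class WebnnBackendNode : public BackendNode
{
public:
    explicit WebnnBackendNode(const ml::Operand& op);  // assumed signature
    ml::Operand operand;  // output operand produced by the wrapped layer
    Ptr<WebnnNet> net;    // owning graph; set explicitly for input nodes above
};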
149 changes: 149 additions & 0 deletions modules/dnn/src/layers/elementwise_layers.cpp
@@ -47,9 +47,11 @@
 #include "../op_inf_engine.hpp"
 #include "../ie_ngraph.hpp"
 #include "../op_vkcom.hpp"
+#include "../op_webnn.hpp"
 
 #include <opencv2/dnn/shape_utils.hpp>
 #include <iostream>
+#include <limits>
 
 #ifdef HAVE_OPENCL
 #include "opencl_kernels_dnn.hpp"
@@ -59,6 +61,7 @@
 #include "../cuda4dnn/primitives/activation.hpp"
 using namespace cv::dnn::cuda4dnn;
 #endif
+#include <opencv2/core/utils/logger.hpp>
 
 namespace cv
 {
@@ -181,6 +184,17 @@ class ElementWiseLayer : public Func::Layer
     }
 #endif  // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    virtual Ptr<BackendNode> initWebnn(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
+    {
+        Ptr<WebnnBackendNode> node = nodes[0].dynamicCast<WebnnBackendNode>();
+        auto& webnnInpOperand = node->operand;
+        auto& webnnGraphBuilder = node->net->builder;
+        auto operand = func.initWebnnAPI(webnnGraphBuilder, webnnInpOperand);
+        return Ptr<BackendNode>(new WebnnBackendNode(operand));
+    }
+#endif
+
     virtual Ptr<BackendNode> initVkCom(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
     {
 #ifdef HAVE_VULKAN
@@ -306,6 +320,16 @@ struct ReLUFunctor : public BaseFunctor
 #ifdef HAVE_DNN_NGRAPH
         if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
             return true;
 #endif
+#ifdef HAVE_WEBNN
+        if (backendId == DNN_BACKEND_WEBNN) {
+            // TODO: support PRELU
+            if (slope != 0)
+            {
+                CV_LOG_WARNING(NULL, "PRELU is not supported now.");
+            }
+            return slope == 0;
+        }
+#endif
         return backendId == DNN_BACKEND_OPENCV ||
                backendId == DNN_BACKEND_CUDA ||
@@ -428,6 +452,13 @@ struct ReLUFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        return builder.Relu(input);
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
@@ -455,6 +486,7 @@ struct ReLU6Functor : public BaseFunctor
         return backendId == DNN_BACKEND_OPENCV ||
                backendId == DNN_BACKEND_CUDA ||
                backendId == DNN_BACKEND_HALIDE ||
+               backendId == DNN_BACKEND_WEBNN ||
                backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
     }

@@ -551,6 +583,33 @@ struct ReLU6Functor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+
+
+#ifdef HAVE_WEBNN
+    ml::Operand BuildConstant(const ml::GraphBuilder& builder,
+                              const std::vector<int32_t>& dimensions,
+                              const void* value,
+                              size_t size,
+                              ml::OperandType type) {
+        ml::OperandDescriptor desc;
+        desc.type = type;
+        desc.dimensions = dimensions.data();
+        desc.dimensionsCount = (uint32_t)dimensions.size();
+        ml::ArrayBufferView resource;
+        resource.buffer = const_cast<void*>(value);
+        resource.byteLength = size;
+        return builder.Constant(&desc, &resource);
+    }
+
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        ml::ClampOptions clampOptions;
+        clampOptions.minValue = BuildConstant(builder, {}, &minValue, 1 * sizeof(float), ml::OperandType::Float32);
+        clampOptions.maxValue = BuildConstant(builder, {}, &maxValue, 1 * sizeof(float), ml::OperandType::Float32);
+        return builder.Clamp(input, &clampOptions);
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
@@ -643,6 +702,15 @@ struct TanHFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        CV_Error(Error::StsNotImplemented, "");
+        ml::Operand operand;
+        return operand;
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
@@ -735,6 +803,15 @@ struct SwishFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        CV_Error(Error::StsNotImplemented, "");
+        ml::Operand operand;
+        return operand;
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
@@ -840,6 +917,15 @@ struct MishFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        CV_Error(Error::StsNotImplemented, "");
+        ml::Operand operand;
+        return operand;
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
    {
@@ -932,6 +1018,15 @@ struct SigmoidFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        CV_Error(Error::StsNotImplemented, "");
+        ml::Operand operand;
+        return operand;
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
@@ -1024,6 +1119,15 @@ struct ELUFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        CV_Error(Error::StsNotImplemented, "");
+        ml::Operand operand;
+        return operand;
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
@@ -1122,6 +1226,15 @@ struct AbsValFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        CV_Error(Error::StsNotImplemented, "");
+        ml::Operand operand;
+        return operand;
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
@@ -1215,6 +1328,15 @@ struct BNLLFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        CV_Error(Error::StsNotImplemented, "");
+        ml::Operand operand;
+        return operand;
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
@@ -1367,6 +1489,15 @@ struct PowerFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        CV_Error(Error::StsNotImplemented, "");
+        ml::Operand operand;
+        return operand;
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
@@ -1507,6 +1638,15 @@ struct ExpFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        CV_Error(Error::StsNotImplemented, "");
+        ml::Operand operand;
+        return operand;
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
@@ -1644,6 +1784,15 @@ struct ChannelsPReLUFunctor : public BaseFunctor
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    ml::Operand initWebnnAPI(const ml::GraphBuilder& builder, const ml::Operand& input)
+    {
+        CV_Error(Error::StsNotImplemented, "");
+        ml::Operand operand;
+        return operand;
+    }
+#endif
+
 #ifdef HAVE_VULKAN
     std::shared_ptr<vkcom::OpBase> initVkCom()
     {
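Of the functors above, only ReLU (builder.Relu) and ReLU6 get real WebNN lowerings; the rest raise StsNotImplemented for now. As a scalar reference for what the ReLU6 clamp computes, assuming the functor's usual defaults minValue = 0 and maxValue = 6:

#include <algorithm>
#include <cstdio>

// ReLU6(x) = min(max(x, minValue), maxValue); 0 and 6 are assumed defaults.
static float relu6(float x, float minValue = 0.f, float maxValue = 6.f)
{
    return std::min(std::max(x, minValue), maxValue);
}

int main()
{
    const float samples[] = { -1.f, 0.5f, 3.f, 7.f };
    for (float x : samples)
        std::printf("relu6(%g) = %g\n", x, relu6(x));  // 0, 0.5, 3, 6
    return 0;
}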
16 changes: 16 additions & 0 deletions modules/dnn/src/layers/permute_layer.cpp
@@ -46,6 +46,7 @@
 #include "../op_inf_engine.hpp"
 #include "../ie_ngraph.hpp"
 #include "../op_vkcom.hpp"
+#include "../op_webnn.hpp"
 
 #include <float.h>
 #include <algorithm>
@@ -119,6 +120,7 @@ class PermuteLayerImpl CV_FINAL : public PermuteLayer
 #endif
         return backendId == DNN_BACKEND_OPENCV ||
                backendId == DNN_BACKEND_CUDA ||
+               backendId == DNN_BACKEND_WEBNN ||
                ((backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && haveInfEngine()) ||
                (backendId == DNN_BACKEND_VKCOM && haveVulkan());
     }
@@ -413,6 +415,20 @@ class PermuteLayerImpl CV_FINAL : public PermuteLayer
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    virtual Ptr<BackendNode> initWebnn(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
+    {
+        Ptr<WebnnBackendNode> node = nodes[0].dynamicCast<WebnnBackendNode>();
+        auto& webnnInpOperand = node->operand;
+        auto& webnnGraphBuilder = node->net->builder;
+        std::vector<int32_t> permutation(_order.begin(), _order.end());
+        ml::TransposeOptions options;
+        options.permutation = permutation.data();
+        options.permutationCount = permutation.size();
+        auto operand = webnnGraphBuilder.Transpose(webnnInpOperand, &options);
+        return Ptr<BackendNode>(new WebnnBackendNode(operand));
+    }
+#endif
 
 #ifdef HAVE_CUDA
     Ptr<BackendNode> initCUDA(
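The initWebnn above forwards _order directly as the WebNN permutation, where output dimension i takes input dimension permutation[i]. A small standalone illustration of that semantics with a hypothetical NCHW-to-NHWC order (the shapes are not from this commit):

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

int main()
{
    std::vector<int32_t> inputShape  = { 1, 3, 224, 224 };  // NCHW
    std::vector<int32_t> permutation = { 0, 2, 3, 1 };      // NCHW -> NHWC
    std::vector<int32_t> outputShape(inputShape.size());
    for (std::size_t i = 0; i < permutation.size(); ++i)
        outputShape[i] = inputShape[permutation[i]];
    for (int32_t d : outputShape)
        std::printf("%d ", d);  // prints: 1 224 224 3
    return 0;
}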
