diff --git a/modules/dnn/src/layers/batch_norm_layer.cpp b/modules/dnn/src/layers/batch_norm_layer.cpp
index c6a016f7c281..d3fa4f6e337b 100644
--- a/modules/dnn/src/layers/batch_norm_layer.cpp
+++ b/modules/dnn/src/layers/batch_norm_layer.cpp
@@ -411,6 +411,27 @@ class BatchNormLayerImpl CV_FINAL : public BatchNormLayer
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    virtual Ptr<BackendNode> initWebnn(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
+    {
+        Ptr<WebnnBackendNode> node = nodes[0].dynamicCast<WebnnBackendNode>();
+        auto& webnnInpOperand = node->operand;
+        auto& webnnGraphBuilder = node->net->builder;
+        std::vector<int32_t> weights_shape = webnn::getShape(weights_);
+        ml::Operand weights = webnn::BuildConstant(webnnGraphBuilder, weights_shape, weights_.data, weights_.total()*weights_.elemSize(), ml::OperandType::Float32);
+        std::vector<int32_t> shape(dims, 1);
+        shape[1] = weights_shape[1];
+        ml::Operand weights_reshaped = webnnGraphBuilder.Reshape(weights, shape.data(), shape.size());
+        ml::Operand mul_res = webnnGraphBuilder.Mul(webnnInpOperand, weights_reshaped);
+        std::vector<int32_t> bias_shape = webnn::getShape(bias_);
+        ml::Operand bias = webnn::BuildConstant(webnnGraphBuilder, bias_shape, bias_.data, bias_.total()*bias_.elemSize(), ml::OperandType::Float32);
+        shape[1] = bias_shape[1];
+        ml::Operand bias_reshaped = webnnGraphBuilder.Reshape(bias, shape.data(), shape.size());
+        ml::Operand add_res = webnnGraphBuilder.Add(mul_res, bias_reshaped);
+        return Ptr<BackendNode>(new WebnnBackendNode(add_res));
+    }
+#endif
+
     virtual bool tryQuantize(const std::vector<std::vector<float> > &scales,
                              const std::vector<std::vector<int> > &zeropoints, LayerParams& params) CV_OVERRIDE
     {
diff --git a/modules/dnn/src/layers/concat_layer.cpp b/modules/dnn/src/layers/concat_layer.cpp
index f620d66a39da..6fdb9af1c416 100644
--- a/modules/dnn/src/layers/concat_layer.cpp
+++ b/modules/dnn/src/layers/concat_layer.cpp
@@ -403,6 +403,21 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    virtual Ptr<BackendNode> initWebnn(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
+    {
+        Ptr<WebnnBackendNode> node = nodes[0].dynamicCast<WebnnBackendNode>();
+        auto& webnnGraphBuilder = node->net->builder;
+        std::vector<ml::Operand> inputsOperand;
+        for (int i = 0; i < nodes.size(); i++)
+        {
+            inputsOperand.push_back(nodes[i].dynamicCast<WebnnBackendNode>()->operand);
+        }
+        auto operand = webnnGraphBuilder.Concat(inputsOperand.size(), inputsOperand.data(), axis);
+        return Ptr<BackendNode>(new WebnnBackendNode(operand));
+    }
+#endif
+
     virtual bool tryQuantize(const std::vector<std::vector<float> > &scales,
                              const std::vector<std::vector<int> > &zeropoints, LayerParams& params) CV_OVERRIDE
     {
diff --git a/modules/dnn/src/layers/fully_connected_layer.cpp b/modules/dnn/src/layers/fully_connected_layer.cpp
index cf5f7135c22f..aa61b17200b5 100644
--- a/modules/dnn/src/layers/fully_connected_layer.cpp
+++ b/modules/dnn/src/layers/fully_connected_layer.cpp
@@ -620,6 +620,41 @@ class FullyConnectedLayerImpl CV_FINAL : public InnerProductLayer
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    virtual Ptr<BackendNode> initWebnn(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
+    {
+        Ptr<WebnnBackendNode> node = nodes[0].dynamicCast<WebnnBackendNode>();
+        auto& webnnInpOperand = node->operand;
+        auto& webnnGraphBuilder = node->net->builder;
+        ml::GemmOptions gemmOptions = {};
+        if (bias)
+        {
+            std::vector<int32_t> biasDims = {(int32_t)blobs[1].size[1]};
+            ml::Operand bias = webnn::BuildConstant(webnnGraphBuilder, biasDims, blobs[1].data, blobs[1].total()*blobs[1].elemSize(), ml::OperandType::Float32);
+            gemmOptions.c = bias;
+        }
+        ml::Operand result = nullptr;
+        if (nodes.size() == 2)
+        {
+            auto& inp2 = nodes[1].dynamicCast<WebnnBackendNode>()->operand;
+            result = webnnGraphBuilder.Gemm(webnnInpOperand, inp2, &gemmOptions);
+        }
+        else
+        {
+            std::vector<int32_t> input_shape(2, -1);
+            input_shape[1] = blobs[0].size[1];
+            // std::cout<<"input size: "<<input_shape[0]<<" "<<input_shape[1]<<std::endl;
+            ml::Operand webnnInpOperand_reshaped = webnnGraphBuilder.Reshape(webnnInpOperand, input_shape.data(), input_shape.size());
+            std::vector<int32_t> weight_shape = {(int32_t)blobs[0].size[0], (int32_t)blobs[0].size[1]};
+            // std::cout<<"weight size: "<<weight_shape[0]<<" "<<weight_shape[1]<<std::endl;
+            ml::Operand weights = webnn::BuildConstant(webnnGraphBuilder, weight_shape, blobs[0].data, blobs[0].total()*blobs[0].elemSize(), ml::OperandType::Float32);
+            gemmOptions.bTranspose = true;
+            result = webnnGraphBuilder.Gemm(webnnInpOperand_reshaped, weights, &gemmOptions);
+        }
+        return Ptr<BackendNode>(new WebnnBackendNode(result));
+    }
+#endif // HAVE_WEBNN
+
     virtual bool tryQuantize(const std::vector<std::vector<float> > &scales,
                              const std::vector<std::vector<int> > &zeropoints, LayerParams& params) CV_OVERRIDE
     {
diff --git a/modules/dnn/src/layers/softmax_layer.cpp b/modules/dnn/src/layers/softmax_layer.cpp
index db2951808ffd..7fabff479917 100644
--- a/modules/dnn/src/layers/softmax_layer.cpp
+++ b/modules/dnn/src/layers/softmax_layer.cpp
@@ -386,6 +386,18 @@ class SoftMaxLayerImpl CV_FINAL : public SoftmaxLayer
     }
 #endif // HAVE_DNN_NGRAPH
 
+#ifdef HAVE_WEBNN
+    virtual Ptr<BackendNode> initWebnn(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
+    {
+        Ptr<WebnnBackendNode> node = nodes[0].dynamicCast<WebnnBackendNode>();
+        auto& webnnInpOperand = node->operand;
+        auto& webnnGraphBuilder = node->net->builder;
+        auto operand = webnnGraphBuilder.Softmax(webnnInpOperand);
+        return Ptr<BackendNode>(new WebnnBackendNode(operand));
+    }
+
+#endif
+
     virtual bool tryQuantize(const std::vector<std::vector<float> > &scales,
                              const std::vector<std::vector<int> > &zeropoints, LayerParams& params) CV_OVERRIDE
     {
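
For context only (not part of the patch): a minimal sketch of how these initWebnn() hooks are reached. It assumes an OpenCV build with HAVE_WEBNN enabled and uses a placeholder model path and input shape; the relevant point is that selecting DNN_BACKEND_WEBNN routes supported layers (BatchNorm, Concat, InnerProduct, Softmax above) through the WebNN graph builder instead of the default CPU path.

#include <opencv2/dnn.hpp>

int main()
{
    // Hypothetical model file; any model readable by cv::dnn would do here.
    cv::dnn::Net net = cv::dnn::readNet("model.onnx");

    // Selecting the WebNN backend is what makes the layer implementations in the
    // patch call initWebnn() when the graph is initialized.
    net.setPreferableBackend(cv::dnn::DNN_BACKEND_WEBNN);
    net.setPreferableTarget(cv::dnn::DNN_TARGET_CPU);

    // Dummy NCHW input; the shape is illustrative only.
    cv::Mat blob(std::vector<int>{1, 3, 224, 224}, CV_32F, cv::Scalar(0));
    net.setInput(blob);
    cv::Mat out = net.forward();
    return 0;
}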