Commit

Fix errors after rebasing upstream master
Hanxi Guo committed Aug 26, 2021
1 parent c80984a commit 9bd5505
Showing 4 changed files with 0 additions and 81 deletions.
20 changes: 0 additions & 20 deletions modules/dnn/src/layers/batch_norm_layer.cpp
@@ -411,26 +411,6 @@ class BatchNormLayerImpl CV_FINAL : public BatchNormLayer
}
#endif // HAVE_DNN_NGRAPH

#ifdef HAVE_WEBNN
virtual Ptr<BackendNode> initWebnn(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
Ptr<WebnnBackendNode> node = nodes[0].dynamicCast<WebnnBackendNode>();
auto& webnnInpOperand = node->operand;
auto& webnnGraphBuilder = node->net->builder;
std::vector<int32_t> weights_shape = webnn::getShape(weights_);
ml::Operand weights = webnn::BuildConstant(webnnGraphBuilder, weights_shape, weights_.data, weights_.total()*weights_.elemSize(), ml::OperandType::Float32);
std::vector<int32_t> shape(dims, 1);
shape[1] = weights_shape[1];
ml::Operand weights_reshaped = webnnGraphBuilder.Reshape(weights, shape.data(), shape.size());
ml::Operand mul_res = webnnGraphBuilder.Mul(webnnInpOperand, weights_reshaped);
std::vector<int32_t> bias_shape = webnn::getShape(bias_);
ml::Operand bias = webnn::BuildConstant(webnnGraphBuilder, bias_shape, bias_.data, bias_.total()*bias_.elemSize(), ml::OperandType::Float32);
shape[1] = bias_shape[1];
ml::Operand bias_reshaped = webnnGraphBuilder.Reshape(bias, shape.data(), shape.size());
ml::Operand add_res = webnnGraphBuilder.Add(mul_res, bias_reshaped);
return Ptr<BackendNode>(new WebnnBackendNode(add_res));
}
#endif

virtual bool tryQuantize(const std::vector<std::vector<float> > &scales,
const std::vector<std::vector<int> > &zeropoints, LayerParams& params) CV_OVERRIDE
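For reference, the initWebnn override deleted above encoded the fused batch-norm affine transform: the per-channel weights_ and bias_ blobs are reshaped for broadcasting, the input is multiplied by the weights, and the bias is added. A minimal CPU sketch of that computation follows (batchNormAffine is a hypothetical helper; NCHW layout and float32 data are assumed):

#include <cstddef>

// Hypothetical reference for the computation the removed WebNN graph built:
// y = weights[c] * x + bias[c], broadcast per channel over an NCHW tensor.
static void batchNormAffine(const float* x, const float* weights, const float* bias,
                            float* y, int N, int C, int planeSize)
{
    for (int n = 0; n < N; ++n)
        for (int c = 0; c < C; ++c)
        {
            const std::size_t base = ((std::size_t)n * C + c) * (std::size_t)planeSize;
            for (int i = 0; i < planeSize; ++i)
                y[base + i] = weights[c] * x[base + i] + bias[c];
        }
}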
14 changes: 0 additions & 14 deletions modules/dnn/src/layers/concat_layer.cpp
@@ -403,20 +403,6 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
}
#endif // HAVE_DNN_NGRAPH

#ifdef HAVE_WEBNN
virtual Ptr<BackendNode> initWebnn(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
Ptr<WebnnBackendNode> node = nodes[0].dynamicCast<WebnnBackendNode>();
auto& webnnGraphBuilder = node->net->builder;
std::vector<ml::Operand> inputsOperand;
for (int i = 0; i < nodes.size(); i++)
{
inputsOperand.push_back(nodes[i].dynamicCast<WebnnBackendNode>()->operand);
}
auto operand = webnnGraphBuilder.Concat(inputsOperand.size(), inputsOperand.data(), axis);
return Ptr<BackendNode>(new WebnnBackendNode(operand));
}
#endif

virtual bool tryQuantize(const std::vector<std::vector<float> > &scales,
const std::vector<std::vector<int> > &zeropoints, LayerParams& params) CV_OVERRIDE
35 changes: 0 additions & 35 deletions modules/dnn/src/layers/fully_connected_layer.cpp
@@ -620,41 +620,6 @@ class FullyConnectedLayerImpl CV_FINAL : public InnerProductLayer
}
#endif // HAVE_DNN_NGRAPH

#ifdef HAVE_WEBNN
virtual Ptr<BackendNode> initWebnn(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
Ptr<WebnnBackendNode> node = nodes[0].dynamicCast<WebnnBackendNode>();
auto& webnnInpOperand = node->operand;
auto& webnnGraphBuilder = node->net->builder;
ml::GemmOptions gemmOptions = {};
if (bias)
{
std::vector<int32_t> biasDims = {(int32_t)blobs[1].size[1]};
ml::Operand bias = webnn::BuildConstant(webnnGraphBuilder, biasDims, blobs[1].data, blobs[1].total()*blobs[1].elemSize(), ml::OperandType::Float32);
gemmOptions.c = bias;
}
ml::Operand result = nullptr;
if (nodes.size() == 2)
{
auto& inp2 = nodes[1].dynamicCast<WebnnBackendNode>()->operand;
result = webnnGraphBuilder.Gemm(webnnInpOperand, inp2, &gemmOptions);
}
else
{
std::vector<int32_t> input_shape(2, -1);
input_shape[1] = blobs[0].size[1];
// std::cout<<"input size: "<<input_shape[0]<<" "<<input_shape[1]<<std::endl;
ml::Operand webnnInpOperand_reshaped = webnnGraphBuilder.Reshape(webnnInpOperand, input_shape.data(), input_shape.size());
std::vector<int32_t> weight_shape = {(int32_t)blobs[0].size[0], (int32_t)blobs[0].size[1]};
// std::cout<<"weight size: "<<weight_shape[1]<<" "<<weight_shape[0]<<std::endl;
ml::Operand inp2 = webnn::BuildConstant(webnnGraphBuilder, weight_shape, blobs[0].data, blobs[0].total()*blobs[0].elemSize(), ml::OperandType::Float32);
gemmOptions.bTranspose = true;
result = webnnGraphBuilder.Gemm(webnnInpOperand_reshaped, inp2, &gemmOptions);
}
return Ptr<BackendNode>(new WebnnBackendNode(result));
}
#endif // HAVE_WEBNN

virtual bool tryQuantize(const std::vector<std::vector<float> > &scales,
const std::vector<std::vector<int> > &zeropoints, LayerParams& params) CV_OVERRIDE
{
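For reference, the block deleted above mapped the inner-product layer onto a WebNN Gemm: the input is reshaped to a 2-D matrix, multiplied by the weight blob with bTranspose = true, and the optional bias is passed through gemmOptions.c. A minimal reference loop for the same result, y = x * W^T + b, follows (innerProductRef is a hypothetical helper; row-major float32 buffers are assumed):

#include <cstddef>

// Hypothetical reference for the removed WebNN Gemm: y = x * W^T + b, where
// x is (batch x inDim), W is (outDim x inDim) as stored in blobs[0], and b may be null.
static void innerProductRef(const float* x, const float* W, const float* b,
                            float* y, int batch, int inDim, int outDim)
{
    for (int n = 0; n < batch; ++n)
        for (int o = 0; o < outDim; ++o)
        {
            float acc = b ? b[o] : 0.f;
            for (int i = 0; i < inDim; ++i)
                acc += x[(std::size_t)n * inDim + i] * W[(std::size_t)o * inDim + i];
            y[(std::size_t)n * outDim + o] = acc;
        }
}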
12 changes: 0 additions & 12 deletions modules/dnn/src/layers/softmax_layer.cpp
@@ -386,18 +386,6 @@ class SoftMaxLayerImpl CV_FINAL : public SoftmaxLayer
}
#endif // HAVE_DNN_NGRAPH

#ifdef HAVE_WEBNN
virtual Ptr<BackendNode> initWebnn(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
Ptr<WebnnBackendNode> node = nodes[0].dynamicCast<WebnnBackendNode>();
auto& webnnInpOperand = node->operand;
auto& webnnGraphBuilder = node->net->builder;
auto operand = webnnGraphBuilder.Softmax(webnnInpOperand);
return Ptr<BackendNode>(new WebnnBackendNode(operand));
}

#endif

virtual bool tryQuantize(const std::vector<std::vector<float> > &scales,
const std::vector<std::vector<int> > &zeropoints, LayerParams& params) CV_OVERRIDE
{
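For reference, the override deleted above delegated the whole layer to a single webnnGraphBuilder.Softmax call on the input operand. A minimal, numerically stable sketch of row-wise softmax follows, which is the computation that call is assumed to express here (softmaxRef is a hypothetical helper; the actual reduction axis is decided by the layer and backend):

#include <algorithm>
#include <cmath>
#include <cstddef>

// Hypothetical reference: numerically stable softmax over each row of a
// (rows x cols) float matrix, using the max-subtraction trick.
static void softmaxRef(const float* x, float* y, int rows, int cols)
{
    for (int r = 0; r < rows; ++r)
    {
        const float* in = x + (std::size_t)r * cols;
        float* out = y + (std::size_t)r * cols;
        const float m = *std::max_element(in, in + cols);
        float sum = 0.f;
        for (int c = 0; c < cols; ++c)
        {
            out[c] = std::exp(in[c] - m);
            sum += out[c];
        }
        for (int c = 0; c < cols; ++c)
            out[c] /= sum;
    }
}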
