21 changes: 13 additions & 8 deletions modules/dnn/src/ie_ngraph.cpp
@@ -383,11 +383,17 @@ class InfEngineNgraphExtension : public InferenceEngine::IExtension
 
 #endif // OpenVINO >= 2022.1
 
-InfEngineNgraphNode::InfEngineNgraphNode(std::shared_ptr<ngraph::Node>&& _node)
-    : BackendNode(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH), node(std::move(_node)) {}
+InfEngineNgraphNode::InfEngineNgraphNode(ngraph::Output<ngraph::Node>&& _node)
+    : BackendNode(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH), node(std::move(_node)) {
+    CV_Assert(node.get_node());
+    CV_Assert(node.get_node_shared_ptr());
+}
 
-InfEngineNgraphNode::InfEngineNgraphNode(const std::shared_ptr<ngraph::Node>& _node)
-    : BackendNode(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH), node(_node) {}
+InfEngineNgraphNode::InfEngineNgraphNode(const ngraph::Output<ngraph::Node>& _node)
+    : BackendNode(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH), node(_node) {
+    CV_Assert(node.get_node());
+    CV_Assert(node.get_node_shared_ptr());
+}
 
 InfEngineNgraphNode::InfEngineNgraphNode(const std::vector<Ptr<BackendNode> >& nodes,
                                          Ptr<Layer>& cvLayer_, std::vector<Mat*>& inputs,
@@ -420,7 +426,7 @@ InfEngineNgraphNode::InfEngineNgraphNode(const std::vector<Ptr<BackendNode> >& n
 }
 
 void InfEngineNgraphNode::setName(const std::string& name) {
-    node->set_friendly_name(name);
+    node.get_node()->set_friendly_name(name);
 }
 
 InfEngineNgraphNet::InfEngineNgraphNet(detail::NetImplBase& netImpl)
@@ -441,8 +447,7 @@ InfEngineNgraphNet::InfEngineNgraphNet(detail::NetImplBase& netImpl, InferenceEn
 void InfEngineNgraphNet::addOutput(const Ptr<InfEngineNgraphNode>& node)
 {
     CV_Assert(node);
-    CV_Assert(node->node);
-    const std::string& name = node->node->get_friendly_name();
+    const std::string& name = node->node.get_node()->get_friendly_name();
     requestedOutputs.insert({name, node.get()});
 }
 
@@ -458,7 +463,7 @@ void InfEngineNgraphNet::createNet(Target targetId) {
         CV_Assert(output_node_it->second);
         auto out = std::make_shared<ngraph::op::Result>(output_node_it->second->node);
 #if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2022_1)
-        out->set_friendly_name(output_node_it->first + (output_node_it->second->node->get_output_size() == 1 ? "" : ".0"));
+        out->set_friendly_name(output_node_it->first + (output_node_it->second->node.get_node()->get_output_size() == 1 ? "" : ".0"));
 #endif
         outs.push_back(out);
     }
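
Note: the two CV_Assert calls added to the constructors guard against an empty handle. A default-constructed ngraph::Output carries no node, so get_node() returns nullptr; asserting both accessors at construction time catches that early. A minimal sketch of the guarded behavior (assuming the ngraph API bundled with OpenVINO; not part of this patch):

    #include <ngraph/ngraph.hpp>
    #include <opencv2/core.hpp>

    void emptyHandleSketch()
    {
        ngraph::Output<ngraph::Node> out;        // empty handle: get_node() == nullptr
        // CV_Assert(out.get_node());            // would fail at this point
        auto c = std::make_shared<ngraph::op::Constant>(
            ngraph::element::f32, ngraph::Shape{1}, std::vector<float>{1.f});
        out = c->output(0);                      // now backed by a real node
        CV_Assert(out.get_node());               // passes
        CV_Assert(out.get_node_shared_ptr());    // passes
    }
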
6 changes: 3 additions & 3 deletions modules/dnn/src/ie_ngraph.hpp
@@ -93,13 +93,13 @@ class InfEngineNgraphNode : public BackendNode
                         std::vector<Mat*>& inputs, std::vector<Mat>& outputs,
                         std::vector<Mat>& internals);
 
-    InfEngineNgraphNode(std::shared_ptr<ngraph::Node>&& _node);
-    InfEngineNgraphNode(const std::shared_ptr<ngraph::Node>& _node);
+    InfEngineNgraphNode(ngraph::Output<ngraph::Node>&& _node);
+    InfEngineNgraphNode(const ngraph::Output<ngraph::Node>& _node);
 
     void setName(const std::string& name);
 
     // Inference Engine network object that allows to obtain the outputs of this layer.
-    std::shared_ptr<ngraph::Node> node;
+    ngraph::Output<ngraph::Node> node;
     Ptr<InfEngineNgraphNet> net;
     Ptr<dnn::Layer> cvLayer;
 };
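
Note: this member type change is the heart of the PR. ngraph::Output<ngraph::Node> identifies a single output port of a node (node pointer plus port index) rather than the node itself, so node-level calls now go through get_node(), while per-tensor queries live on the handle. A minimal sketch of the accessor pattern (assuming the ngraph API bundled with OpenVINO):

    auto c = std::make_shared<ngraph::op::Constant>(
        ngraph::element::f32, ngraph::Shape{2, 3}, std::vector<float>(6, 0.f));
    ngraph::Output<ngraph::Node> out = c->output(0);   // one tensor, not the whole node
    out.get_node()->set_friendly_name("const1");       // raw Node* for node-level calls
    size_t ports = out.get_node()->get_output_size();  // port count lives on the node
    ngraph::Shape s = out.get_shape();                 // shape of this port's tensor
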
2 changes: 1 addition & 1 deletion modules/dnn/src/layers/batch_norm_layer.cpp
@@ -457,7 +457,7 @@ class BatchNormLayerImpl CV_FINAL : public BatchNormLayer
     virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
     {
         auto ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
-        std::vector<size_t> shape(ieInpNode->get_shape().size(), 1);
+        std::vector<size_t> shape(ieInpNode.get_shape().size(), 1);
         shape[1] = weights_.total();
         auto weight = std::make_shared<ngraph::op::Constant>(ngraph::element::f32, ngraph::Shape(shape), weights_.data);
         auto bias = std::make_shared<ngraph::op::Constant>(ngraph::element::f32, ngraph::Shape(shape), bias_.data);
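
Note: the same mechanical substitution repeats across the layer files below; node is now a value-type handle, so shape queries change from node->get_shape() to node.get_shape(). A sketch of the pattern this layer uses to build a broadcastable parameter shape (assuming the ngraph API bundled with OpenVINO; onesLikeRank is a hypothetical helper, not part of the patch):

    // Rank-length vector of ones; the caller then sets element 1 to the
    // channel count, yielding e.g. {1, C, 1, 1} for a 4D input.
    static std::vector<size_t> onesLikeRank(const ngraph::Output<ngraph::Node>& node)
    {
        return std::vector<size_t>(node.get_shape().size(), 1);
    }
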
2 changes: 1 addition & 1 deletion modules/dnn/src/layers/blank_layer.cpp
@@ -148,7 +148,7 @@ class BlankLayerImpl CV_FINAL : public BlankLayer
     virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
                                         const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
     {
-        auto& ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
+        auto ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
         ngraph::OutputVector inp{ieInpNode};
         auto blank = std::make_shared<ngraph::op::Concat>(inp, 0);
         return Ptr<BackendNode>(new InfEngineNgraphNode(blank));
4 changes: 2 additions & 2 deletions modules/dnn/src/layers/concat_layer.cpp
@@ -392,7 +392,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
     virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
                                         const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
     {
-        const int numDims = nodes[0].dynamicCast<InfEngineNgraphNode>()->node->get_shape().size();
+        const int numDims = nodes[0].dynamicCast<InfEngineNgraphNode>()->node.get_shape().size();
         const int cAxis = normalize_axis(axis, numDims);
         std::vector<size_t> maxDims(numDims, 0);
 
@@ -403,7 +403,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
             auto inp = nodes[i].dynamicCast<InfEngineNgraphNode>()->node;
             inp_nodes.push_back(inp);
 
-            std::vector<size_t> inpShape = inp->get_shape();
+            std::vector<size_t> inpShape = inp.get_shape();
             for (int i = 0; i < numDims; ++i)
                 maxDims[i] = std::max(maxDims[i], inpShape[i]);
         }
8 changes: 4 additions & 4 deletions modules/dnn/src/layers/convolution_layer.cpp
@@ -822,13 +822,13 @@ class ConvolutionLayerImpl CV_FINAL : public BaseConvolutionLayerImpl
         CV_Assert(!blobs.empty());
         CV_Assert_N(inputs.size() >= 1, nodes.size() >= 1);
         auto& ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
-        std::vector<size_t> dims = ieInpNode->get_shape();
+        std::vector<size_t> dims = ieInpNode.get_shape();
         CV_Check(dims.size(), dims.size() >= 3 && dims.size() <= 5, "");
-        std::shared_ptr<ngraph::Node> ieWeights = nodes.size() > 1 ? nodes[1].dynamicCast<InfEngineNgraphNode>()->node : nullptr;
+        ngraph::Output<ngraph::Node> ieWeights;
         if (nodes.size() > 1)
-            CV_Assert(ieWeights);  // dynamic_cast should not fail
+            ieWeights = nodes[1].dynamicCast<InfEngineNgraphNode>()->node;
         const int inpCn = dims[1];
-        const int inpGroupCn = nodes.size() > 1 ? ieWeights->get_shape()[1] : blobs[0].size[1];
+        const int inpGroupCn = nodes.size() > 1 ? ieWeights.get_shape()[1] : blobs[0].size[1];
         const int group = inpCn / inpGroupCn;
 
         std::vector<size_t> kernel_shape;
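
Note: unlike a shared_ptr, an ngraph::Output has no nullptr sentinel, so the old ternary-with-nullptr initialization becomes default construction plus conditional assignment, and the CV_Assert on the pointer is dropped. A condensed sketch of the new control flow (assuming the ngraph API bundled with OpenVINO; groupChannels is a hypothetical wrapper, not part of the patch):

    static int groupChannels(const std::vector<ngraph::Output<ngraph::Node>>& inputs,
                             const std::vector<cv::Mat>& blobs)
    {
        ngraph::Output<ngraph::Node> ieWeights;  // empty handle; no nullptr needed
        if (inputs.size() > 1)
            ieWeights = inputs[1];               // bound only when weights are an input
        // On the blobs path ieWeights stays empty, so it is only queried
        // behind the same inputs.size() > 1 condition:
        return inputs.size() > 1 ? (int)ieWeights.get_shape()[1] : blobs[0].size[1];
    }
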
2 changes: 1 addition & 1 deletion modules/dnn/src/layers/crop_and_resize_layer.cpp
@@ -133,7 +133,7 @@ class CropAndResizeLayerImpl CV_FINAL : public CropAndResizeLayer
         auto input = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
         auto rois = nodes[1].dynamicCast<InfEngineNgraphNode>()->node;
 
-        auto rois_shape = rois->get_shape();
+        auto rois_shape = rois.get_shape();
         std::vector<int64_t> dims(rois_shape.begin(), rois_shape.end()), offsets(4, 0);
         offsets[3] = 2;
         dims[3] = 7;
28 changes: 14 additions & 14 deletions modules/dnn/src/layers/elementwise_layers.cpp
@@ -490,7 +490,7 @@ struct ReLUFunctor : public BaseFunctor
 #endif
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
     {
         if (slope) {
             auto param = std::make_shared<ngraph::op::Constant>(ngraph::element::f32, ngraph::Shape{1}, &slope);
@@ -674,7 +674,7 @@ struct ReLU6Functor : public BaseFunctor
 
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        return std::make_shared<ngraph::op::Clamp>(node, minValue, maxValue);
    }
@@ -796,7 +796,7 @@ struct BaseDefaultFunctor : public BaseFunctor
 #endif // HAVE_CANN
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        CV_Error(Error::StsNotImplemented, "");
    }
@@ -929,7 +929,7 @@ struct TanHFunctor : public BaseDefaultFunctor<TanHFunctor>
 #endif // HAVE_CANN
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        return std::make_shared<ngraph::op::Tanh>(node);
    }
@@ -998,7 +998,7 @@ struct SwishFunctor : public BaseDefaultFunctor<SwishFunctor>
 #endif // HAVE_CANN
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        auto sigmoid = std::make_shared<ngraph::op::Sigmoid>(node);
        return std::make_shared<ngraph::op::v1::Multiply>(node, sigmoid);
@@ -1074,7 +1074,7 @@ struct MishFunctor : public BaseDefaultFunctor<MishFunctor>
 #endif // HAVE_CANN
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        float one = 1.0f;
        auto constant = std::make_shared<ngraph::op::Constant>(ngraph::element::f32, ngraph::Shape{1}, &one);
@@ -1157,7 +1157,7 @@ struct SigmoidFunctor : public BaseDefaultFunctor<SigmoidFunctor>
 #endif // HAVE_CANN
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        return std::make_shared<ngraph::op::Sigmoid>(node);
    }
@@ -1237,7 +1237,7 @@ struct ELUFunctor : public BaseDefaultFunctor<ELUFunctor>
 #endif // HAVE_CANN
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        return std::make_shared<ngraph::op::Elu>(node, alpha);
    }
@@ -1307,7 +1307,7 @@ struct AbsValFunctor : public BaseDefaultFunctor<AbsValFunctor>
 #endif // HAVE_CANN
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        float coeff = -0.999999f;
        // float coeff = preferableTarget == DNN_TARGET_MYRIAD ? -0.999f : -0.999999f;
@@ -1603,7 +1603,7 @@ struct SqrtFunctor : public BaseDefaultFunctor<SqrtFunctor>
 #endif // HAVE_HALIDE
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        return std::make_shared<ngraph::op::v0::Sqrt>(node);
    }
@@ -2329,7 +2329,7 @@ struct PowerFunctor : public BaseFunctor
 #endif // HAVE_CANN
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        auto scale_node = std::make_shared<ngraph::op::Constant>(ngraph::element::f32,
                                                                 ngraph::Shape{1}, &scale);
@@ -2439,7 +2439,7 @@ struct ExpFunctor : public BaseDefaultFunctor<ExpFunctor>
 #endif // HAVE_HALIDE
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        auto scale_node = std::make_shared<ngraph::op::Constant>(ngraph::element::f32,
                                                                 ngraph::Shape{1}, &normScale);
@@ -2598,7 +2598,7 @@ struct ChannelsPReLUFunctor : public BaseFunctor
 #endif // HAVE_CANN
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        const size_t numChannels = scale.total();
        auto slope = std::make_shared<ngraph::op::Constant>(ngraph::element::f32, ngraph::Shape{numChannels}, scale.data);
@@ -2678,7 +2678,7 @@ struct PReLUFunctor : public ChannelsPReLUFunctor
     }
 
 #ifdef HAVE_DNN_NGRAPH
-    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        auto shape = getShape<size_t>(scale);
        auto slope = std::make_shared<ngraph::op::Constant>(ngraph::element::f32, shape, scale.ptr<float>());
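
Note: all fourteen functor signatures change identically; initNgraphAPI now accepts the output handle but still returns a shared_ptr to the op it builds, which converts implicitly back to an Output at the call site. A minimal sketch (assuming the ngraph API bundled with OpenVINO):

    // An Output<Node> is accepted wherever an op constructor expects an input,
    // so most functor bodies need no change beyond the parameter type.
    std::shared_ptr<ngraph::Node> initNgraphAPI(const ngraph::Output<ngraph::Node>& node)
    {
        return std::make_shared<ngraph::op::Sigmoid>(node);
    }
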
15 changes: 9 additions & 6 deletions modules/dnn/src/layers/eltwise_layer.cpp
@@ -896,12 +896,14 @@ class EltwiseLayerImpl CV_FINAL : public EltwiseLayer
     virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
                                         const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
     {
+        CV_Assert(nodes.size() >= 2);
         auto curr_node = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
         if (!coeffs.empty()) {
             auto coeff = std::make_shared<ngraph::op::Constant>(ngraph::element::f32, ngraph::Shape{1}, &coeffs[0]);
             curr_node = std::make_shared<ngraph::op::v1::Multiply>(curr_node, coeff, ngraph::op::AutoBroadcastType::NUMPY);
         }
 
+        std::shared_ptr<ngraph::Node> res;
         for (size_t i = 1; i < nodes.size(); i++)
         {
             auto next_node = nodes[i].dynamicCast<InfEngineNgraphNode>()->node;
@@ -910,15 +912,16 @@ class EltwiseLayerImpl CV_FINAL : public EltwiseLayer
                 next_node = std::make_shared<ngraph::op::v1::Multiply>(next_node, coeff, ngraph::op::AutoBroadcastType::NUMPY);
             }
             switch (op) {
-                case SUM:  curr_node = std::make_shared<ngraph::op::v1::Add>(curr_node, next_node); break;
-                case PROD: curr_node = std::make_shared<ngraph::op::v1::Multiply>(curr_node, next_node); break;
-                case DIV:  curr_node = std::make_shared<ngraph::op::v1::Divide>(curr_node, next_node); break;
-                case MAX:  curr_node = std::make_shared<ngraph::op::v1::Maximum>(curr_node, next_node); break;
-                case MIN:  curr_node = std::make_shared<ngraph::op::v1::Minimum>(curr_node, next_node); break;
+                case SUM:  res = std::make_shared<ngraph::op::v1::Add>(curr_node, next_node); break;
+                case PROD: res = std::make_shared<ngraph::op::v1::Multiply>(curr_node, next_node); break;
+                case DIV:  res = std::make_shared<ngraph::op::v1::Divide>(curr_node, next_node); break;
+                case MAX:  res = std::make_shared<ngraph::op::v1::Maximum>(curr_node, next_node); break;
+                case MIN:  res = std::make_shared<ngraph::op::v1::Minimum>(curr_node, next_node); break;
                 default: CV_Error(Error::StsNotImplemented, "Unsupported eltwise operation");
             }
+            curr_node = res;
         }
-        return Ptr<BackendNode>(new InfEngineNgraphNode(curr_node));
+        return Ptr<BackendNode>(new InfEngineNgraphNode(res));
     }
 #endif // HAVE_DNN_NGRAPH
 
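
Note: the loop needs both types. curr_node is deduced as ngraph::Output, while make_shared yields a shared_ptr; keeping the freshly built op in res preserves a shared_ptr for the InfEngineNgraphNode at the end, and curr_node = res feeds the next iteration through the implicit shared_ptr-to-Output conversion. The new CV_Assert(nodes.size() >= 2) guarantees the loop body runs at least once, so res is never null. A condensed sketch of the SUM case (assuming the ngraph API bundled with OpenVINO; chainAdd is a hypothetical wrapper, not part of the patch):

    static std::shared_ptr<ngraph::Node> chainAdd(
        const std::vector<ngraph::Output<ngraph::Node>>& inputs)
    {
        CV_Assert(inputs.size() >= 2);           // mirrors the new assert
        ngraph::Output<ngraph::Node> curr_node = inputs[0];
        std::shared_ptr<ngraph::Node> res;
        for (size_t i = 1; i < inputs.size(); i++)
        {
            res = std::make_shared<ngraph::op::v1::Add>(curr_node, inputs[i]);
            curr_node = res;                     // shared_ptr -> Output, implicit
        }
        return res;                              // non-null: the loop ran at least once
    }
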
2 changes: 1 addition & 1 deletion modules/dnn/src/layers/flatten_layer.cpp
@@ -209,7 +209,7 @@ class FlattenLayerImpl CV_FINAL : public FlattenLayer
                                         const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
     {
         auto& ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
-        std::vector<size_t> dims = ieInpNode->get_shape();
+        std::vector<size_t> dims = ieInpNode.get_shape();
 
         int numAxes = dims.size();
         int startAxis = normalize_axis(_startAxis, numAxes);
2 changes: 1 addition & 1 deletion modules/dnn/src/layers/fully_connected_layer.cpp
@@ -803,7 +803,7 @@ class FullyConnectedLayerImpl CV_FINAL : public InnerProductLayer
         }
         else
         {
-            std::vector<int> shape(1 + normalize_axis(axis, ieInpNode->get_shape().size()), 0);
+            std::vector<int> shape(1 + normalize_axis(axis, ieInpNode.get_shape().size()), 0);
             shape[shape.size() - 1] = -1;
             auto inp = std::make_shared<ngraph::op::v1::Reshape>(
                 ieInpNode,
2 changes: 1 addition & 1 deletion modules/dnn/src/layers/lrn_layer.cpp
@@ -480,7 +480,7 @@ class LRNLayerImpl CV_FINAL : public LRNLayer
         if (type != SPATIAL_NRM) {
             axes = {1};
         } else {
-            axes.resize(ieInpNode->get_shape().size() - 2);
+            axes.resize(ieInpNode.get_shape().size() - 2);
             std::iota(axes.begin(), axes.end(), 2);
         }
         auto ngraph_axes = std::make_shared<ngraph::op::Constant>(ngraph::element::i64, ngraph::Shape{axes.size()}, axes.data());
4 changes: 2 additions & 2 deletions modules/dnn/src/layers/max_unpooling_layer.cpp
@@ -194,7 +194,7 @@ class MaxUnpoolLayerImpl CV_FINAL : public MaxUnpoolLayer
         std::vector<MatShape> inpShapes(nodes.size());
         std::vector<MatShape> outShapes, internals;
         for (int i = 0; i < nodes.size(); ++i) {
-            std::vector<size_t> shape = nodes[i].dynamicCast<InfEngineNgraphNode>()->node->get_shape();
+            std::vector<size_t> shape = nodes[i].dynamicCast<InfEngineNgraphNode>()->node.get_shape();
             inpShapes[i] = std::vector<int>(shape.begin(), shape.end());
         }
         getMemoryShapes(inpShapes, 1, outShapes, internals);
@@ -213,7 +213,7 @@ class MaxUnpoolLayerImpl CV_FINAL : public MaxUnpoolLayer
             std::make_shared<ngraph::op::Constant>(ngraph::element::i32, ngraph::Shape{1}, &newShape),
             true
         );
-        if (indices->get_element_type() != ngraph::element::i32 && indices->get_element_type() != ngraph::element::i64) {
+        if (indices.get_element_type() != ngraph::element::i32 && indices.get_element_type() != ngraph::element::i64) {
            indices = std::make_shared<ngraph::op::Convert>(indices, ngraph::element::i64);
        }
 
2 changes: 1 addition & 1 deletion modules/dnn/src/layers/mvn_layer.cpp
@@ -390,7 +390,7 @@ class MVNLayerImpl CV_FINAL : public MVNLayer
         auto mvn = std::make_shared<ngraph::op::MVN>(ieInpNode, acrossChannels, normVariance, eps);
 #else
         int64_t start_axis = acrossChannels ? 1 : 2;
-        std::vector<int64_t> axes_v(ieInpNode->get_shape().size() - start_axis);
+        std::vector<int64_t> axes_v(ieInpNode.get_shape().size() - start_axis);
         std::iota(axes_v.begin(), axes_v.end(), start_axis);
         auto axes = std::make_shared<ngraph::op::Constant>(ngraph::element::i64, ngraph::Shape{axes_v.size()}, axes_v.data());
         auto mvn = std::make_shared<ngraph::op::v6::MVN>(ieInpNode, axes, normVariance, eps, ngraph::op::MVNEpsMode::INSIDE_SQRT);
6 changes: 3 additions & 3 deletions modules/dnn/src/layers/nary_eltwise_layers.cpp
@@ -900,12 +900,12 @@ class NaryEltwiseLayerImpl CV_FINAL : public NaryEltwiseLayer
         auto& inp0 = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
         auto& inp1 = nodes[1].dynamicCast<InfEngineNgraphNode>()->node;
 
-        if (inp0->get_element_type() != inp1->get_element_type()) {
+        if (inp0.get_element_type() != inp1.get_element_type()) {
             auto dtype = preferableTarget == DNN_TARGET_OPENCL_FP16 || preferableTarget == DNN_TARGET_MYRIAD ?
                          ngraph::element::f16 : ngraph::element::f32;
-            if (inp0->get_element_type() != dtype)
+            if (inp0.get_element_type() != dtype)
                 inp0 = std::make_shared<ngraph::op::v0::Convert>(inp0, dtype);
-            if (inp1->get_element_type() != dtype)
+            if (inp1.get_element_type() != dtype)
                 inp1 = std::make_shared<ngraph::op::v0::Convert>(inp1, dtype);
         }
 
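
Note: get_element_type(), like get_shape(), moves onto the handle, and since inp0 and inp1 are auto& references to the stored node members, assigning the Convert result rebinds those handles to the converted tensors. A minimal sketch of the dtype-alignment idiom (assuming the ngraph API bundled with OpenVINO; alignTypes is a hypothetical wrapper, not part of the patch):

    static void alignTypes(ngraph::Output<ngraph::Node>& inp0,
                           ngraph::Output<ngraph::Node>& inp1)
    {
        if (inp0.get_element_type() != inp1.get_element_type()) {
            auto dtype = ngraph::element::f32;   // the patch picks f16 on FP16/Myriad targets
            if (inp0.get_element_type() != dtype)
                inp0 = std::make_shared<ngraph::op::v0::Convert>(inp0, dtype);
            if (inp1.get_element_type() != dtype)
                inp1 = std::make_shared<ngraph::op::v0::Convert>(inp1, dtype);
        }
    }
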