Movatterモバイル変換


[0]ホーム

URL:


Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Commit 105dfb0

Browse files
author
Anastasia Murzova
committed
Added Steps support in DNN Slice layer
1 parent 1d6a1e5 · commit 105dfb0

File tree

4 files changed

+142
-9
lines changed

4 files changed

+142
-9
lines changed

‎modules/dnn/include/opencv2/dnn/all_layers.hpp‎

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -364,6 +364,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
364364
* Inner vector has slice ranges for the first number of input dimensions.
365365
*/
366366
std::vector<std::vector<Range> > sliceRanges;
367+
std::vector<std::vector<int> > sliceSteps;
367368
int axis;
368369
int num_split;
369370

‎modules/dnn/src/layers/slice_layer.cpp‎

Lines changed: 66 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,7 @@ class SliceLayerImpl : public SliceLayer
6464
SliceLayerImpl(const LayerParams& params)
6565
{
6666
setParamsFrom(params);
67+
hasSteps =false;
6768
axis = params.get<int>("axis",1);
6869
num_split = params.get<int>("num_split",0);
6970
hasDynamicShapes = params.get<bool>("has_dynamic_shapes",false);
@@ -112,6 +113,22 @@ class SliceLayerImpl : public SliceLayer
112113
sliceRanges[0][i].end = end;// We'll finalize a negative value later.
113114
}
114115
}
116+
117+
if (params.has("steps"))
118+
{
119+
const DictValue &steps = params.get("steps");
120+
sliceSteps.resize(1);
121+
sliceSteps[0].resize(steps.size());
122+
123+
for (int i =0; i < steps.size(); ++i)
124+
{
125+
int step = steps.get<int>(i);
126+
CV_Assert(step >=1);
127+
if (step >1)
128+
hasSteps =true;
129+
sliceSteps[0][i] = step;
130+
}
131+
}
115132
}
116133
}
117134

@@ -120,11 +137,11 @@ class SliceLayerImpl : public SliceLayer
120137
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
121138
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
122139
returnINF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1) &&
123-
sliceRanges.size() ==1 && sliceRanges[0].size() ==4;
140+
sliceRanges.size() ==1 && sliceRanges[0].size() ==4 && !hasSteps;
124141
#endif
125142
#ifdef HAVE_DNN_NGRAPH
126143
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
127-
return sliceRanges.size() ==1;
144+
return sliceRanges.size() ==1 && !hasSteps;
128145
#endif
129146
return backendId == DNN_BACKEND_OPENCV;
130147
}
@@ -147,6 +164,9 @@ class SliceLayerImpl : public SliceLayer
147164
{
148165
if (shapesInitialized || inpShape[j] >0)
149166
outputs[i][j] =normalize_axis_range(sliceRanges[i][j], inpShape[j]).size();
167+
168+
if (! sliceSteps.empty() && sliceSteps[i][j] >1)
169+
outputs[i][j] = (outputs[i][j] + sliceSteps[i][j] -1) / sliceSteps[i][j];
150170
}
151171
}
152172
}
@@ -181,6 +201,7 @@ class SliceLayerImpl : public SliceLayer
181201
const MatSize& inpShape = inputs[0].size;
182202

183203
finalSliceRanges = sliceRanges;
204+
184205
if (sliceRanges.empty())
185206
{
186207
// Divide input blob on equal parts by axis.
@@ -213,6 +234,9 @@ class SliceLayerImpl : public SliceLayer
213234
}
214235
}
215236

237+
if (!sliceSteps.empty() && sliceSteps[0].size() != inputs[0].dims)
238+
sliceSteps[0].resize(inputs[0].dims,1);
239+
216240
#if0
217241
std::cout << "DEBUG: DNN/Slice: " << outputs.size() << " inpShape=" << inpShape << std::endl;
218242
for (int i = 0; i < outputs.size(); ++i)
@@ -478,9 +502,24 @@ class SliceLayerImpl : public SliceLayer
478502

479503
const Mat& inpMat = inputs[0];
480504
CV_Assert(outputs.size() == finalSliceRanges.size());
481-
for (size_t i =0; i < outputs.size(); i++)
505+
506+
if (!hasSteps)
507+
{
508+
for (size_t i =0; i < outputs.size(); i++)
509+
{
510+
inpMat(finalSliceRanges[i]).copyTo(outputs[i]);
511+
}
512+
}
513+
else
482514
{
483-
inpMat(finalSliceRanges[i]).copyTo(outputs[i]);
515+
int dimsNum = inpMat.dims;
516+
517+
for (size_t i =0; i < outputs.size(); i++)
518+
{
519+
std::vector<int>inpIdx(dimsNum,0);
520+
std::vector<int>outIdx(dimsNum,0);
521+
getSliceRecursive(inpMat, inpIdx, finalSliceRanges[i], sliceSteps[i],0, dimsNum, outputs[i], outIdx);
522+
}
484523
}
485524
}
486525

@@ -570,11 +609,34 @@ class SliceLayerImpl : public SliceLayer
570609
}
571610
#endif// HAVE_DNN_NGRAPH
572611

612+
private:
613+
voidgetSliceRecursive(const Mat &inpMat, std::vector<int> &inpIdx,
614+
const std::vector<Range> &sliceRanges,
615+
const std::vector<int> &sliceSteps,int dim,int dimsNum,
616+
Mat &outputs, std::vector<int> &outIdx)
617+
{
618+
int begin = sliceRanges[dim].start;
619+
int end = sliceRanges[dim].end;
620+
int step = !sliceSteps.empty() ? sliceSteps[dim] :1;
621+
622+
for (int k = begin, j =0; k < end; k += step, j++)
623+
{
624+
inpIdx[dim] = k;
625+
outIdx[dim] = j;
626+
627+
if (dim +1 < dimsNum)
628+
getSliceRecursive(inpMat, inpIdx, sliceRanges, sliceSteps, dim +1, dimsNum, outputs, outIdx);
629+
else
630+
outputs.at<float>(outIdx.data()) = inpMat.at<float>(inpIdx.data());
631+
}
632+
}
633+
573634
protected:
574635
// The actual non-negative values determined from @p sliceRanges depends on input size.
575636
std::vector<std::vector<Range> > finalSliceRanges;
576637
bool hasDynamicShapes;
577638
bool shapesInitialized;
639+
bool hasSteps;
578640
};
579641

580642
classCropLayerImpl CV_FINAL : public SliceLayerImpl

‎modules/dnn/src/onnx/onnx_importer.cpp‎

Lines changed: 15 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -641,18 +641,20 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
641641
int axis =0;
642642
std::vector<int> begin;
643643
std::vector<int> end;
644+
std::vector<int> steps;
644645
int inp_size = node_proto.input_size();
645646

646647
if (inp_size ==1)
647648
{
648649
if (layerParams.has("steps"))
649650
{
650-
DictValuesteps = layerParams.get("steps");
651-
for (int i =0; i <steps.size(); ++i)
651+
DictValuesteps_dict = layerParams.get("steps");
652+
for (int i =0; i <steps_dict.size(); ++i)
652653
{
653-
if (steps.get<int>(i) !=1)
654+
if (steps_dict.get<int>(i) !=1) {
654655
CV_Error(Error::StsNotImplemented,
655656
"Slice layer only supports steps = 1");
657+
}
656658
}
657659
}
658660
if (layerParams.has("axes")) {
@@ -677,7 +679,7 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
677679
int finish = ends.get<int>(i);
678680
end.push_back((finish <0) ? --finish : finish);// numpy doesn't include last dim
679681
}
680-
}else {
682+
}else {// inp_size > 1
681683
CV_Assert(inp_size >=3);
682684
for (int i =1; i < inp_size; i++) {
683685
CV_Assert(constBlobs.find(node_proto.input(i)) != constBlobs.end());
@@ -711,6 +713,12 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
711713
if (inp_size ==5) {
712714
CV_Assert(constBlobs.find(node_proto.input(4)) != constBlobs.end());
713715
Mat step_blob =getBlob(node_proto,4);
716+
constint* steps_ptr = step_blob.ptr<int>();
717+
718+
if (axis >0)
719+
steps.resize(axis,1);
720+
721+
std::copy(steps_ptr, steps_ptr + step_blob.total(),std::back_inserter(steps));
714722

715723
// Very strange application for Slice op with tensor reversing.
716724
// We just workaround it for 2d constants.
@@ -728,13 +736,15 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
728736
return;
729737
}
730738
}
731-
CV_CheckEQ(countNonZero(step_blob !=1),0,"Slice layer only supports steps = 1");
732739
}
733740
}
734741
layerParams.set("begin",DictValue::arrayInt(&begin[0], begin.size()));
735742
layerParams.set("end",DictValue::arrayInt(&end[0], end.size()));
736743
layerParams.set("axis", axis);
737744

745+
if (!steps.empty())
746+
layerParams.set("steps",DictValue::arrayInt(&steps[0], steps.size()));
747+
738748
if (constBlobs.find(node_proto.input(0)) != constBlobs.end())
739749
{
740750
Mat inp =getBlob(node_proto,0);

‎modules/dnn/test/test_onnx_importer.cpp‎

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -615,6 +615,66 @@ TEST_P(Test_ONNX_layers, Slice)
615615
#endif
616616
}
617617

618+
TEST_P(Test_ONNX_layers, Slice_Steps_2DInput)
619+
{
620+
if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
621+
{
622+
if (target == DNN_TARGET_MYRIAD)applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
623+
}
624+
if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
625+
{
626+
if (backend == DNN_BACKEND_OPENCV)
627+
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
628+
}
629+
630+
testONNXModels("slice_opset_11_steps_2d");
631+
}
632+
633+
TEST_P(Test_ONNX_layers, Slice_Steps_3DInput)
634+
{
635+
if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
636+
{
637+
if (target == DNN_TARGET_MYRIAD)applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
638+
}
639+
if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
640+
{
641+
if (backend == DNN_BACKEND_OPENCV)
642+
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
643+
}
644+
645+
testONNXModels("slice_opset_11_steps_3d");
646+
}
647+
648+
TEST_P(Test_ONNX_layers, Slice_Steps_4DInput)
649+
{
650+
if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
651+
{
652+
if (target == DNN_TARGET_MYRIAD)applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
653+
}
654+
if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
655+
{
656+
if (backend == DNN_BACKEND_OPENCV)
657+
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
658+
}
659+
660+
testONNXModels("slice_opset_11_steps_4d");
661+
}
662+
663+
TEST_P(Test_ONNX_layers, Slice_Steps_5DInput)
664+
{
665+
if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
666+
{
667+
if (target == DNN_TARGET_MYRIAD)applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
668+
}
669+
if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
670+
{
671+
if (backend == DNN_BACKEND_OPENCV)
672+
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
673+
}
674+
675+
testONNXModels("slice_opset_11_steps_5d");
676+
}
677+
618678
TEST_P(Test_ONNX_layers, Softmax)
619679
{
620680
testONNXModels("softmax");

0 commit comments

Comments
 (0)

[8]ページ先頭

©2009-2025 Movatter.jp