Compute Library
 21.11
Layers.h
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2018-2021 Arm Limited.
3  *
4  * SPDX-License-Identifier: MIT
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to
8  * deal in the Software without restriction, including without limitation the
9  * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10  * sell copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in all
14  * copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22  * SOFTWARE.
23  */
24 #ifndef ARM_COMPUTE_GRAPH_LAYERS_H
25 #define ARM_COMPUTE_GRAPH_LAYERS_H
26 
#include "arm_compute/graph/GraphBuilder.h"
#include "arm_compute/graph/Types.h"
#include "arm_compute/graph/frontend/ILayer.h"
#include "arm_compute/graph/frontend/IStream.h"
#include "arm_compute/graph/frontend/SubStream.h"

#include "arm_compute/core/utils/misc/Utility.h"

#include <memory>
#include <string>
37 
38 namespace arm_compute
39 {
40 namespace graph
41 {
42 namespace frontend
43 {
44 /** Input Layer */
45 class InputLayer final : public ILayer
46 {
47 public:
48  /** Construct an input layer.
49  *
50  * @param[in] desc Description of input tensor.
51  * @param[in] accessor Accessor to get input tensor data from.
52  */
54  : _desc(desc), _accessor(std::move(accessor))
55  {
56  }
57 
59  {
60  NodeParams common_params = { name(), s.hints().target_hint };
61  return GraphBuilder::add_input_node(s.graph(), common_params, _desc, std::move(_accessor));
62  }
63 
64 private:
65  TensorDescriptor _desc;
66  ITensorAccessorUPtr _accessor;
67 };
68 
69 /** Constant Layer */
70 class ConstantLayer final : public ILayer
71 {
72 public:
73  /** Construct a constant layer.
74  *
75  * @param[in] desc Description of input tensor.
76  * @param[in] accessor Accessor to get input tensor data from.
77  */
79  : _desc(desc), _accessor(std::move(accessor))
80  {
81  }
82 
84  {
85  NodeParams common_params = { name(), s.hints().target_hint };
86  return GraphBuilder::add_const_node(s.graph(), common_params, _desc, std::move(_accessor));
87  }
88 
89 private:
90  TensorDescriptor _desc;
91  ITensorAccessorUPtr _accessor;
92 };
93 
94 /** Output Layer */
95 class OutputLayer final : public ILayer
96 {
97 public:
98  /** Construct an output layer.
99  *
100  * @param[in] accessor Accessor to give output tensor data to.
101  * @param[in] connection_idx (Optional) Input connection index
102  */
103  OutputLayer(ITensorAccessorUPtr accessor, unsigned int connection_idx = 0)
104  : _accessor(std::move(accessor)), _connection_idx(connection_idx)
105  {
106  }
107 
109  {
110  NodeParams common_params = { name(), s.hints().target_hint };
111  NodeIdxPair input = { s.tail_node(), _connection_idx };
112  return GraphBuilder::add_output_node(s.graph(), common_params, input, std::move(_accessor));
113  }
114 
115 private:
116  ITensorAccessorUPtr _accessor;
117  unsigned int _connection_idx;
118 };
119 
120 /** Activation Layer */
121 class ActivationLayer final : public ILayer
122 {
123 public:
124  /** Construct an activation layer.
125  *
126  * @param[in] act_info Activation information
127  * @param[in] out_quant_info (Optional) Output quantization info
128  */
129  ActivationLayer(ActivationLayerInfo act_info,
130  const QuantizationInfo out_quant_info = QuantizationInfo())
131  : _act_info(act_info),
132  _out_quant_info(std::move(out_quant_info))
133  {
134  }
135 
137  {
138  NodeParams common_params = { name(), s.hints().target_hint };
139  NodeIdxPair input = { s.tail_node(), 0 };
140  return GraphBuilder::add_activation_node(s.graph(), common_params, input, _act_info, std::move(_out_quant_info));
141  }
142 
143 private:
144  ActivationLayerInfo _act_info;
145  const QuantizationInfo _out_quant_info;
146 };
147 
148 /** ArgMinMax Layer */
149 class ArgMinMaxLayer final : public ILayer
150 {
151 public:
152  /** Construct an activation layer.
153  *
154  * @param[in] op Reduction Operation: min or max
155  * @param[in] axis Axis to perform reduction along
156  * @param[in] out_data_type (Optional) Output tensor data type
157  * @param[in] out_quant_info (Optional) Output quantization info
158  */
160  unsigned int axis,
161  DataType out_data_type = DataType::UNKNOWN,
162  const QuantizationInfo out_quant_info = QuantizationInfo())
163  : _op(op),
164  _axis(axis),
165  _out_data_type(out_data_type),
166  _out_quant_info(std::move(out_quant_info))
167  {
168  }
169 
170  /** Create layer and add to the given stream.
171  *
172  * @param[in] s Stream to add layer to.
173  *
174  * @return ID of the created node.
175  */
177  {
178  NodeParams common_params = { name(), s.hints().target_hint };
179  NodeIdxPair input = { s.tail_node(), 0 };
180  return GraphBuilder::add_arg_min_max_node(s.graph(), common_params, input, _op, _axis, _out_data_type, std::move(_out_quant_info));
181  }
182 
183 private:
184  ReductionOperation _op;
185  unsigned int _axis;
186  DataType _out_data_type;
187  QuantizationInfo _out_quant_info;
188 };
189 
190 /** Batchnormalization Layer */
191 class BatchNormalizationLayer final : public ILayer
192 {
193 public:
194  /** Construct a batch normalization layer.
195  *
196  * @param[in] mean Accessor to get mean tensor data from.
197  * @param[in] var Accessor to get var tensor data from.
198  * @param[in] gamma (Optional) Accessor to get gamma tensor data from. Default: nullptr.
199  * @param[in] beta (Optional) Accessor to get beta tensor data from. Default: nullptr.
200  * @param[in] epsilon (Optional) Epsilon value. Default: 0.001.
201  */
204  ITensorAccessorUPtr gamma = nullptr,
205  ITensorAccessorUPtr beta = nullptr,
206  float epsilon = 0.001f)
207  : _mean(std::move(mean)), _var(std::move(var)), _gamma(std::move(gamma)), _beta(std::move(beta)), _epsilon(epsilon)
208  {
209  }
210 
212  {
213  ARM_COMPUTE_ERROR_ON(_mean == nullptr);
214  ARM_COMPUTE_ERROR_ON(_var == nullptr);
215 
216  NodeParams common_params = { name(), s.hints().target_hint };
217  NodeIdxPair input = { s.tail_node(), 0 };
218  return GraphBuilder::add_batch_normalization_node(s.graph(), common_params, input, _epsilon,
219  std::move(_mean), std::move(_var), std::move(_beta), std::move(_gamma));
220  }
221 
222 private:
223  ITensorAccessorUPtr _mean;
224  ITensorAccessorUPtr _var;
225  ITensorAccessorUPtr _gamma;
226  ITensorAccessorUPtr _beta;
227  float _epsilon;
228 };
229 
230 /** Bounding Box Transform Layer */
231 class BoundingBoxTransformLayer final : public ILayer
232 {
233 public:
234  /** Construct a bounding box transform layer.
235  *
236  * @param[in] sub_stream_input Graph sub-stream for the input
237  * @param[in] sub_stream_deltas Graph sub-stream for the deltas
238  * @param[in] info Contains BoundingBox operation information described in @ref BoundingBoxTransformInfo.
239  */
241  : _ss_input(sub_stream_input), _ss_deltas(sub_stream_deltas), _bbox_info(info)
242  {
243  }
244 
245  /** Create layer and add to the given stream.
246  *
247  * @param[in] s Stream to add layer to.
248  *
249  * @return ID of the created node.
250  */
252  {
253  NodeParams common_params = { name(), s.hints().target_hint };
254  NodeIdxPair input = { _ss_input.tail_node(), 0 };
255  NodeIdxPair deltas = { _ss_deltas.tail_node(), 0 };
256  return GraphBuilder::add_bounding_box_transform_node(s.graph(), common_params, input, deltas, _bbox_info);
257  }
258 
259 private:
260  SubStream _ss_input;
261  SubStream _ss_deltas;
262  BoundingBoxTransformInfo _bbox_info;
263 };
264 
265 /** Channel Shuffle Layer */
266 class ChannelShuffleLayer final : public ILayer
267 {
268 public:
269  /** Construct a Channel Shuffle layer.
270  *
271  * @param[in] num_groups Number of groups
272  */
274  : _num_groups(num_groups)
275  {
276  }
277 
279  {
280  NodeParams common_params = { name(), s.hints().target_hint };
281  NodeIdxPair input = { s.tail_node(), 0 };
282  return GraphBuilder::add_channel_shuffle_node(s.graph(), common_params, input, _num_groups);
283  }
284 
285 private:
286  unsigned int _num_groups;
287 };
288 
289 /** Concat Layer */
290 class ConcatLayer final : public ILayer
291 {
292 public:
293  /** Construct a concatenation layer
294  *
295  * @param[in] sub_stream1 First graph branch
296  * @param[in] sub_stream2 Second graph branch
297  * @param[in] rest_sub_streams Rest sub-graph branches
298  */
299  template <typename... Ts>
300  ConcatLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
301  : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
302  {
303  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
304  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));
305 
306  utility::for_each([&](SubStream && sub_stream)
307  {
308  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
309  },
310  std::move(rest_sub_streams)...);
311  }
312  /** Construct a concatenation layer
313  *
314  * @param[in] concat_descriptor Concat layer descriptor
315  * @param[in] sub_stream1 First graph branch
316  * @param[in] sub_stream2 Second graph branch
317  * @param[in] rest_sub_streams Rest sub-graph branches
318  */
319  template <typename... Ts>
320  ConcatLayer(descriptors::ConcatLayerDescriptor concat_descriptor, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
321  : _sub_streams(), _concat_descriptor(concat_descriptor)
322  {
323  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
324  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));
325 
326  utility::for_each([&](SubStream && sub_stream)
327  {
328  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
329  },
330  std::move(rest_sub_streams)...);
331  }
332  /** Construct a concat layer
333  *
334  * @param[in] sub_stream Sub-stream
335  */
336  template <typename... Ts>
337  ConcatLayer(SubStream &&sub_stream)
338  : _sub_streams(), _concat_descriptor(DataLayoutDimension::CHANNEL)
339  {
340  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
341  }
343  {
344  NodeID nid = EmptyNodeID;
345  NodeParams common_params = { name(), s.hints().target_hint };
346  if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
347  {
348  nid = _sub_streams[0]->tail_node();
349  }
350  else
351  {
352  // Collect tail nodes and concatenate
353  std::vector<NodeIdxPair> nodes;
354  for(auto &ss : _sub_streams)
355  {
356  if(ss && (ss->tail_node() != EmptyNodeID))
357  {
358  const auto tail_node = s.graph().node(ss->tail_node());
359  if(tail_node != nullptr && tail_node->type() != NodeType::Output)
360  {
361  nodes.push_back({ ss->tail_node(), 0 });
362  }
363  }
364  }
365  nid = GraphBuilder::add_concatenate_node(s.graph(), common_params, nodes, _concat_descriptor);
366  }
367  return nid;
368  }
369 
370 private:
371  std::vector<std::unique_ptr<SubStream>> _sub_streams;
372  descriptors::ConcatLayerDescriptor _concat_descriptor;
373 };
374 
375 /** Convolution Layer */
376 class ConvolutionLayer final : public ILayer
377 {
378 public:
379  /** Construct a convolution layer.
380  *
381  * @param[in] conv_width Convolution width.
382  * @param[in] conv_height Convolution height.
383  * @param[in] ofm Output feature map.
384  * @param[in] weights Accessor to get kernel weights from.
385  * @param[in] bias Accessor to get kernel bias from.
386  * @param[in] conv_info Padding and stride information.
387  * @param[in] num_groups (Optional) Number of groups. Default: 1.
388  * @param[in] weights_quant_info (Optional) Weights quantization information
389  * @param[in] out_quant_info (Optional) Output quantization info
390  */
391  ConvolutionLayer(unsigned int conv_width,
392  unsigned int conv_height,
393  unsigned int ofm,
394  ITensorAccessorUPtr weights,
395  ITensorAccessorUPtr bias,
396  PadStrideInfo conv_info,
397  unsigned int num_groups = 1,
398  const QuantizationInfo weights_quant_info = QuantizationInfo(),
399  const QuantizationInfo out_quant_info = QuantizationInfo())
400  : _conv_width(conv_width),
401  _conv_height(conv_height),
402  _ofm(ofm),
403  _conv_info(std::move(conv_info)),
404  _num_groups(num_groups),
405  _weights(std::move(weights)),
406  _bias(std::move(bias)),
407  _weights_quant_info(std::move(weights_quant_info)),
408  _out_quant_info(std::move(out_quant_info))
409  {
410  }
411 
413  {
414  NodeIdxPair input = { s.tail_node(), 0 };
415  NodeParams common_params = { name(), s.hints().target_hint };
416  return GraphBuilder::add_convolution_node(s.graph(), common_params, input,
417  Size2D(_conv_width, _conv_height), _ofm, _conv_info, _num_groups,
419  std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
420  }
421 
422 private:
423  unsigned int _conv_width;
424  unsigned int _conv_height;
425  unsigned int _ofm;
426  const PadStrideInfo _conv_info;
427  unsigned int _num_groups;
428  ITensorAccessorUPtr _weights;
429  ITensorAccessorUPtr _bias;
430  const QuantizationInfo _weights_quant_info;
431  const QuantizationInfo _out_quant_info;
432 };
433 
434 /** Deconvolution Layer */
435 class DeconvolutionLayer final : public ILayer
436 {
437 public:
438  /** Construct a convolution layer.
439  *
440  * @param[in] conv_width Convolution width.
441  * @param[in] conv_height Convolution height.
442  * @param[in] ofm Output feature map.
443  * @param[in] weights Accessor to get kernel weights from.
444  * @param[in] bias Accessor to get kernel bias from.
445  * @param[in] deconv_info Padding and stride information.
446  */
447  DeconvolutionLayer(unsigned int conv_width,
448  unsigned int conv_height,
449  unsigned int ofm,
450  ITensorAccessorUPtr weights,
451  ITensorAccessorUPtr bias,
452  PadStrideInfo deconv_info)
453  : _conv_width(conv_width),
454  _conv_height(conv_height),
455  _ofm(ofm),
456  _deconv_info(std::move(deconv_info)),
457  _weights(std::move(weights)),
458  _bias(std::move(bias))
459  {
460  }
461 
463  {
464  NodeIdxPair input = { s.tail_node(), 0 };
465  NodeParams common_params = { name(), s.hints().target_hint };
466  return GraphBuilder::add_deconvolution_node(s.graph(), common_params, input,
467  Size2D(_conv_width, _conv_height), _ofm, _deconv_info,
468  std::move(_weights), std::move(_bias));
469  }
470 
471 private:
472  unsigned int _conv_width;
473  unsigned int _conv_height;
474  unsigned int _ofm;
475  const PadStrideInfo _deconv_info;
476  ITensorAccessorUPtr _weights;
477  ITensorAccessorUPtr _bias;
478 };
479 
480 /** Depthwise Convolution Layer */
481 class DepthwiseConvolutionLayer final : public ILayer
482 {
483 public:
484  /** Construct a depthwise convolution layer.
485  *
486  * @param[in] conv_width Convolution width.
487  * @param[in] conv_height Convolution height.
488  * @param[in] weights Accessor to get kernel weights from.
489  * @param[in] bias Accessor to get kernel bias from.
490  * @param[in] conv_info Padding and stride information.
491  * @param[in] depth_multiplier (Optional) Depth multiplier parameter.
492  * @param[in] weights_quant_info (Optional) Quantization info used for weights
493  * @param[in] out_quant_info (Optional) Output quantization info
494  */
495  DepthwiseConvolutionLayer(unsigned int conv_width,
496  unsigned int conv_height,
497  ITensorAccessorUPtr weights,
498  ITensorAccessorUPtr bias,
499  PadStrideInfo conv_info,
500  int depth_multiplier = 1,
501  const QuantizationInfo weights_quant_info = QuantizationInfo(),
502  const QuantizationInfo out_quant_info = QuantizationInfo())
503  : _conv_width(conv_width),
504  _conv_height(conv_height),
505  _conv_info(std::move(conv_info)),
506  _weights(std::move(weights)),
507  _bias(std::move(bias)),
508  _depth_multiplier(depth_multiplier),
509  _weights_quant_info(std::move(weights_quant_info)),
510  _out_quant_info(std::move(out_quant_info))
511  {
512  }
513 
515  {
516  NodeIdxPair input = { s.tail_node(), 0 };
517  NodeParams common_params = { name(), s.hints().target_hint };
518  return GraphBuilder::add_depthwise_convolution_node(s.graph(), common_params,
519  input, Size2D(_conv_width, _conv_height), _conv_info, _depth_multiplier,
521  std::move(_weights), std::move(_bias), std::move(_weights_quant_info), std::move(_out_quant_info));
522  }
523 
524 private:
525  unsigned int _conv_width;
526  unsigned int _conv_height;
527  const PadStrideInfo _conv_info;
528  ITensorAccessorUPtr _weights;
529  ITensorAccessorUPtr _bias;
530  int _depth_multiplier;
531  const QuantizationInfo _weights_quant_info;
532  const QuantizationInfo _out_quant_info;
533 };
534 
535 /** DepthToSpace Layer */
536 class DepthToSpaceLayer final : public ILayer
537 {
538 public:
539  /** Construct an DepthToSpace layer.
540  *
541  * @param[in] block_shape Block size to rearranged
542  */
543  DepthToSpaceLayer(int32_t block_shape)
544  : _block_shape(block_shape)
545  {
546  }
547 
549  {
550  NodeParams common_params = { name(), s.hints().target_hint };
551  NodeIdxPair input = { s.tail_node(), 0 };
552  return GraphBuilder::add_depth_to_space_node(s.graph(), common_params, input, _block_shape);
553  }
554 
555 private:
556  int32_t _block_shape;
557 };
558 
559 /** Dequantization Layer */
560 class DequantizationLayer final : public ILayer
561 {
562 public:
563  /** Construct a dequantization layer.
564  *
565  */
567  {
568  }
569 
571  {
572  NodeParams common_params = { name(), s.hints().target_hint };
573  NodeIdxPair input = { s.tail_node(), 0 };
574  return GraphBuilder::add_dequantization_node(s.graph(), common_params, input);
575  }
576 };
577 
578 /** DetectionOutput Layer */
579 class DetectionOutputLayer final : public ILayer
580 {
581 public:
582  /** Construct a detection output layer.
583  *
584  * @param[in] sub_stream_conf Confidence graph sub-stream.
585  * @param[in] sub_stream_prior PriorBox graph sub-stream.
586  * @param[in] detect_info DetectionOutput parameters.
587  */
588  DetectionOutputLayer(SubStream &&sub_stream_conf, SubStream &&sub_stream_prior, const DetectionOutputLayerInfo &detect_info)
589  : _ss_conf(std::move(sub_stream_conf)), _ss_prior(std::move(sub_stream_prior)), _detect_info(detect_info)
590  {
591  }
592 
594  {
595  NodeParams common_params = { name(), s.hints().target_hint };
596  NodeIdxPair input_loc = { s.tail_node(), 0 };
597  NodeIdxPair input_conf = { _ss_conf.tail_node(), 0 };
598  NodeIdxPair input_priorbox = { _ss_prior.tail_node(), 0 };
599  return GraphBuilder::add_detection_output_node(s.graph(), common_params, input_loc, input_conf, input_priorbox, _detect_info);
600  }
601 
602 private:
603  SubStream _ss_conf;
604  SubStream _ss_prior;
605  DetectionOutputLayerInfo _detect_info;
606 };
607 /** DetectionOutputPostProcess Layer */
608 class DetectionPostProcessLayer final : public ILayer
609 {
610 public:
611  /** Construct a detection output layer.
612  *
613  * @param[in] sub_stream_class_prediction Class prediction graph sub-stream.
614  * @param[in] detect_info DetectionOutput parameters.
615  * @param[in] anchors Accessor to get anchors tensor data from.
616  * @param[in] out_quant_info (Optional) Output quantization info
617  */
618  DetectionPostProcessLayer(SubStream &&sub_stream_class_prediction, DetectionPostProcessLayerInfo detect_info, ITensorAccessorUPtr anchors,
619  const QuantizationInfo out_quant_info = QuantizationInfo())
620  : _sub_stream_class_prediction(std::move(sub_stream_class_prediction)), _detect_info(detect_info), _anchors(std::move(anchors)), _out_quant_info(std::move(out_quant_info))
621  {
622  }
623 
625  {
626  ARM_COMPUTE_ERROR_ON(_anchors == nullptr);
627 
628  NodeParams common_params = { name(), s.hints().target_hint };
629  NodeIdxPair input_box_encoding = { s.tail_node(), 0 };
630  NodeIdxPair input_class_prediction = { _sub_stream_class_prediction.tail_node(), 0 };
631  return GraphBuilder::add_detection_post_process_node(s.graph(), common_params, input_box_encoding, input_class_prediction, _detect_info, std::move(_anchors), std::move(_out_quant_info));
632  }
633 
634 private:
635  SubStream _sub_stream_class_prediction;
636  DetectionPostProcessLayerInfo _detect_info;
637  ITensorAccessorUPtr _anchors;
638  const QuantizationInfo _out_quant_info;
639 };
640 /** Dummy Layer */
641 class DummyLayer final : public ILayer
642 {
643 public:
644  /** Construct a dummy layer.
645  *
646  * @param[in] shape Output shape
647  */
648  DummyLayer(TensorShape shape)
649  : _shape(shape)
650  {
651  }
652 
654  {
655  NodeParams common_params = { name(), s.hints().target_hint };
656  NodeIdxPair input = { s.tail_node(), 0 };
657  return GraphBuilder::add_dummy_node(s.graph(), common_params, input, _shape);
658  }
659 
660 private:
661  TensorShape _shape;
662 };
663 
664 class EltwiseLayer final : public ILayer
665 {
666 public:
667  /** Construct an element-wise operation layer
668  *
669  * @param[in] sub_stream0 First graph sub-stream
670  * @param[in] sub_stream1 First graph sub-stream
671  * @param[in] op Element-wise operation to perform
672  */
673  EltwiseLayer(SubStream &&sub_stream0, SubStream &&sub_stream1, EltwiseOperation op)
674  : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1)), _op(op)
675  {
676  }
677 
679  {
680  NodeParams common_params = { name(), s.hints().target_hint };
681  NodeIdxPair input0 = { _ss0.tail_node(), 0 };
682  NodeIdxPair input1 = { _ss1.tail_node(), 0 };
683 
684  return GraphBuilder::add_elementwise_node(s.graph(), common_params, input0, input1, _op);
685  }
686 
687 private:
688  SubStream _ss0;
689  SubStream _ss1;
690  EltwiseOperation _op;
691 };
692 /** Flatten Layer */
693 class FlattenLayer final : public ILayer
694 {
695 public:
696  /** Construct a flatten layer. */
698  {
699  }
700 
702  {
703  NodeParams common_params = { name(), s.hints().target_hint };
704  NodeIdxPair input = { s.tail_node(), 0 };
705  return GraphBuilder::add_flatten_node(s.graph(), common_params, input);
706  }
707 };
708 
709 /** Fully Connected Layer */
710 class FullyConnectedLayer final : public ILayer
711 {
712 public:
713  /** Construct a fully connected layer.
714  *
715  * @param[in] num_outputs Number of outputs.
716  * @param[in] weights Accessor to get weights from.
717  * @param[in] bias Accessor to get bias from.
718  * @param[in] fc_info (Optional) Fully connected layer metadata
719  * @param[in] weights_quant_info (Optional) Weights quantization information
720  * @param[in] out_quant_info (Optional) Output quantization info
721  */
722  FullyConnectedLayer(unsigned int num_outputs,
723  ITensorAccessorUPtr weights,
724  ITensorAccessorUPtr bias,
725  const FullyConnectedLayerInfo fc_info = FullyConnectedLayerInfo(),
726  const QuantizationInfo weights_quant_info = QuantizationInfo(),
727  const QuantizationInfo out_quant_info = QuantizationInfo())
728  : _num_outputs(num_outputs),
729  _weights(std::move(weights)),
730  _bias(std::move(bias)),
731  _weights_ss(nullptr),
732  _bias_ss(nullptr),
733  _fc_info(fc_info),
734  _weights_quant_info(std::move(weights_quant_info)),
735  _out_quant_info(std::move(out_quant_info))
736  {
737  }
738 
739  /** Construct a fully connected layer.
740  *
741  * @param[in] num_outputs Number of outputs.
742  * @param[in] sub_stream_weights Graph sub-stream for the weights.
743  * @param[in] sub_stream_bias Graph sub-stream for the bias.
744  * @param[in] fc_info (Optional) Fully connected layer metadata
745  * @param[in] weights_quant_info (Optional) Weights quantization information
746  * @param[in] out_quant_info (Optional) Output quantization info
747  */
748  FullyConnectedLayer(unsigned int num_outputs,
749  SubStream sub_stream_weights,
750  SubStream sub_stream_bias,
751  const FullyConnectedLayerInfo fc_info = FullyConnectedLayerInfo(),
752  const QuantizationInfo weights_quant_info = QuantizationInfo(),
753  const QuantizationInfo out_quant_info = QuantizationInfo())
754  : _num_outputs(num_outputs),
755  _weights(nullptr),
756  _bias(nullptr),
757  _weights_ss(std::make_unique<SubStream>(std::move(sub_stream_weights))),
758  _bias_ss(std::make_unique<SubStream>(std::move(sub_stream_bias))),
759  _fc_info(fc_info),
760  _weights_quant_info(std::move(weights_quant_info)),
761  _out_quant_info(std::move(out_quant_info))
762  {
763  }
764 
765  /** Create layer and add to the given stream.
766  *
767  * @param[in] s Stream to add layer to.
768  *
769  * @return ID of the created node.
770  */
772  {
773  NodeParams common_params = { name(), s.hints().target_hint };
774  NodeIdxPair input = { s.tail_node(), 0 };
775  if(_weights != nullptr)
776  {
777  return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
778  std::move(_weights), std::move(_bias), _fc_info,
779  std::move(_weights_quant_info), std::move(_out_quant_info), s.hints().fast_math_hint);
780  }
781  else
782  {
783  ARM_COMPUTE_ERROR_ON(_weights_ss == nullptr);
784 
785  NodeID bias_nid = (_bias_ss == nullptr) ? EmptyNodeID : _bias_ss->tail_node();
786  return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
787  _weights_ss->tail_node(), bias_nid, _fc_info,
788  std::move(_out_quant_info), s.hints().fast_math_hint);
789  }
790  }
791 
792 private:
793  unsigned int _num_outputs;
794  ITensorAccessorUPtr _weights;
795  ITensorAccessorUPtr _bias;
796  std::unique_ptr<SubStream> _weights_ss;
797  std::unique_ptr<SubStream> _bias_ss;
798  const FullyConnectedLayerInfo _fc_info;
799  const QuantizationInfo _weights_quant_info;
800  const QuantizationInfo _out_quant_info;
801 };
802 
803 /** Generate Proposals Layer */
804 class GenerateProposalsLayer final : public ILayer
805 {
806 public:
807  /** Construct a generate proposals layer.
808  *
809  * @param[in] ss_scores Graph sub-stream for the scores.
810  * @param[in] ss_deltas Graph sub-stream for the deltas.
811  * @param[in] ss_anchors Graph sub-stream for the anchors.
812  * @param[in] info Generate Proposals operation information.
813  */
815  : _ss_scores(std::move(ss_scores)), _ss_deltas(std::move(ss_deltas)), _ss_anchors(std::move(ss_anchors)), _info(info)
816  {
817  }
818 
819  /** Create layer and add to the given stream.
820  *
821  * @param[in] s Stream to add layer to.
822  *
823  * @return ID of the created node.
824  */
826  {
827  NodeParams common_params = { name(), s.hints().target_hint };
828  NodeIdxPair scores = { _ss_scores.tail_node(), 0 };
829  NodeIdxPair deltas = { _ss_deltas.tail_node(), 0 };
830  NodeIdxPair anchors = { _ss_anchors.tail_node(), 0 };
831  return GraphBuilder::add_generate_proposals_node(s.graph(), common_params, scores, deltas, anchors, _info);
832  }
833 
834 private:
835  SubStream _ss_scores;
836  SubStream _ss_deltas;
837  SubStream _ss_anchors;
838  GenerateProposalsInfo _info;
839 };
840 
841 /** L2 Normalize Layer */
842 class L2NormalizeLayer final : public ILayer
843 {
844 public:
845  /** Construct a L2 Normalize layer.
846  *
847  * @param[in] axis Axis to perform normalization on
848  * @param[in] epsilon Lower bound value for the normalization
849  */
850  L2NormalizeLayer(int axis, float epsilon)
851  : _axis(axis), _epsilon(epsilon)
852  {
853  }
854 
856  {
857  NodeParams common_params = { name(), s.hints().target_hint };
858  NodeIdxPair input = { s.tail_node(), 0 };
859  return GraphBuilder::add_l2_normalize_node(s.graph(), common_params, input, _axis, _epsilon);
860  }
861 
862 private:
863  int _axis;
864  float _epsilon;
865 };
866 
867 /** Normalization Layer */
868 class NormalizationLayer final : public ILayer
869 {
870 public:
871  /** Construct a normalization layer.
872  *
873  * @param[in] norm_info Normalization information.
874  */
875  NormalizationLayer(NormalizationLayerInfo norm_info)
876  : _norm_info(norm_info)
877  {
878  }
879 
881  {
882  NodeParams common_params = { name(), s.hints().target_hint };
883  NodeIdxPair input = { s.tail_node(), 0 };
884  return GraphBuilder::add_normalization_node(s.graph(), common_params, input, _norm_info);
885  }
886 
887 private:
888  NormalizationLayerInfo _norm_info;
889 };
890 
891 /** Normalize planar YUV Layer */
892 class NormalizePlanarYUVLayer final : public ILayer
893 {
894 public:
895  /** Construct a normalize planar YUV layer.
896  *
897  * @param[in] mean Accessor to get mean tensor data from.
898  * @param[in] std Accessor to get std tensor data from.
899  */
902  : _mean(std::move(mean)), _std(std::move(std))
903  {
904  }
905 
907  {
908  ARM_COMPUTE_ERROR_ON(_mean == nullptr);
909  ARM_COMPUTE_ERROR_ON(_std == nullptr);
910 
911  NodeParams common_params = { name(), s.hints().target_hint };
912  NodeIdxPair input = { s.tail_node(), 0 };
913  return GraphBuilder::add_normalize_planar_yuv_node(s.graph(), common_params, input,
914  std::move(_mean), std::move(_std));
915  }
916 
917 private:
918  ITensorAccessorUPtr _mean;
919  ITensorAccessorUPtr _std;
920 };
921 
922 /** Pad Layer */
923 class PadLayer final : public ILayer
924 {
925 public:
926  /** Construct a pad layer.
927  *
928  * @param[in] padding The padding for each spatial dimension of the input tensor. The pair padding[i]
929  * specifies the front and the end padding in the i-th dimension.
930  * @param[in] pad_value Padding value to use. Defaults to 0.
931  */
932  PadLayer(PaddingList padding, PixelValue pad_value = PixelValue())
933  : _padding(padding), _pad_value(pad_value)
934  {
935  }
936 
938  {
939  NodeParams common_params = { name(), s.hints().target_hint };
940  NodeIdxPair input = { s.tail_node(), 0 };
941  return GraphBuilder::add_pad_node(s.graph(), common_params, input, _padding, _pad_value);
942  }
943 
944 private:
945  PaddingList _padding;
946  PixelValue _pad_value;
947 };
948 
949 /** Permute Layer */
950 class PermuteLayer final : public ILayer
951 {
952 public:
953  /** Construct a permute layer.
954  *
955  * @param[in] perm Permutation vector.
956  * @param[in] layout (Optional) Data layout to assign to permuted tensor.
957  * If UNKNOWN then the input's layout will be used.
958  */
960  : _perm(perm), _layout(layout)
961  {
962  }
963 
965  {
966  NodeParams common_params = { name(), s.hints().target_hint };
967  NodeIdxPair input = { s.tail_node(), 0 };
968  return GraphBuilder::add_permute_node(s.graph(), common_params, input, _perm, _layout);
969  }
970 
971 private:
972  PermutationVector _perm;
973  DataLayout _layout;
974 };
975 
976 /** Pooling Layer */
977 class PoolingLayer final : public ILayer
978 {
979 public:
980  /** Construct a pooling layer.
981  *
982  * @param[in] pool_info Pooling information.
983  */
984  PoolingLayer(PoolingLayerInfo pool_info)
985  : _pool_info(pool_info)
986  {
987  }
988 
990  {
991  NodeParams common_params = { name(), s.hints().target_hint };
992  NodeIdxPair input = { s.tail_node(), 0 };
993  return GraphBuilder::add_pooling_node(s.graph(), common_params, input, _pool_info);
994  }
995 
996 private:
997  PoolingLayerInfo _pool_info;
998 };
999 
1000 /** PRelu Layer */
1001 class PReluLayer final : public ILayer
1002 {
1003 public:
1004  /** Construct an PRelu operation layer
1005  *
1006  * @param[in] sub_stream0 First graph sub-stream
1007  * @param[in] sub_stream1 First graph sub-stream
1008  */
1009  PReluLayer(SubStream &&sub_stream0, SubStream &&sub_stream1)
1010  : _ss0(std::move(sub_stream0)), _ss1(std::move(sub_stream1))
1011  {
1012  }
1013 
1015  {
1016  NodeParams common_params = { name(), s.hints().target_hint };
1017  NodeIdxPair input = { _ss0.tail_node(), 0 };
1018  NodeIdxPair alpha = { _ss1.tail_node(), 0 };
1019 
1020  return GraphBuilder::add_prelu_node(s.graph(), common_params, input, alpha);
1021  }
1022 
1023 private:
1024  SubStream _ss0;
1025  SubStream _ss1;
1026 };
1027 
1028 /** Print Layer */
1029 class PrintLayer final : public ILayer
1030 {
1031 public:
1032  /** Construct a print layer.
1033  *
1034  * Example usage to locally dequantize and print a tensor:
1035  *
1036  * Tensor *output = new Tensor();
1037  * const auto transform = [output](ITensor *input)
1038  * {
1039  * output->allocator()->init(*input->info());
1040  * output->info()->set_data_type(DataType::F32);
1041  * output->allocator()->allocate();
1042  *
1043  * Window win;
1044  * win.use_tensor_dimensions(input->info()->tensor_shape());
1045  * Iterator in(input, win);
1046  * Iterator out(output, win);
1047  * execute_window_loop(win, [&](const Coordinates &)
1048  * {
1049  * *(reinterpret_cast<float *>(out.ptr())) = dequantize_qasymm8(*in.ptr(), input->info()->quantization_info().uniform());
1050  * }, in, out);
1051  *
1052  * return output;
1053  * };
1054  *
1055  * graph << InputLayer(input_descriptor.set_quantization_info(in_quant_info), get_input_accessor(common_params, nullptr, false))
1056  * << ...
1057  * << \\ CNN Layers
1058  * << ...
1059  * << PrintLayer(std::cout, IOFormatInfo(), transform)
1060  * << ...
1061  * << OutputLayer(get_output_accessor(common_params, 5));
1062  *
1063  * @param[in] stream Output stream.
1064  * @param[in] format_info (Optional) Format info.
1065  * @param[in] transform (Optional) Input transform function.
1066  */
1067  PrintLayer(std::ostream &stream, const IOFormatInfo &format_info = IOFormatInfo(), const std::function<ITensor *(ITensor *)> transform = nullptr)
1068  : _stream(stream), _format_info(format_info), _transform(transform)
1069  {
1070  }
1071 
1073  {
1074  NodeParams common_params = { name(), s.hints().target_hint };
1075  NodeIdxPair input = { s.tail_node(), 0 };
1076  return GraphBuilder::add_print_node(s.graph(), common_params, input, _stream, _format_info, _transform);
1077  }
1078 
1079 private:
1080  std::ostream &_stream;
1081  const IOFormatInfo &_format_info;
1082  const std::function<ITensor *(ITensor *)> _transform;
1083 };
1084 
1085 /** PriorBox Layer */
1086 class PriorBoxLayer final : public ILayer
1087 {
1088 public:
1089  /** Construct a priorbox layer.
1090  *
1091  * @param[in] sub_stream First graph sub-stream
1092  * @param[in] prior_info PriorBox parameters.
1093  */
1094  PriorBoxLayer(SubStream &&sub_stream, const PriorBoxLayerInfo &prior_info)
1095  : _ss(std::move(sub_stream)), _prior_info(prior_info)
1096  {
1097  }
1098 
1100  {
1101  NodeParams common_params = { name(), s.hints().target_hint };
1102  NodeIdxPair input0 = { s.tail_node(), 0 };
1103  NodeIdxPair input1 = { _ss.tail_node(), 0 };
1104  return GraphBuilder::add_priorbox_node(s.graph(), common_params, input0, input1, _prior_info);
1105  }
1106 
1107 private:
1108  SubStream _ss;
1109  PriorBoxLayerInfo _prior_info;
1110 };
1111 
1112 /** Quantization Layer */
1113 class QuantizationLayer final : public ILayer
1114 {
1115 public:
1116  /** Construct a quantization layer.
1117  *
1118  * @param[in] out_quant_info Output tensor quantization info
1119  */
1121  : _out_quant_info(out_quant_info)
1122  {
1123  }
1124 
1126  {
1127  NodeParams common_params = { name(), s.hints().target_hint };
1128  NodeIdxPair input = { s.tail_node(), 0 };
1129  return GraphBuilder::add_quantization_node(s.graph(), common_params, input, _out_quant_info);
1130  }
1131 
1132 private:
1133  QuantizationInfo _out_quant_info;
1134 };
1135 
1136 /** Reduction Layer */
1137 class ReductionLayer final : public ILayer
1138 {
1139 public:
1140  /** Construct a reduction layer.
1141  *
1142  * @param[in] op Reduction operation
1143  * @param[in] axis Reduction axis
1144  * @param[in] keep_dims (Optional) Whether to keep the reduced dimension after the operation. Defaults to true.
1145  */
1146  ReductionLayer(ReductionOperation op, unsigned int axis, bool keep_dims)
1147  : _op(op), _axis(axis), _keep_dims(keep_dims)
1148  {
1149  }
1150 
1152  {
1153  NodeParams common_params = { name(), s.hints().target_hint };
1154  NodeIdxPair input = { s.tail_node(), 0 };
1155  return GraphBuilder::add_reduction_operation_node(s.graph(), common_params, input, _op, _axis, _keep_dims);
1156  }
1157 
1158 private:
1159  ReductionOperation _op;
1160  unsigned int _axis;
1161  bool _keep_dims;
1162 };
1163 
1164 /** Reorg Layer */
1165 class ReorgLayer final : public ILayer
1166 {
1167 public:
1168  /** Construct a reorg layer.
1169  *
1170  * @param[in] stride Stride value to use for reorganizing the values in the output tensor.
1171  * It defines the spatial distance between 2 consecutive pixels in the x and y direction
1172  */
1173  ReorgLayer(int stride)
1174  : _stride(stride)
1175  {
1176  }
1177 
1179  {
1180  NodeParams common_params = { name(), s.hints().target_hint };
1181  NodeIdxPair input = { s.tail_node(), 0 };
1182  return GraphBuilder::add_reorg_node(s.graph(), common_params, input, _stride);
1183  }
1184 
1185 private:
1186  int _stride;
1187 };
1188 
1189 /** Reshape Layer */
1190 class ReshapeLayer final : public ILayer
1191 {
1192 public:
1193  /** Construct a reshape layer.
1194  *
1195  * @param[in] shape Target shape.
1196  */
1197  ReshapeLayer(TensorShape shape)
1198  : _shape(shape)
1199  {
1200  }
1201 
1203  {
1204  NodeParams common_params = { name(), s.hints().target_hint };
1205  NodeIdxPair input = { s.tail_node(), 0 };
1206  return GraphBuilder::add_reshape_node(s.graph(), common_params, input, _shape);
1207  }
1208 
1209 private:
1210  TensorShape _shape;
1211 };
1212 
1213 /** Resize Layer */
1214 class ResizeLayer final : public ILayer
1215 {
1216 public:
1217  ResizeLayer(InterpolationPolicy policy, float width_scale, float height_scale)
1218  : _policy(policy), _width_scale(width_scale), _height_scale(height_scale)
1219  {
1220  }
1221 
1223  {
1224  NodeParams common_params = { name(), s.hints().target_hint };
1225  NodeIdxPair input = { s.tail_node(), 0 };
1226  return GraphBuilder::add_resize_node(s.graph(), common_params, input, _policy, _width_scale, _height_scale);
1227  }
1228 
1229 private:
1230  InterpolationPolicy _policy;
1231  float _width_scale;
1232  float _height_scale;
1233 };
1234 
1235 /** ROIAlign Layer */
1236 class ROIAlignLayer final : public ILayer
1237 {
1238 public:
1239  /** Construct a RoiAlign layer.
1240  *
1241  * @param[in] sub_stream_input Graph sub-stream for the input
1242  * @param[in] sub_stream_rois Graph sub-stream for the rois
1243  * @param[in] pool_info Pooling information.
1244  */
1245  ROIAlignLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_rois, ROIPoolingLayerInfo pool_info)
1246  : _ss_input(sub_stream_input), _ss_rois(sub_stream_rois), _pool_info(pool_info)
1247  {
1248  }
1249 
1250  /** Prevent instances of this class from being copy constructed */
1251  ROIAlignLayer(const ROIAlignLayer &) = delete;
1252  /** Prevent instances of this class from being copied */
1253  ROIAlignLayer &operator=(const ROIAlignLayer &) = delete;
1254 
1256  {
1257  NodeParams common_params = { name(), s.hints().target_hint };
1258  NodeIdxPair input = { _ss_input.tail_node(), 0 };
1259  NodeIdxPair rois = { _ss_rois.tail_node(), 0 };
1260  return GraphBuilder::add_roi_align_node(s.graph(), common_params, input, rois, _pool_info);
1261  }
1262 
1263 private:
1264  SubStream _ss_input;
1265  SubStream _ss_rois;
1266  ROIPoolingLayerInfo _pool_info;
1267 };
1268 
1269 /** Scale Layer */
1270 class ScaleLayer final : public ILayer
1271 {
1272 public:
1273  /** Construct a scale layer.
1274  *
1275  * @param[in] mul_w Accessor to get mul weight from.
1276  * @param[in] add_w Accessor to get add weight from.
1277  */
1279  ITensorAccessorUPtr add_w)
1280  : _mul_w(std::move(mul_w)), _add_w(std::move(add_w))
1281  {
1282  }
1283 
1285  {
1286  NodeParams common_params = { name(), s.hints().target_hint };
1287  NodeIdxPair input = { s.tail_node(), 0 };
1288  return GraphBuilder::add_scale_layer(s.graph(), common_params, input, std::move(_mul_w), std::move(_add_w));
1289  }
1290 
1291 private:
1292  ITensorAccessorUPtr _mul_w;
1293  ITensorAccessorUPtr _add_w;
1294 };
1295 
1296 /** Slice Layer */
1297 class SliceLayer final : public ILayer
1298 {
1299 public:
1300  /** Construct a slice layer.
1301  *
1302  * @param[in] starts The starts of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1303  * @param[in] ends The ends of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1304  */
1306  : _starts(starts), _ends(ends)
1307  {
1308  }
1309 
1311  {
1312  NodeParams common_params = { name(), s.hints().target_hint };
1313  NodeIdxPair input = { s.tail_node(), 0 };
1314  return GraphBuilder::add_slice_node(s.graph(), common_params, input, _starts, _ends);
1315  }
1316 
1317 private:
1318  Coordinates _starts;
1319  Coordinates _ends;
1320 };
1321 
1322 /** Softmax Layer */
1323 class SoftmaxLayer final : public ILayer
1324 {
1325 public:
1326  /** Construct a softmax layer.
1327  *
1328  * @param[in] beta (Optional) Beta value. Default 1.0.
1329  */
1330  SoftmaxLayer(float beta = 1.0f)
1331  : _beta(beta)
1332  {
1333  }
1334 
1336  {
1337  NodeParams common_params = { name(), s.hints().target_hint };
1338  NodeIdxPair input = { s.tail_node(), 0 };
1339  return GraphBuilder::add_softmax_node(s.graph(), common_params, input, _beta);
1340  }
1341 
1342 private:
1343  float _beta;
1344 };
1345 
1346 /** Stack Layer */
1347 class StackLayer final : public ILayer
1348 {
1349 public:
1350  /** Construct a concatenation layer
1351  *
1352  * @param[in] sub_stream1 First graph branch
1353  * @param[in] sub_stream2 Second graph branch
1354  * @param[in] rest_sub_streams Rest sub-graph branches
1355  */
1356  template <typename... Ts>
1357  StackLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
1358  : _sub_streams(), _axis(0)
1359  {
1360  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
1361  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));
1362 
1363  utility::for_each([&](SubStream && sub_stream)
1364  {
1365  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
1366  },
1367  std::move(rest_sub_streams)...);
1368  }
1369  /** Construct a concatenation layer
1370  *
1371  * @param[in] axis Stack layer axis along which to stack the inputs
1372  * @param[in] sub_stream1 First graph branch
1373  * @param[in] sub_stream2 Second graph branch
1374  * @param[in] rest_sub_streams Rest sub-graph branches
1375  */
1376  template <typename... Ts>
1377  StackLayer(int axis, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
1378  : _sub_streams(), _axis(axis)
1379  {
1380  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream1)));
1381  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream2)));
1382 
1383  utility::for_each([&](SubStream && sub_stream)
1384  {
1385  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
1386  },
1387  std::move(rest_sub_streams)...);
1388  }
1389  /** Construct a concat layer
1390  *
1391  * @param[in] sub_stream Sub-stream
1392  */
1393  template <typename... Ts>
1394  StackLayer(SubStream &&sub_stream)
1395  : _sub_streams(), _axis(0)
1396  {
1397  _sub_streams.push_back(std::make_unique<SubStream>(std::move(sub_stream)));
1398  }
1400  {
1401  NodeID nid = EmptyNodeID;
1402  NodeParams common_params = { name(), s.hints().target_hint };
1403  if(_sub_streams.size() == 1 && _sub_streams.at(0) != nullptr)
1404  {
1405  nid = _sub_streams[0]->tail_node();
1406  }
1407  else
1408  {
1409  // Collect tail nodes and stack
1410  std::vector<NodeIdxPair> nodes;
1411  for(auto &ss : _sub_streams)
1412  {
1413  if(ss && (ss->tail_node() != EmptyNodeID))
1414  {
1415  const auto tail_node = s.graph().node(ss->tail_node());
1416  if(tail_node != nullptr && tail_node->type() != NodeType::Output)
1417  {
1418  nodes.push_back({ ss->tail_node(), 0 });
1419  }
1420  }
1421  }
1422  nid = GraphBuilder::add_stack_node(s.graph(), common_params, nodes, _axis);
1423  }
1424  return nid;
1425  }
1426 
1427 private:
1428  std::vector<std::unique_ptr<SubStream>> _sub_streams;
1429  int _axis;
1430 };
1431 
1432 /** StridedSlice Layer */
1433 class StridedSliceLayer final : public ILayer
1434 {
1435 public:
1436  /** Construct a strided slice layer.
1437  *
1438  * @param[in] starts The starts of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1439  * @param[in] ends The ends of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1440  * @param[in] strides The strides of the dimensions of the input tensor to be sliced. The length must be of rank(input).
1441  * @param[in] strided_slice_info Contains masks for the starts, ends and strides
1442  */
1443  StridedSliceLayer(Coordinates &starts, Coordinates &ends, BiStrides &strides, StridedSliceLayerInfo strided_slice_info)
1444  : _starts(starts), _ends(ends), _strides(strides), _info(strided_slice_info)
1445  {
1446  }
1447 
1449  {
1450  NodeParams common_params = { name(), s.hints().target_hint };
1451  NodeIdxPair input = { s.tail_node(), 0 };
1452  return GraphBuilder::add_strided_slice_node(s.graph(), common_params, input, _starts, _ends, _strides, _info);
1453  }
1454 
1455 private:
1456  Coordinates _starts;
1457  Coordinates _ends;
1458  BiStrides _strides;
1459  StridedSliceLayerInfo _info;
1460 };
1461 
1462 /** YOLO Layer */
1463 class YOLOLayer final : public ILayer
1464 {
1465 public:
1466  /** Construct a YOLO layer.
1467  *
1468  * @param[in] act_info Activation info
1469  */
1470  YOLOLayer(ActivationLayerInfo act_info)
1471  : _act_info(act_info)
1472  {
1473  }
1474 
1476  {
1477  NodeParams common_params = { name(), s.hints().target_hint };
1478  NodeIdxPair input = { s.tail_node(), 0 };
1479  return GraphBuilder::add_yolo_node(s.graph(), common_params, input, _act_info);
1480  }
1481 
1482 private:
1483  ActivationLayerInfo _act_info;
1484 };
1485 } // namespace frontend
1486 } // namespace graph
1487 } // namespace arm_compute
1488 #endif /* ARM_COMPUTE_GRAPH_LAYERS_H */
static NodeID add_reduction_operation_node(Graph &g, NodeParams params, NodeIdxPair input, ReductionOperation op, int axis, bool keep_dims=true)
Adds a reduction sum layer node to the graph.
Common node parameters.
Definition: Types.h:269
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:825
Class describing the value of a pixel for any image format.
Definition: PixelValue.h:34
InterpolationPolicy
Interpolation method.
Definition: Types.h:398
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1310
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:548
static NodeID add_quantization_node(Graph &g, NodeParams params, NodeIdxPair input, const QuantizationInfo &out_quant_info)
Adds a quantization layer node to the graph.
DepthwiseConvolutionMethod depthwise_convolution_method_hint
Depthwise Convolution method hint.
Definition: Types.h:65
static NodeID add_batch_normalization_node(Graph &g, NodeParams params, NodeIdxPair input, float epsilon, ITensorAccessorUPtr mean_accessor=nullptr, ITensorAccessorUPtr var_accessor=nullptr, ITensorAccessorUPtr beta_accessor=nullptr, ITensorAccessorUPtr gamma_accessor=nullptr)
Adds a batch normalization layer node to the graph.
FlattenLayer()
Construct a flatten layer.
Definition: Layers.h:697
Generate Proposals Information class.
Definition: Types.h:1311
ROIAlignLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_rois, ROIPoolingLayerInfo pool_info)
Construct a RoiAlign layer.
Definition: Layers.h:1245
EltwiseOperation
Supported Element-wise operations.
Definition: Types.h:109
BoundingBoxTransformLayer(SubStream &&sub_stream_input, SubStream &&sub_stream_deltas, BoundingBoxTransformInfo info)
Construct a bounding box transform layer.
Definition: Layers.h:240
Target target_hint
Target execution hint.
Definition: Types.h:63
ReorgLayer(int stride)
Construct a reorg layer.
Definition: Layers.h:1173
static NodeID add_input_node(Graph &g, NodeParams params, const TensorDescriptor &desc, ITensorAccessorUPtr accessor=nullptr)
Adds an input layer node to the graph.
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:771
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1399
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:412
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:342
std::vector< PaddingInfo > PaddingList
List of padding information.
Definition: Types.h:440
ReductionOperation
Available reduction operations.
Definition: Types.h:463
InputLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
Construct an input layer.
Definition: Layers.h:53
EltwiseLayer(SubStream &&sub_stream0, SubStream &&sub_stream1, EltwiseOperation op)
Construct an element-wise operation layer.
Definition: Layers.h:673
DepthwiseConvolutionLayer(unsigned int conv_width, unsigned int conv_height, ITensorAccessorUPtr weights, ITensorAccessorUPtr bias, PadStrideInfo conv_info, int depth_multiplier=1, const QuantizationInfo weights_quant_info=QuantizationInfo(), const QuantizationInfo out_quant_info=QuantizationInfo())
Construct a depthwise convolution layer.
Definition: Layers.h:495
DataLayoutDimension
[DataLayout enum definition]
Definition: Types.h:124
static NodeID add_prelu_node(Graph &g, NodeParams params, NodeIdxPair input, NodeIdxPair alpha)
Adds a prelu layer node to the graph.
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:83
ConstantLayer(TensorDescriptor desc, ITensorAccessorUPtr accessor)
Construct a constant layer.
Definition: Layers.h:78
L2NormalizeLayer(int axis, float epsilon)
Construct a L2 Normalize layer.
Definition: Layers.h:850
ReshapeLayer(TensorShape shape)
Construct a reshape layer.
Definition: Layers.h:1197
FullyConnectedLayer(unsigned int num_outputs, ITensorAccessorUPtr weights, ITensorAccessorUPtr bias, const FullyConnectedLayerInfo fc_info=FullyConnectedLayerInfo(), const QuantizationInfo weights_quant_info=QuantizationInfo(), const QuantizationInfo out_quant_info=QuantizationInfo())
Construct a fully connected layer.
Definition: Layers.h:722
static NodeID add_resize_node(Graph &g, NodeParams params, NodeIdxPair input, InterpolationPolicy policy, float width_scale, float height_scale)
Adds a resize layer node to the graph.
SliceLayer(Coordinates &starts, Coordinates &ends)
Construct a slice layer.
Definition: Layers.h:1305
static NodeID add_activation_node(Graph &g, NodeParams params, NodeIdxPair input, ActivationLayerInfo act_info, const QuantizationInfo &out_quant_info=QuantizationInfo())
Adds an activation layer node to the graph.
static NodeID add_normalization_node(Graph &g, NodeParams params, NodeIdxPair input, NormalizationLayerInfo norm_info)
Adds a normalization layer node to the graph.
static NodeID add_permute_node(Graph &g, NodeParams params, NodeIdxPair input, PermutationVector perm, DataLayout layout=DataLayout::UNKNOWN)
Adds a permute layer node to the graph.
Strides PermutationVector
Permutation vector.
Definition: Types.h:51
ConcatLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
Construct a concatenation layer.
Definition: Layers.h:300
QuantizationLayer(QuantizationInfo out_quant_info)
Construct a quantization layer.
Definition: Layers.h:1120
std::stringstream ss(mlgo_str)
YOLOLayer(ActivationLayerInfo act_info)
Construct a YOLO layer.
Definition: Layers.h:1470
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:251
static NodeID add_yolo_node(Graph &g, NodeParams params, NodeIdxPair input, ActivationLayerInfo act_info)
Adds a yolo layer to the graph.
#define ARM_COMPUTE_ERROR_ON(cond)
If the condition is true then an error message is printed and an exception thrown.
Definition: Error.h:466
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:906
std::unique_ptr< ITensorAccessor > ITensorAccessorUPtr
PReluLayer(SubStream &&sub_stream0, SubStream &&sub_stream1)
Construct a PRelu operation layer.
Definition: Layers.h:1009
static NodeID add_fully_connected_layer(Graph &g, NodeParams params, NodeIdxPair input, unsigned int num_outputs, NodeID weights_nid, NodeID bias_nid=EmptyNodeID, const FullyConnectedLayerInfo fc_info=FullyConnectedLayerInfo(), const QuantizationInfo &out_quant_info=QuantizationInfo(), FastMathHint fast_math_hint=FastMathHint::Disabled)
Adds a fully connected layer node to the graph.
SoftmaxLayer(float beta=1.0f)
Construct a softmax layer.
Definition: Layers.h:1330
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:211
static NodeID add_bounding_box_transform_node(Graph &g, NodeParams params, NodeIdxPair input, NodeIdxPair deltas, BoundingBoxTransformInfo info)
Adds a bounding box transform layer node to the graph.
static NodeID add_reshape_node(Graph &g, NodeParams params, NodeIdxPair input, TensorShape shape)
Adds a reshape layer node to the graph.
static NodeID add_priorbox_node(Graph &g, NodeParams params, NodeIdxPair input0, NodeIdxPair input1, const PriorBoxLayerInfo &prior_info)
Adds a priorbox layer node to the graph.
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1284
Interface for CPU tensor.
Definition: ITensor.h:36
static NodeID add_concatenate_node(Graph &g, NodeParams params, const std::vector< NodeIdxPair > &inputs, const descriptors::ConcatLayerDescriptor &concat_descriptor)
Adds a depth concatenate node to the graph.
static NodeID add_generate_proposals_node(Graph &g, NodeParams params, NodeIdxPair scores, NodeIdxPair deltas, NodeIdxPair anchors, GenerateProposalsInfo info)
Adds a generate proposals layer node to the graph.
static NodeID add_slice_node(Graph &g, NodeParams params, NodeIdxPair input, Coordinates &starts, Coordinates &ends)
Adds a slice node to the graph.
static NodeID add_deconvolution_node(Graph &g, NodeParams params, NodeIdxPair input, Size2D kernel_spatial_extend, unsigned int depth, PadStrideInfo deconv_info, ITensorAccessorUPtr weights_accessor=nullptr, ITensorAccessorUPtr bias_accessor=nullptr)
Adds a deconvolution layer node to the graph.
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:570
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:989
static NodeID add_convolution_node(Graph &g, NodeParams params, NodeIdxPair input, Size2D kernel_spatial_extend, unsigned int depth, PadStrideInfo conv_info, unsigned int num_groups=1, ConvolutionMethod method=ConvolutionMethod::Default, FastMathHint fast_math_hint=FastMathHint::Disabled, ITensorAccessorUPtr weights_accessor=nullptr, ITensorAccessorUPtr bias_accessor=nullptr, const QuantizationInfo &weights_quant_info=QuantizationInfo(), const QuantizationInfo &out_quant_info=QuantizationInfo())
Adds a convolution layer node to the graph.
static NodeID add_depth_to_space_node(Graph &g, NodeParams params, NodeIdxPair input, int32_t block_shape)
Adds a depth to space layer node to the graph.
Copyright (c) 2017-2021 Arm Limited.
FullyConnectedLayer(unsigned int num_outputs, SubStream sub_stream_weights, SubStream sub_stream_bias, const FullyConnectedLayerInfo fc_info=FullyConnectedLayerInfo(), const QuantizationInfo weights_quant_info=QuantizationInfo(), const QuantizationInfo out_quant_info=QuantizationInfo())
Construct a fully connected layer.
Definition: Layers.h:748
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1335
DepthToSpaceLayer(int32_t block_shape)
Construct an DepthToSpace layer.
Definition: Layers.h:543
StackLayer(SubStream &&sub_stream)
Construct a concat layer.
Definition: Layers.h:1394
static NodeID add_detection_output_node(Graph &g, NodeParams params, NodeIdxPair input_loc, NodeIdxPair input_conf, NodeIdxPair input_priorbox, const DetectionOutputLayerInfo &detect_info)
Adds a detection output layer node to the graph.
static NodeID add_pad_node(Graph &g, NodeParams params, NodeIdxPair input, const PaddingList &paddings, PixelValue pad_value=PixelValue())
Adds a pad layer node to the graph.
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:701
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1125
DequantizationLayer()
Construct a dequantization layer.
Definition: Layers.h:566
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:880
Quantization information.
static NodeID add_print_node(Graph &g, NodeParams params, NodeIdxPair input, std::ostream &stream, const IOFormatInfo &format_info=IOFormatInfo(), const std::function< ITensor *(ITensor *)> transform=nullptr)
Adds a print layer node to the graph.
DetectionPostProcessLayer(SubStream &&sub_stream_class_prediction, DetectionPostProcessLayerInfo detect_info, ITensorAccessorUPtr anchors, const QuantizationInfo out_quant_info=QuantizationInfo())
Construct a detection output layer.
Definition: Layers.h:618
static NodeID add_stack_node(Graph &g, NodeParams params, const std::vector< NodeIdxPair > &inputs, int axis)
Adds a stack layer node to the graph.
PriorBoxLayer(SubStream &&sub_stream, const PriorBoxLayerInfo &prior_info)
Construct a priorbox layer.
Definition: Layers.h:1094
StackLayer(SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
Construct a concatenation layer.
Definition: Layers.h:1357
OutputLayer(ITensorAccessorUPtr accessor, unsigned int connection_idx=0)
Construct an output layer.
Definition: Layers.h:103
ReductionLayer(ReductionOperation op, unsigned int axis, bool keep_dims)
Construct a reduction layer.
Definition: Layers.h:1146
PrintLayer(std::ostream &stream, const IOFormatInfo &format_info=IOFormatInfo(), const std::function< ITensor *(ITensor *)> transform=nullptr)
Construct a print layer.
Definition: Layers.h:1067
static NodeID add_elementwise_node(Graph &g, NodeParams params, NodeIdxPair input0, NodeIdxPair input1, EltwiseOperation operation)
Adds an element-wise layer node to the graph.
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:462
DetectionOutputPostProcess Layer.
Definition: Layers.h:608
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1255
NormalizePlanarYUVLayer(ITensorAccessorUPtr mean, ITensorAccessorUPtr std)
Construct a normalize planar YUV layer.
Definition: Layers.h:900
ActivationLayer(ActivationLayerInfo act_info, const QuantizationInfo out_quant_info=QuantizationInfo())
Construct an activation layer.
Definition: Layers.h:129
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:937
NodeID tail_node()
Returns the tail node of the Stream.
Definition: IStream.h:65
IO formatting information class.
Definition: Types.h:2216
ScaleLayer(ITensorAccessorUPtr mul_w, ITensorAccessorUPtr add_w)
Construct a scale layer.
Definition: Layers.h:1278
static NodeID add_reorg_node(Graph &g, NodeParams params, NodeIdxPair input, int stride)
Adds a reorg layer node to the graph.
const unsigned int num_groups
Definition: Im2Col.cpp:153
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:176
Coordinates of an item.
Definition: Coordinates.h:37
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:278
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1222
DeconvolutionLayer(unsigned int conv_width, unsigned int conv_height, unsigned int ofm, ITensorAccessorUPtr weights, ITensorAccessorUPtr bias, PadStrideInfo deconv_info)
Construct a convolution layer.
Definition: Layers.h:447
static NodeID add_roi_align_node(Graph &g, NodeParams params, NodeIdxPair input, NodeIdxPair rois, ROIPoolingLayerInfo pool_info)
Adds a ROI align layer node to the graph.
PriorBox layer info.
Definition: Types.h:798
static NodeID add_l2_normalize_node(Graph &g, NodeParams params, NodeIdxPair input, int axis, float epsilon)
Adds a L2 Normalize layer node to the graph.
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:58
ConcatLayer(SubStream &&sub_stream)
Construct a concat layer.
Definition: Layers.h:337
ConcatLayer(descriptors::ConcatLayerDescriptor concat_descriptor, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
Construct a concatenation layer.
Definition: Layers.h:320
Bounding Box Transform information class.
Definition: Types.h:1442
BatchNormalizationLayer(ITensorAccessorUPtr mean, ITensorAccessorUPtr var, ITensorAccessorUPtr gamma=nullptr, ITensorAccessorUPtr beta=nullptr, float epsilon=0.001f)
Construct a batch normalization layer.
Definition: Layers.h:202
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1072
static NodeID add_dummy_node(Graph &g, NodeParams params, NodeIdxPair input, TensorShape shape)
Adds a Dummy node to the graph.
static NodeID add_dequantization_node(Graph &g, NodeParams params, NodeIdxPair input)
Adds a dequantization node to the graph.
void for_each(F &&)
Base case of for_each.
Definition: Utility.h:110
static NodeID add_normalize_planar_yuv_node(Graph &g, NodeParams params, NodeIdxPair input, ITensorAccessorUPtr mean_accessor=nullptr, ITensorAccessorUPtr std_accessor=nullptr)
Adds a normalize planar YUV layer node to the graph.
NodeID-index struct.
Definition: Types.h:262
StridedSliceLayer(Coordinates &starts, Coordinates &ends, BiStrides &strides, StridedSliceLayerInfo strided_slice_info)
Construct a strided slice layer.
Definition: Layers.h:1443
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:593
static NodeID add_channel_shuffle_node(Graph &g, NodeParams params, NodeIdxPair input, unsigned int num_groups)
Adds a channel shuffle layer node to the graph.
FastMathHint fast_math_hint
Fast math hint.
Definition: Types.h:66
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:964
Detection Output layer info.
Definition: Types.h:935
static NodeID add_detection_post_process_node(Graph &g, NodeParams params, NodeIdxPair input_box_encoding, NodeIdxPair input_class_prediction, const DetectionPostProcessLayerInfo &detect_info, ITensorAccessorUPtr anchors_accessor=nullptr, const QuantizationInfo &anchor_quant_info=QuantizationInfo())
Adds a detection post process layer node to the graph.
unsigned int NodeID
Definition: Types.h:69
ScaleKernelInfo info(interpolation_policy, default_border_mode, PixelValue(), sampling_policy, false)
static NodeID add_softmax_node(Graph &g, NodeParams params, NodeIdxPair input, float beta=1.f)
Adds a softmax node to the graph.
StackLayer(int axis, SubStream &&sub_stream1, SubStream &&sub_stream2, Ts &&... rest_sub_streams)
Construct a stack layer.
Definition: Layers.h:1377
ResizeLayer(InterpolationPolicy policy, float width_scale, float height_scale)
Definition: Layers.h:1217
virtual Graph & graph()=0
Returns the underlying graph.
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:624
StreamHints & hints()
Returns the stream hints that are currently used.
Definition: IStream.h:73
ROI Pooling Layer Information class.
Definition: Types.h:1268
constexpr NodeID EmptyNodeID
Constant NodeID specifying an equivalent of null node.
Definition: Types.h:76
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1448
const INode * node(NodeID id) const
Get node object given its id.
Definition: Graph.cpp:204
static NodeID add_scale_layer(Graph &g, const NodeParams &params, NodeIdxPair input, ITensorAccessorUPtr mul_accessor=nullptr, ITensorAccessorUPtr add_accessor=nullptr)
Adds a scale layer node to the graph This layer computes a product of the input with a scale (read fr...
DetectionOutputLayer(SubStream &&sub_stream_conf, SubStream &&sub_stream_prior, const DetectionOutputLayerInfo &detect_info)
Construct a detection output layer.
Definition: Layers.h:588
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:136
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:514
Detection Post Process layer info.
Definition: Types.h:1054
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:653
PadLayer(PaddingList padding, PixelValue pad_value=PixelValue())
Construct a pad layer.
Definition: Layers.h:932
const std::string & name() const
Layer name accessor.
Definition: ILayer.h:64
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1475
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1099
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:678
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:855
static NodeID add_const_node(Graph &g, NodeParams params, const TensorDescriptor &desc, ITensorAccessorUPtr accessor=nullptr)
Adds a Const node to the graph.
static NodeID add_flatten_node(Graph &g, NodeParams params, NodeIdxPair input)
Adds a flatten layer node to the graph.
PoolingLayer(PoolingLayerInfo pool_info)
Construct a pooling layer.
Definition: Layers.h:984
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1178
ArgMinMaxLayer(ReductionOperation op, unsigned int axis, DataType out_data_type=DataType::UNKNOWN, const QuantizationInfo out_quant_info=QuantizationInfo())
Construct an arg min max layer.
Definition: Layers.h:159
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1014
static NodeID add_pooling_node(Graph &g, NodeParams params, NodeIdxPair input, PoolingLayerInfo pool_info)
Adds a pooling layer node to the graph.
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1202
PermuteLayer(PermutationVector perm, DataLayout layout=DataLayout::UNKNOWN)
Construct a permute layer.
Definition: Layers.h:959
DataType
Available data types.
Definition: Types.h:79
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:1151
static NodeID add_strided_slice_node(Graph &g, NodeParams params, NodeIdxPair input, Coordinates &starts, Coordinates &ends, BiStrides &strides, StridedSliceLayerInfo info)
Adds a strided slice node to the graph.
DataLayout
[DataLayout enum definition]
Definition: Types.h:113
GenerateProposalsLayer(SubStream &&ss_scores, SubStream &&ss_deltas, SubStream &&ss_anchors, GenerateProposalsInfo info)
Construct a generate proposals layer.
Definition: Layers.h:814
ConvolutionLayer(unsigned int conv_width, unsigned int conv_height, unsigned int ofm, ITensorAccessorUPtr weights, ITensorAccessorUPtr bias, PadStrideInfo conv_info, unsigned int num_groups=1, const QuantizationInfo weights_quant_info=QuantizationInfo(), const QuantizationInfo out_quant_info=QuantizationInfo())
Construct a convolution layer.
Definition: Layers.h:391
ConvolutionMethod convolution_method_hint
Convolution method hint.
Definition: Types.h:64
ChannelShuffleLayer(unsigned int num_groups)
Construct a Channel Shuffle layer.
Definition: Layers.h:273
DummyLayer(TensorShape shape)
Construct a dummy layer.
Definition: Layers.h:648
static NodeID add_arg_min_max_node(Graph &g, NodeParams params, NodeIdxPair input, ReductionOperation op, unsigned int axis, DataType out_data_type=DataType::UNKNOWN, const QuantizationInfo &out_quant_info=QuantizationInfo())
Adds an arg min max layer node to the graph.
static NodeID add_output_node(Graph &g, NodeParams params, NodeIdxPair input, ITensorAccessorUPtr accessor=nullptr)
Adds an output layer node to the graph.
static NodeID add_depthwise_convolution_node(Graph &g, NodeParams params, NodeIdxPair input, Size2D kernel_spatial_extend, PadStrideInfo conv_info, int depth_multiplier=1, DepthwiseConvolutionMethod method=DepthwiseConvolutionMethod::Default, ITensorAccessorUPtr weights_accessor=nullptr, ITensorAccessorUPtr bias_accessor=nullptr, const QuantizationInfo &quant_info=QuantizationInfo(), const QuantizationInfo &out_quant_info=QuantizationInfo())
Adds a depth-wise convolution layer node to the graph.
NodeID create_layer(IStream &s) override
Create layer and add to the given stream.
Definition: Layers.h:108
NormalizationLayer(NormalizationLayerInfo norm_info)
Construct a normalization layer.
Definition: Layers.h:875