void Run(Graph& graph, Layer& layer) const
{
    if (layer.GetType() == LayerType::Input)
    {
        // If the outputs of this layer are DataType::Float32,
        // add a ConvertFp32ToFp16 layer after each of the outputs.
        if (layer.GetDataType() == DataType::Float32)
        {
            InsertConvertFp32ToFp16LayersAfter(graph, layer);
        }
    }
    else if (layer.GetType() == LayerType::Output)
    {
        // A DetectionPostProcess layer always produces Float32 outputs, regardless of its input type.
        Layer& connectedLayer = layer.GetInputSlots()[0].GetConnectedOutputSlot()->GetOwningLayer();
        if (connectedLayer.GetType() != LayerType::DetectionPostProcess)
        {
            // If the inputs of this layer are DataType::Float32,
            // add a ConvertFp16ToFp32 layer before each of the inputs.
            if (layer.GetDataType() == DataType::Float32)
            {
                // expectCorrectInputType is false here; otherwise the helper
                // would expect the inputs to already be DataType::Float16.
                InsertConvertFp16ToFp32LayersBefore(graph, layer, false);
            }
        }
    }
    else if (layer.GetType() != LayerType::ConvertFp32ToFp16 && layer.GetType() != LayerType::ConvertFp16ToFp32)
    {
        // For all other layers, change any Float32 inputs and outputs to Float16.
        for (auto&& input = layer.BeginInputSlots(); input != layer.EndInputSlots(); ++input)
        {
            // Connections coming from an InputLayer are left unchanged here; they are
            // updated when the conversion layer is inserted after the InputLayer.
            Layer& base = input->GetConnectedOutputSlot()->GetOwningLayer();
            if (base.GetType() != LayerType::Input)
            {
                TensorInfo convertInfo = input->GetConnection()->GetTensorInfo();
                if (convertInfo.GetDataType() == DataType::Float32)
                {
                    convertInfo.SetDataType(DataType::Float16);
                    input->GetConnection()->SetTensorInfo(convertInfo);
                }
            }
        }

        // A DetectionPostProcess layer always produces Float32 outputs, so its outputs are left unchanged.
        if (layer.GetType() != LayerType::DetectionPostProcess)
        {
            // Change the outputs to DataType::Float16.
            for (auto&& output = layer.BeginOutputSlots(); output != layer.EndOutputSlots(); ++output)
            {
                TensorInfo convertInfo = output->GetTensorInfo();
                if (convertInfo.GetDataType() == DataType::Float32)
                {
                    convertInfo.SetDataType(DataType::Float16);
                    output->SetTensorInfo(convertInfo);
                }
            }
        }
    }
}
std::vector<ConvertFp32ToFp16Layer*> InsertConvertFp32ToFp16LayersAfter(Graph& graph, Layer& layer)
std::vector<ConvertFp16ToFp32Layer*> InsertConvertFp16ToFp32LayersBefore(Graph& graph, Layer& layer, bool expectCorrectInputType)
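In practice this pass is not run by calling Run() directly; it is scheduled by the network optimizer when FP16 reduction is requested. Below is a minimal sketch using ArmNN's public API (armnn::Optimize, armnn::OptimizerOptions and its m_ReduceFp32ToFp16 flag); the helper name OptimizeAsFp16 and the backend choice are illustrative only and not part of this file.

// Sketch: enabling the Fp32 -> Fp16 reduction pass through the public ArmNN API.
// The m_ReduceFp32ToFp16 flag is what causes the optimizer to apply the graph
// conversion shown above; backend selection here is purely illustrative.
#include <armnn/ArmNN.hpp>
#include <vector>

armnn::IOptimizedNetworkPtr OptimizeAsFp16(const armnn::INetwork& network,
                                           armnn::IRuntime& runtime)
{
    armnn::OptimizerOptions options;
    options.m_ReduceFp32ToFp16 = true; // insert convert layers and switch tensors to Float16

    std::vector<armnn::BackendId> backends = { armnn::BackendId("CpuRef") };
    return armnn::Optimize(network, backends, runtime.GetDeviceSpec(), options);
}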