// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
/**
* @brief This is a header file for the Inference Engine Extension Interface
*
* @file ie_iextension.h
*/
#pragma once
#if !defined(IN_OV_COMPONENT) && !defined(IE_LEGACY_HEADER_INCLUDED)
# define IE_LEGACY_HEADER_INCLUDED
# ifdef _MSC_VER
# pragma message( \
"The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
# else
# warning("The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html")
# endif
#endif
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "ie_api.h"
#include "ie_blob.h"
#include "ie_common.h"
#include "ie_layouts.h"
#include "ie_version.hpp"
#include "ngraph/opsets/opset.hpp"
/**
* @def INFERENCE_EXTENSION_API(TYPE)
* @brief Defines Inference Engine Extension API method
*/
#if defined(_WIN32) || defined(__CYGWIN__)
# if defined(IMPLEMENT_INFERENCE_EXTENSION_API)
# define INFERENCE_EXTENSION_API(type) extern "C" __declspec(dllexport) type
# else
# define INFERENCE_EXTENSION_API(type) extern "C" type
# endif
#elif defined(__GNUC__) && (__GNUC__ >= 4) || defined(__clang__)
# ifdef IMPLEMENT_INFERENCE_EXTENSION_API
# define INFERENCE_EXTENSION_API(type) extern "C" __attribute__((visibility("default"))) type
# else
# define INFERENCE_EXTENSION_API(type) extern "C" type
# endif
#endif
namespace InferenceEngine {
IE_SUPPRESS_DEPRECATED_START
/**
* @deprecated The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on
* transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html
* @struct DataConfig
* @brief This structure describes data configuration
*/
struct INFERENCE_ENGINE_1_0_DEPRECATED DataConfig {
/**
* @brief Format of memory descriptor
*/
TensorDesc desc;
/**
* @brief Index of in-place memory. If -1, the memory cannot be in-place
*/
int inPlace = -1;
/**
* @brief Flag indicating constant memory. If the layer contains only constant memory, it can be calculated
* at the load stage.
*/
bool constant = false;
};
/**
* @deprecated The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on
* transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html
* @struct LayerConfig
* @brief This structure describes Layer configuration
*/
struct INFERENCE_ENGINE_1_0_DEPRECATED LayerConfig {
/**
* @brief Whether dynamic batch is supported. If false, dynamic batch is not supported
*/
bool dynBatchSupport = false;
/**
* @brief Vector of input data configs
*/
std::vector<DataConfig> inConfs;
/**
* @brief Vector of output data configs
*/
std::vector<DataConfig> outConfs;
};
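/**
* A minimal sketch of filling DataConfig and LayerConfig for a layer with one input and one
* output; the precision, dimensions, and layout below are illustrative assumptions only.
* @code{.cpp}
* InferenceEngine::DataConfig dataConf;
* dataConf.desc = InferenceEngine::TensorDesc(InferenceEngine::Precision::FP32,
*                                             {1, 3, 224, 224},
*                                             InferenceEngine::Layout::NCHW);
* dataConf.inPlace = -1;      // the memory cannot be reused in-place
* dataConf.constant = false;  // the data is not known at load time
*
* InferenceEngine::LayerConfig layerConf;
* layerConf.dynBatchSupport = false;
* layerConf.inConfs.push_back(dataConf);
* layerConf.outConfs.push_back(dataConf);
* @endcode
*/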
/**
* @deprecated The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on
* transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html
* @interface ILayerImpl
* @brief This class provides an interface for extension implementations
*/
class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(ILayerImpl) {
public:
/**
* @brief A shared pointer to the ILayerImpl interface
*/
using Ptr = std::shared_ptr<ILayerImpl>;
/**
* @brief Destructor
*/
virtual ~ILayerImpl();
};
/**
* @deprecated The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on
* transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html
* @interface ILayerExecImpl
* @brief This class provides an interface for implementations with custom execution code
*/
class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(ILayerExecImpl) : public ILayerImpl {
public:
/**
* @brief A shared pointer to the ILayerExecImpl interface
*/
using Ptr = std::shared_ptr<ILayerExecImpl>;
/**
* @brief Destructor
*/
virtual ~ILayerExecImpl();
/**
* @brief Gets all supported configurations for the current layer
*
* @param conf Vector with supported configurations
* @param resp Response descriptor
* @return Status code
*/
virtual StatusCode getSupportedConfigurations(std::vector<LayerConfig>& conf, ResponseDesc* resp) noexcept = 0;
/**
* @brief Initializes the implementation
*
* @param config Selected supported configuration
* @param resp Response descriptor
* @return Status code
*/
virtual StatusCode init(LayerConfig& config, ResponseDesc* resp) noexcept = 0;
/**
* @brief Execute method
*
* @param inputs Vector of blobs with input memory
* @param outputs Vector of blobs with output memory
* @param resp Response descriptor
* @return Status code
*/
virtual StatusCode execute(std::vector<Blob::Ptr>& inputs,
std::vector<Blob::Ptr>& outputs,
ResponseDesc* resp) noexcept = 0;
};
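/**
* A minimal sketch of a custom execution implementation built on ILayerExecImpl. The class name
* and the pass-through execute() body are illustrative assumptions; a real implementation would
* derive its supported configurations from the node it was created for.
* @code{.cpp}
* #include <algorithm>
* #include "ie_iextension.h"
*
* class MyLayerImpl : public InferenceEngine::ILayerExecImpl {
* public:
*     InferenceEngine::StatusCode getSupportedConfigurations(std::vector<InferenceEngine::LayerConfig>& conf,
*                                                            InferenceEngine::ResponseDesc* resp) noexcept override {
*         conf.push_back(config_);  // report the single configuration this sketch supports
*         return InferenceEngine::OK;
*     }
*     InferenceEngine::StatusCode init(InferenceEngine::LayerConfig& config,
*                                      InferenceEngine::ResponseDesc* resp) noexcept override {
*         config_ = config;  // remember the configuration selected by the plugin
*         return InferenceEngine::OK;
*     }
*     InferenceEngine::StatusCode execute(std::vector<InferenceEngine::Blob::Ptr>& inputs,
*                                         std::vector<InferenceEngine::Blob::Ptr>& outputs,
*                                         InferenceEngine::ResponseDesc* resp) noexcept override {
*         // Illustrative identity kernel: copy the first input blob into the first output blob.
*         const float* src = inputs[0]->cbuffer().as<const float*>();
*         float* dst = outputs[0]->buffer().as<float*>();
*         std::copy(src, src + inputs[0]->size(), dst);
*         return InferenceEngine::OK;
*     }
*
* private:
*     InferenceEngine::LayerConfig config_;
* };
* @endcode
*/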
/**
* @deprecated The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on
* transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html
* @brief This class is the main extension interface
*/
class INFERENCE_ENGINE_1_0_DEPRECATED INFERENCE_ENGINE_API_CLASS(IExtension)
: public std::enable_shared_from_this<IExtension> {
public:
/**
* @brief Returns operation sets
* This method throws an exception if it is not implemented by a derived class
* @return map of opset name to opset
*/
virtual std::map<std::string, ngraph::OpSet> getOpSets();
/**
* @brief Returns a vector of implementation types for the given node
* @param node Shared pointer to the nGraph op
* @return Vector of implementation type names
*/
virtual std::vector<std::string> getImplTypes(const std::shared_ptr<ngraph::Node>& node) {
(void)node;
return {};
}
/**
* @brief Returns implementation for specific nGraph op
* @param node shared pointer to nGraph op
* @param implType implementation type
* @return shared pointer to implementation
*/
virtual ILayerImpl::Ptr getImplementation(const std::shared_ptr<ngraph::Node>& node, const std::string& implType) {
(void)node;
(void)implType;
return nullptr;
}
/**
* @brief Cleans up resources
*/
virtual void Unload() noexcept = 0;
/**
* @brief Gets extension version information and stores it in versionInfo
* @param versionInfo Pointer to version info; it is set by the extension
*/
virtual void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept = 0;
/**
* @brief Implements deprecated API
*/
INFERENCE_ENGINE_DEPRECATED(
"Do not override or use this method. Use IE_DEFINE_EXTENSION_CREATE_FUNCTION to export extension")
virtual void Release() noexcept {
delete this;
}
protected:
virtual ~IExtension() = default;
};
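/**
* A minimal sketch of an IExtension subclass. MyExtension and MyLayerImpl are illustrative names
* (MyLayerImpl refers to the ILayerExecImpl sketch above); a real extension would register its own
* custom operations in getOpSets() instead of the standard opset1 Relu used here to keep the
* sketch self-contained.
* @code{.cpp}
* #include "ie_iextension.h"
* #include "ngraph/opsets/opset1.hpp"
*
* class MyExtension : public InferenceEngine::IExtension {
* public:
*     std::map<std::string, ngraph::OpSet> getOpSets() override {
*         std::map<std::string, ngraph::OpSet> opsets;
*         ngraph::OpSet opset;
*         opset.insert<ngraph::opset1::Relu>();  // placeholder; register custom ops here
*         opsets["my_extension_opset"] = opset;
*         return opsets;
*     }
*     std::vector<std::string> getImplTypes(const std::shared_ptr<ngraph::Node>& node) override {
*         return {"CPU"};  // this sketch offers a single implementation type for every node
*     }
*     InferenceEngine::ILayerImpl::Ptr getImplementation(const std::shared_ptr<ngraph::Node>& node,
*                                                        const std::string& implType) override {
*         return std::make_shared<MyLayerImpl>();  // hypothetical implementation class from the sketch above
*     }
*     void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {
*         static InferenceEngine::Version version;
*         version.buildNumber = "0";
*         version.description = "my_extension";
*         versionInfo = &version;
*     }
*     void Unload() noexcept override {}
* };
* @endcode
*/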
/**
* @brief A shared pointer to a IExtension interface
*/
using IExtensionPtr = std::shared_ptr<IExtension>;
/**
* @brief Creates the default instance of the extension
* @param ext Extension interface
*/
INFERENCE_EXTENSION_API(void) CreateExtensionShared(IExtensionPtr& ext);
/**
* @note: Deprecated API
* @brief Creates the default instance of the extension
* @param ext Extension interface
* @param resp Response descriptor
* @return InferenceEngine::OK if the extension is constructed, InferenceEngine::GENERAL_ERROR otherwise
*/
#ifdef _MSC_VER
INFERENCE_EXTENSION_API(StatusCode)
INFERENCE_ENGINE_1_0_DEPRECATED
CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept;
#else
INFERENCE_EXTENSION_API(StatusCode)
INFERENCE_ENGINE_1_0_DEPRECATED
CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept INFERENCE_ENGINE_DEPRECATED(
"Use IE_DEFINE_EXTENSION_CREATE_FUNCTION macro");
#endif
IE_SUPPRESS_DEPRECATED_END
} // namespace InferenceEngine
/**
* @def IE_CREATE_EXTENSION
* @brief Defines the name of the function that creates an extension instance
*/
#ifndef IE_CREATE_EXTENSION
# define IE_CREATE_EXTENSION CreateExtensionShared
#endif
/**
* @deprecated The Inference Engine API is deprecated and will be removed in the 2024.0 release. For instructions on
* transitioning to the new API, please refer to https://docs.openvino.ai/latest/openvino_2_0_transition_guide.html
* @def IE_DEFINE_EXTENSION_CREATE_FUNCTION
* @brief Generates extension creation function
*/
#define IE_DEFINE_EXTENSION_CREATE_FUNCTION(ExtensionType) \
INFERENCE_EXTENSION_API(void) \
INFERENCE_ENGINE_1_0_DEPRECATED IE_CREATE_EXTENSION(std::shared_ptr<InferenceEngine::IExtension>& ext); \
void IE_CREATE_EXTENSION(std::shared_ptr<InferenceEngine::IExtension>& ext) { \
ext = std::make_shared<ExtensionType>(); \
}
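/**
* A minimal usage sketch of the macro above: placed in the extension's source file, it exports the
* creation function for a hypothetical MyExtension class derived from InferenceEngine::IExtension.
* The file and class names are illustrative assumptions.
* @code{.cpp}
* // my_extension.cpp
* // IMPLEMENT_INFERENCE_EXTENSION_API is normally defined when building the extension library
* // so that the creation function is exported from the shared object / DLL.
* #include "ie_iextension.h"
* #include "my_extension.hpp"  // declares MyExtension (hypothetical)
*
* IE_DEFINE_EXTENSION_CREATE_FUNCTION(MyExtension)
* @endcode
* The resulting shared library can then be loaded through the InferenceEngine::Extension wrapper
* and registered with InferenceEngine::Core::AddExtension().
*/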