Support for model reshape in protopipe #27398

Open · wants to merge 23 commits into master from EISW-142643_dynamic_shape

Commits (23):
- 1c51f54 building (SeptimiuIoachimNeagaIntel, Oct 25, 2024)
- b7e9f9a Building and working (SeptimiuIoachimNeagaIntel, Oct 28, 2024)
- 22bafff Building (SeptimiuIoachimNeagaIntel, Oct 28, 2024)
- fae44b5 Refactoring (SeptimiuIoachimNeagaIntel, Oct 28, 2024)
- 8fbadee Refactoring (SeptimiuIoachimNeagaIntel, Oct 29, 2024)
- 375f7f1 Refactoring (SeptimiuIoachimNeagaIntel, Oct 29, 2024)
- adf197a Refactoring (SeptimiuIoachimNeagaIntel, Nov 5, 2024)
- b08f5d9 Documentation (SeptimiuIoachimNeagaIntel, Nov 5, 2024)
- 76c8b38 option "shape" renamed to "reshape" (SeptimiuIoachimNeagaIntel, Nov 5, 2024)
- 838eb39 Refactoring after code review (SeptimiuIoachimNeagaIntel, Nov 5, 2024)
- 97f7c1a Refactoring after code review (SeptimiuIoachimNeagaIntel, Nov 5, 2024)
- b775841 Refactoring after code review (SeptimiuIoachimNeagaIntel, Nov 5, 2024)
- 13958f6 Code changes after review (SeptimiuIoachimNeagaIntel, Nov 11, 2024)
- fdbdd7f Test (SeptimiuIoachimNeagaIntel, Nov 11, 2024)
- 63c30fe Test (SeptimiuIoachimNeagaIntel, Nov 12, 2024)
- 8a4f667 Working (SeptimiuIoachimNeagaIntel, Nov 13, 2024)
- b716864 Fix (SeptimiuIoachimNeagaIntel, Nov 13, 2024)
- 6e0cb65 Merge branch 'master' into EISW-142643_dynamic_shape (SeptimiuIoachimNeagaIntel, Nov 13, 2024)
- c116f8f Changes after code review (SeptimiuIoachimNeagaIntel, Nov 14, 2024)
- 2f33907 Refactoring (SeptimiuIoachimNeagaIntel, Nov 14, 2024)
- 00e1cfc Code changes after code review (SeptimiuIoachimNeagaIntel, Nov 14, 2024)
- 0dee1d2 Fixed warning (SeptimiuIoachimNeagaIntel, Nov 14, 2024)
- e6ef9f6 Code changes after review (SeptimiuIoachimNeagaIntel, Nov 15, 2024)
1 change: 1 addition & 0 deletions src/plugins/intel_npu/tools/protopipe/README.md
@@ -60,6 +60,7 @@ log_level: INFO
 - `ol` - **Optional**. Output layer layout.
 - `iml` - **Optional**. Input model layout.
 - `oml` - **Optional**. Output model layout.
+- `reshape` - **Optional**. Sets the shape for the input layers. For example: "input1: [1,3,224,224], input2: [1,4]", or "[1,3,224,224]" when the model has a single input layer.

 Examples:
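For illustration, a hedged sketch of how the new key might appear in a scenario config; the README's own examples are collapsed in this diff, and the model and input names below are placeholders:

```
# Hypothetical scenario entries; model and input names are placeholders.
- name: model_a.xml
  reshape: [1,3,224,224]          # single input layer: one shape
- name: model_b.xml
  reshape:                        # several inputs: shape per layer
    input1: [1,3,224,224]
    input2: [1,4]
```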
4 changes: 4 additions & 0 deletions src/plugins/intel_npu/tools/protopipe/src/parser/config.cpp
@@ -345,6 +345,10 @@ struct convert<OpenVINOParams> {
         params.output_model_layout = node["oml"].as<LayerVariantAttr<std::string>>();
     }

+    if (node["reshape"]) {
+        params.reshape = node["reshape"].as<LayerVariantAttr<std::vector<size_t>>>();
+    }
+
     if (node["config"]) {
         params.config = node["config"].as<std::map<std::string, std::string>>();
     }
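For readers unfamiliar with `LayerVariantAttr`, the conversion above relies on protopipe's generic layer-attribute pattern. A minimal sketch of how such a YAML decode could dispatch on node shape, assuming `LayerVariantAttr<T>` is a variant over "unset", a single value, and a per-layer map; this mirrors, but is not necessarily, the actual converter:

```
#include <map>
#include <string>
#include <variant>
#include <vector>
#include <yaml-cpp/yaml.h>

template <typename T>
using AttrMap = std::map<std::string, T>;
// Assumed alias: unset / one value for all layers / per-layer values.
template <typename T>
using LayerVariantAttr = std::variant<std::monostate, T, AttrMap<T>>;

namespace YAML {
template <typename T>
struct convert<LayerVariantAttr<T>> {
    static bool decode(const Node& node, LayerVariantAttr<T>& attr) {
        if (node.IsMap()) {
            attr = node.as<AttrMap<T>>();  // "input1: [1,3,224,224], input2: [1,4]"
        } else {
            attr = node.as<T>();           // "[1,3,224,224]" - one input layer
        }
        return true;
    }
};
}  // namespace YAML
```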
@@ -91,6 +91,7 @@ struct OpenVINOParams {
     LayerVariantAttr<std::string> output_layout;
     LayerVariantAttr<std::string> input_model_layout;
     LayerVariantAttr<std::string> output_model_layout;
+    LayerVariantAttr<std::vector<size_t>> reshape;
     std::map<std::string, std::string> config;
     size_t nireq = 1u;
 };
@@ -31,6 +31,25 @@ static std::string getModelFileName(const InferenceParams& params) {
     ASSERT(false);
 }

+static void adjustDynamicDims(LayersInfo& layers, const std::string& layer_type) {
+    for (auto& layer : layers) {
+        auto& dims = layer.dims;
+        if (std::find(dims.begin(), dims.end(), -1) == dims.end()) {
+            continue;
+        }
+        std::stringstream info_msg;
+        info_msg << "Dynamic shape: [";
+        std::copy(dims.begin(), dims.end(), std::ostream_iterator<int>(info_msg, " "));
+        info_msg << "] has been detected for layer " << layer.name << ". Data with shape [";
+        // Fall back to 1 for every dynamic (-1) dimension before reporting
+        // the shape that will actually be provided for this layer.
+        std::replace(dims.begin(), dims.end(), -1, 1);
+        std::copy(dims.begin(), dims.end(), std::ostream_iterator<int>(info_msg, " "));
+        info_msg << "] will be provided as " << layer_type << " for this layer.";
+        LOG_INFO() << info_msg.str() << std::endl;
+    }
+}
+
 InOutLayers LayersReader::readLayers(const InferenceParams& params) {
     LOG_INFO() << "Reading model " << getModelFileName(params) << std::endl;
     if (std::holds_alternative<OpenVINOParams>(params)) {
@@ -42,5 +61,9 @@ InOutLayers LayersReader::readLayers(const InferenceParams& params) {
     // NB: Using OpenVINO to read the i/o layers information for *.onnx model
     OpenVINOParams ov;
     ov.path = OpenVINOParams::ModelPath{ort.model_path, ""};
-    return getOVReader().readLayers(ov, true /* use_results_names */);
+    auto inOutLayers = getOVReader().readLayers(ov, true /* use_results_names */);
+    adjustDynamicDims(inOutLayers.in_layers, "input");
+    adjustDynamicDims(inOutLayers.out_layers, "output");
+
+    return inOutLayers;
 }
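Since this substitution is the behavioral core of the PR, a self-contained illustration may help; the values are assumed, and `LayersInfo` plus logging are stripped away:

```
#include <algorithm>
#include <cassert>
#include <vector>

int main() {
    // Dims as read from a dynamic model: -1 marks a dynamic dimension.
    std::vector<int> dims{1, 3, -1, -1};
    // adjustDynamicDims substitutes 1 for every dynamic dimension.
    std::replace(dims.begin(), dims.end(), -1, 1);
    assert((dims == std::vector<int>{1, 3, 1, 1}));
    return 0;
}
```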
@@ -44,13 +44,12 @@ static ov::element::Type toElementType(int cvdepth) {
     throw std::logic_error("Failed to convert opencv depth to ov::element::Type");
 }

-static std::vector<int> toDims(const std::vector<size_t>& sz_vec) {
+static std::vector<int> toDims(const ov::PartialShape& partial_shape) {
     std::vector<int> result;
-    result.reserve(sz_vec.size());
-    for (auto sz : sz_vec) {
-        // FIXME: Probably requires some check...
-        result.push_back(static_cast<int>(sz));
-    }
-
+    result.reserve(partial_shape.size());
+    auto to_int = [](auto dim) { return dim.is_dynamic() ? -1 : static_cast<int>(dim.get_length()); };
+    std::transform(partial_shape.begin(), partial_shape.end(), std::back_inserter(result), to_int);
     return result;
 }

@@ -75,7 +74,7 @@ std::vector<LayerInfo> ovToLayersInfo(const InfoVec& vec) {
     std::vector<LayerInfo> layers;
     layers.reserve(vec.size());
     std::transform(vec.begin(), vec.end(), std::back_inserter(layers), [](const auto& node) {
-        return LayerInfo{node.get_any_name(), toDims(node.get_shape()), toPrecision(node.get_element_type())};
+        return LayerInfo{node.get_any_name(), toDims(node.get_partial_shape()), toPrecision(node.get_element_type())};
     });
     return layers;
 };
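For intuition about the new `toDims`: a partial shape with a dynamic dimension maps to -1 in the integer dims vector. A self-contained check of that mapping; the loop restates the lambda above rather than calling the file-local `toDims`, and the shape values are assumed:

```
#include <cassert>
#include <vector>
#include <openvino/core/partial_shape.hpp>

int main() {
    ov::PartialShape ps{1, 3, ov::Dimension::dynamic(), 224};
    std::vector<int> dims;
    for (const auto& d : ps) {
        dims.push_back(d.is_dynamic() ? -1 : static_cast<int>(d.get_length()));
    }
    assert((dims == std::vector<int>{1, 3, -1, 224}));
    return 0;
}
```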
@@ -128,6 +127,15 @@ static void cfgOutputPostproc(ov::preprocess::PrePostProcessor& ppp, const std::
     }
 }

+static void cfgReshape(const std::shared_ptr<ov::Model>& model,
+                       const AttrMap<std::vector<size_t>>& reshape_map) {
+    std::map<std::string, ov::PartialShape> partial_shapes;
+    for (const auto& [layer_name, shape] : reshape_map) {
+        partial_shapes.emplace(layer_name, shape);
+    }
+    model->reshape(partial_shapes);
+}
+
 static std::vector<std::string> extractLayerNames(const std::vector<ov::Output<ov::Node>>& nodes) {
     std::vector<std::string> names;
     std::transform(nodes.begin(), nodes.end(), std::back_inserter(names), [](const auto& node) {
@@ -148,6 +156,9 @@ InOutLayers OpenVINOLayersReader::Impl::readFromModel(const std::string& model_p
     const auto iml_map = unpackLayerAttr(params.input_model_layout, input_names, "input model layout");
     cfgInputPreproc(ppp, model, ip_map, il_map, iml_map);

+    const auto reshape_map = unpackLayerAttr(params.reshape, input_names, "reshape");
+    cfgReshape(model, reshape_map);
+
     const auto& output_names = extractLayerNames(model->outputs());
     const auto op_map = unpackLayerAttr(params.output_precision, output_names, "output precision");
     const auto ol_map = unpackLayerAttr(params.output_layout, output_names, "output layout");
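`cfgReshape` above funnels into `ov::Model::reshape`, which accepts a map from input tensor name to `ov::PartialShape`. A minimal standalone sketch of that call in isolation; the model path and input name are placeholders:

```
#include <openvino/openvino.hpp>

int main() {
    ov::Core core;
    // "model.xml" and "input1" are placeholders for a real model and input.
    auto model = core.read_model("model.xml");
    model->reshape({{"input1", ov::PartialShape{1, 3, 224, 224}}});
    return 0;
}
```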
@@ -22,6 +22,11 @@ static cv::gapi::GNetPackage getNetPackage(const std::string& tag, const OpenVIN
         const auto& blob_path = std::get<OpenVINOParams::BlobPath>(params.path);
         network = std::make_unique<P>(tag, blob_path.blob, params.device);
     }
+    if (std::holds_alternative<AttrMap<std::vector<size_t>>>(params.reshape)) {
+        network->cfgReshape(std::get<AttrMap<std::vector<size_t>>>(params.reshape));
+    } else if (std::holds_alternative<std::vector<size_t>>(params.reshape)) {
+        // NB: std::get on an unset (monostate) attribute would throw, so the
+        // single-shape case is guarded explicitly as well.
+        network->cfgReshape(std::get<std::vector<size_t>>(params.reshape));
+    }

     network->cfgPluginConfig(params.config);
     network->cfgNumRequests(params.nireq);
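As a closing note, the same dispatch can be phrased as a single `std::visit` over the variant, which makes the "unset" case explicit; a sketch under the same assumed three-state `LayerVariantAttr` as in the parser section:

```
#include <map>
#include <string>
#include <variant>
#include <vector>

template <typename T>
using AttrMap = std::map<std::string, T>;
// Assumed alias: unset / one value for all layers / per-layer values.
template <typename T>
using LayerVariantAttr = std::variant<std::monostate, T, AttrMap<T>>;

template <class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template <class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

// Applies an optional reshape attribute to any network wrapper exposing
// cfgReshape overloads for the single-shape and per-layer forms.
template <typename Network>
void applyReshape(Network& network, const LayerVariantAttr<std::vector<size_t>>& reshape) {
    std::visit(overloaded{[](std::monostate) { /* no reshape requested */ },
                          [&](const std::vector<size_t>& shape) { network.cfgReshape(shape); },
                          [&](const AttrMap<std::vector<size_t>>& per_layer) { network.cfgReshape(per_layer); }},
               reshape);
}
```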