From 594e63fb603ec9cbebaf3a6f6e49ba9e0636ab79 Mon Sep 17 00:00:00 2001
From: Yordan Madzhunkov
Date: Thu, 20 Jun 2024 17:42:02 +0300
Subject: [PATCH] Format code

---
 crates/core/build.rs                          |  38 +--
 .../core/tests/core-wasi-test/src/imagenet.rs | 115 +++++--
 .../core-wasi-test/src/imagenet_classes.rs    |   2 -
 crates/core/tests/core-wasi-test/src/main.rs  |  10 +-
 crates/core/tests/ml_component.rs             | 281 +++++++++---------
 5 files changed, 251 insertions(+), 195 deletions(-)

diff --git a/crates/core/build.rs b/crates/core/build.rs
index 9e136aa8cf..9303042027 100644
--- a/crates/core/build.rs
+++ b/crates/core/build.rs
@@ -1,47 +1,47 @@
-
 use std::env;
 use std::fs;
 use std::path::PathBuf;
 //extern crate curl;
-use std::io::Write;
 use curl::easy::Easy;
+use std::io::Write;
 
 fn main() {
-    let base_url = "https://raw.githubusercontent.com/blocksense-network/imagenet_openvino/db44329b8e2b3398c9cc34dd56d94f3ce6fd6e21/";//images/0.jpg
+    let base_url = "https://raw.githubusercontent.com/blocksense-network/imagenet_openvino/db44329b8e2b3398c9cc34dd56d94f3ce6fd6e21/"; //images/0.jpg
 
-    let imagenet_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../target/test-programs/imagenet");
+    let imagenet_path =
+        PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../target/test-programs/imagenet");
     let images_dir = imagenet_path.join("images");
     fs::create_dir_all(images_dir).unwrap();
-    let files = [
-        "model.xml",
-        "model.bin",
-        "images/0.jpg",
-        "images/1.jpg",
-    ];
+    let files = ["model.xml", "model.bin", "images/0.jpg", "images/1.jpg"];
     for file in files {
-        try_download(&(base_url.to_owned()+file), &imagenet_path.join(file)).unwrap();
+        try_download(&(base_url.to_owned() + file), &imagenet_path.join(file)).unwrap();
     }
-    
+
     println!("cargo:rerun-if-changed=build.rs");
 }
 
 fn try_download(url: &str, filename: &PathBuf) -> Result<(), anyhow::Error> {
     let mut easy = Easy::new();
-    easy.url(url).map_err(|e| anyhow::anyhow!("Error {} when downloading {}", e.to_string(), url))?;
+    easy.url(url)
+        .map_err(|e| anyhow::anyhow!("Error {} when downloading {}", e.to_string(), url))?;
 
     let mut dst = Vec::new();
     {
         let mut transfer = easy.transfer();
-        transfer.write_function(|data| {
-            dst.extend_from_slice(data);
-            Ok(data.len())
-        }).unwrap();
-        transfer.perform().map_err(|e| anyhow::anyhow!("Error {} when downloading {}", e.to_string(), url))?;
+        transfer
+            .write_function(|data| {
+                dst.extend_from_slice(data);
+                Ok(data.len())
+            })
+            .unwrap();
+        transfer
+            .perform()
+            .map_err(|e| anyhow::anyhow!("Error {} when downloading {}", e.to_string(), url))?;
    }
     {
         let mut file = std::fs::File::create(filename)?;
         file.write_all(dst.as_slice())?;
     }
     Ok(())
-}
\ No newline at end of file
+}
diff --git a/crates/core/tests/core-wasi-test/src/imagenet.rs b/crates/core/tests/core-wasi-test/src/imagenet.rs
index 6d535451bb..0a200564ab 100644
--- a/crates/core/tests/core-wasi-test/src/imagenet.rs
+++ b/crates/core/tests/core-wasi-test/src/imagenet.rs
@@ -1,11 +1,9 @@
-
-
-use crate::ml::test::test::{graph, tensor, inference};
+use crate::ml::test::test::{graph, inference, tensor};
 use image2tensor;
 use image2tensor::convert_image_to_tensor_bytes;
 
-use crate::Path;
 use crate::imagenet_classes;
+use crate::Path;
 
 pub fn elapsed_to_string(fn_name: &str, elapsed: u128) -> String {
     if elapsed < 1000 {
@@ -13,7 +11,11 @@ pub fn elapsed_to_string(fn_name: &str, elapsed: u128) -> String {
     } else if elapsed < 1000 * 1000 {
         format!("`{}` took {:.2} µs", fn_name, elapsed as f64 / 1000.0)
     } else {
-        format!("`{}` took {:.2} ms", fn_name, elapsed as f64 / 1000.0 / 1000.0)
+        format!(
+            "`{}` took {:.2} ms",
+            fn_name,
+            elapsed as f64 / 1000.0 / 1000.0
+        )
     }
 }
 
@@ -33,49 +35,67 @@ fn map_string_to_execution_target(target: &str) -> Result<graph::ExecutionTarget, String> {
         "CPU" => Ok(graph::ExecutionTarget::Cpu),
         "GPU" => Ok(graph::ExecutionTarget::Gpu),
         "TPU" => Ok(graph::ExecutionTarget::Tpu),
-        _ => {
-            Err(format!("Unknown execution target = {}", target))
-        }
+        _ => Err(format!("Unknown execution target = {}", target)),
     }
 }
 
-
-pub fn imagenet_openvino_test(path_as_string: String, target_as_string: String, image_file: String) -> std::result::Result<(), Box<dyn std::error::Error>> {
+pub fn imagenet_openvino_test(
+    path_as_string: String,
+    target_as_string: String,
+    image_file: String,
+) -> std::result::Result<(), Box<dyn std::error::Error>> {
     let path = Path::new(&path_as_string);
     let target = map_string_to_execution_target(&target_as_string)?;
     let model = {
         let start_for_elapsed_macro = std::time::Instant::now();
         let model: Vec<u8> = std::fs::read(&path.join("model.xml"))?;
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
-        eprintln!("Loaded model from xml {} {}", bytes_to_string(model.len()), elapsed_to_string("fs::read", elapsed));
+        eprintln!(
+            "Loaded model from xml {} {}",
+            bytes_to_string(model.len()),
+            elapsed_to_string("fs::read", elapsed)
+        );
         model
     };
     let weights = {
         let start_for_elapsed_macro = std::time::Instant::now();
         let weights = std::fs::read(&path.join("model.bin"))?;
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
-        eprintln!("Loaded weights {} {}", bytes_to_string(weights.len()), elapsed_to_string("fs::read", elapsed));
+        eprintln!(
+            "Loaded weights {} {}",
+            bytes_to_string(weights.len()),
+            elapsed_to_string("fs::read", elapsed)
+        );
         weights
     };
     let imagenet_graph = {
         let start_for_elapsed_macro = std::time::Instant::now();
-        let imagenet_graph = graph::load(&[model, weights], graph::GraphEncoding::Openvino, target).unwrap();
+        let imagenet_graph =
+            graph::load(&[model, weights], graph::GraphEncoding::Openvino, target).unwrap();
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
         eprintln!("---- {:?} ----", target);
-        eprintln!("Loaded graph with ID: {:?} {}", imagenet_graph, elapsed_to_string("graph::load", elapsed));
+        eprintln!(
+            "Loaded graph with ID: {:?} {}",
+            imagenet_graph,
+            elapsed_to_string("graph::load", elapsed)
+        );
         imagenet_graph
     };
     let context = {
         let start_for_elapsed_macro = std::time::Instant::now();
         let context = graph::Graph::init_execution_context(&imagenet_graph).unwrap();
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
-        eprintln!("Created context with ID: {:?} {}", context, elapsed_to_string("Graph::init_execution_context", elapsed));
+        eprintln!(
+            "Created context with ID: {:?} {}",
+            context,
+            elapsed_to_string("Graph::init_execution_context", elapsed)
+        );
         context
     };
 
-    let tensor_dimensions:Vec<u32> = vec![1, 3, 224, 224];
+    let tensor_dimensions: Vec<u32> = vec![1, 3, 224, 224];
     let tensor_data = convert_image_to_tensor_bytes(
-        &image_file,//"images/0.jpg",
+        &image_file, //"images/0.jpg",
         tensor_dimensions[2],
         tensor_dimensions[3],
         image2tensor::TensorType::F32,
@@ -83,47 +103,70 @@ pub fn imagenet_openvino_test(
     )
     .or_else(|e| Err(e))
     .unwrap();
-
-
+
     let tensor_id = {
         let start_for_elapsed_macro = std::time::Instant::now();
         let tensor_type = tensor::TensorType::Fp32;
         let tensor_id = tensor::Tensor::new(&tensor_dimensions, tensor_type, &tensor_data);
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
-        eprintln!("Created tensor with ID: {:?} {}", tensor_id, elapsed_to_string("Tensor::new", elapsed));
+        eprintln!(
+            "Created tensor with ID: {:?} {}",
+            tensor_id,
+            elapsed_to_string("Tensor::new", elapsed)
+        );
         tensor_id
     };
     let input_name = "0";
     {
         let start_for_elapsed_macro = std::time::Instant::now();
-        let set_input_result = inference::GraphExecutionContext::set_input(&context, input_name, tensor_id).unwrap();
+        let set_input_result =
+            inference::GraphExecutionContext::set_input(&context, input_name, tensor_id).unwrap();
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
-        eprintln!("Input set with ID: {:?} {}", set_input_result, elapsed_to_string("GraphExecutionContext::set_input", elapsed));
+        eprintln!(
+            "Input set with ID: {:?} {}",
+            set_input_result,
+            elapsed_to_string("GraphExecutionContext::set_input", elapsed)
+        );
     }
     {
         let start_for_elapsed_macro = std::time::Instant::now();
         let _infered_result = inference::GraphExecutionContext::compute(&context).unwrap();
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
-        eprintln!("Executed graph inference. {}", elapsed_to_string("GraphExecutionContext::compute", elapsed));
+        eprintln!(
+            "Executed graph inference. {}",
+            elapsed_to_string("GraphExecutionContext::compute", elapsed)
+        );
     }
-    let output_result_id =  {
+    let output_result_id = {
         let start_for_elapsed_macro = std::time::Instant::now();
-        let output_result_id = inference::GraphExecutionContext::get_output(&context, input_name).unwrap();
+        let output_result_id =
+            inference::GraphExecutionContext::get_output(&context, input_name).unwrap();
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
-        eprintln!("Obtaining output {}", elapsed_to_string("GraphExecutionContext::get_output", elapsed));
+        eprintln!(
+            "Obtaining output {}",
+            elapsed_to_string("GraphExecutionContext::get_output", elapsed)
+        );
         output_result_id
     };
-    let (output_data, output_dimensions, output_type) =  {
+    let (output_data, output_dimensions, output_type) = {
         let start_for_elapsed_macro = std::time::Instant::now();
         let output_data = tensor::Tensor::data(&output_result_id);
         let output_dimensions = tensor::Tensor::dimensions(&output_result_id);
         let output_type = tensor::Tensor::ty(&output_result_id);
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
-        eprintln!("Copying data from tensor. {}", elapsed_to_string("Tensor::data+dimensions+type", elapsed));
+        eprintln!(
{}", + elapsed_to_string("Tensor::data+dimensions+type", elapsed) + ); (output_data, output_dimensions, output_type) }; - if output_dimensions.len() == 2 && output_dimensions[0] == 1 && output_dimensions[1] == 1001 && output_type == tensor::TensorType::Fp32 { - let output_vec_f32 = unsafe { std::slice::from_raw_parts(output_data.as_ptr() as *const f32, 1001) }; + if output_dimensions.len() == 2 + && output_dimensions[0] == 1 + && output_dimensions[1] == 1001 + && output_type == tensor::TensorType::Fp32 + { + let output_vec_f32 = + unsafe { std::slice::from_raw_parts(output_data.as_ptr() as *const f32, 1001) }; let results = sort_results(&output_vec_f32); for i in 0..3 { println!( @@ -133,7 +176,10 @@ pub fn imagenet_openvino_test(path_as_string: String, target_as_string: String, ); } } else { - eprintln!("Output not as expected, output = {:?} {:?}", &output_dimensions, &output_type); + eprintln!( + "Output not as expected, output = {:?} {:?}", + &output_dimensions, &output_type + ); } Ok(()) } @@ -146,7 +192,10 @@ pub fn sort_results(buffer: &[f32]) -> Vec { .iter() .skip(1) .enumerate() - .map(|(c, p)| InferenceResult{ index: c, weight: *p}) + .map(|(c, p)| InferenceResult { + index: c, + weight: *p, + }) .collect(); results.sort_by(|a, b| b.weight.partial_cmp(&a.weight).unwrap()); results @@ -154,7 +203,7 @@ pub fn sort_results(buffer: &[f32]) -> Vec { // A wrapper for class ID and match probabilities. #[derive(Debug, PartialEq)] -pub struct InferenceResult{ +pub struct InferenceResult { pub index: usize, pub weight: f32, } diff --git a/crates/core/tests/core-wasi-test/src/imagenet_classes.rs b/crates/core/tests/core-wasi-test/src/imagenet_classes.rs index 2ab1ed9bcb..6aab4910c3 100644 --- a/crates/core/tests/core-wasi-test/src/imagenet_classes.rs +++ b/crates/core/tests/core-wasi-test/src/imagenet_classes.rs @@ -1019,5 +1019,3 @@ pub const IMAGENET_CLASSES: [&str; 1000] = [ "ear, spike, capitulum", "toilet tissue, toilet paper, bathroom tissue" ]; - - diff --git a/crates/core/tests/core-wasi-test/src/main.rs b/crates/core/tests/core-wasi-test/src/main.rs index 93cbb7010e..80a5ed73ae 100644 --- a/crates/core/tests/core-wasi-test/src/main.rs +++ b/crates/core/tests/core-wasi-test/src/main.rs @@ -24,22 +24,17 @@ mod ml { }); } -mod imagenet_classes; mod imagenet; +mod imagenet_classes; use imagenet::imagenet_openvino_test; - - use std::path::Path; use crate::hello::test::test::gggg2::say_hello; - - type Result = std::result::Result<(), Box>; - fn main() -> Result { let mut args = std::env::args(); let cmd = args.next().expect("cmd"); @@ -89,7 +84,6 @@ fn main() -> Result { let target_as_string = args.next().expect("target"); let image_file_as_string = args.next().expect("image_file"); _ = imagenet_openvino_test(path_as_string, target_as_string, image_file_as_string); - } "sleep" => { let duration = @@ -104,4 +98,4 @@ fn main() -> Result { cmd => panic!("unknown cmd {cmd}"), }; Ok(()) -} \ No newline at end of file +} diff --git a/crates/core/tests/ml_component.rs b/crates/core/tests/ml_component.rs index bfb28f3a26..2fd5578c6e 100644 --- a/crates/core/tests/ml_component.rs +++ b/crates/core/tests/ml_component.rs @@ -1,7 +1,6 @@ pub mod ml { wasmtime::component::bindgen!("ml" in "tests/core-wasi-test/wit"); - use std::fmt::format; //use anyhow::Ok; @@ -53,7 +52,7 @@ pub mod ml { pub struct GraphExecutionContextInternalData { pub cnn_network: openvino::CNNNetwork, - pub executable_network: Mutex, + //pub executable_network: Mutex, pub infer_request: openvino::InferRequest, } @@ -77,15 +76,19 
@@ -77,15 +76,19 @@ pub mod ml {
         pub errors: table::Table<ErrorInternalData>,
     }
 
-
-
     impl MLHostImpl {
-
-        fn new_error(errors: &mut table::Table<ErrorInternalData>, code: ErrorCode, message: String) -> Resource<Error> {
+        fn new_error(
+            errors: &mut table::Table<ErrorInternalData>,
+            code: ErrorCode,
+            message: String,
+        ) -> Resource<Error> {
             errors
-                .push(ErrorInternalData { code: code, message: message,})
-                .map(Resource::<Error>::new_own)
-                .expect("Can't allocate error")
+                .push(ErrorInternalData {
+                    code: code,
+                    message: message,
+                })
+                .map(Resource::<Error>::new_own)
+                .expect("Can't allocate error")
         }
 
         fn init_execution_context_internal(
@@ -95,64 +98,73 @@ pub mod ml {
         ) -> Result<Resource<GraphExecutionContext>, anyhow::Error> {
             if openvino.is_none() {
                 openvino.replace(openvino::Core::new(None)?);
-            } 
+            }
             if openvino.is_some() {
                 let mut cnn_network = openvino
                     .as_mut()
                     .context("Can't create openvino graph without backend")?
                     .read_network_from_buffer(&graph.xml, &graph.weights)?;
-            // Construct OpenVINO graph structures: `cnn_network` contains the graph
-            // structure, `exec_network` can perform inference.
-            //let core = self
-            //    .0
-            //    .as_mut()
-            //    .expect("openvino::Core was previously constructed");
-            //let mut cnn_network = core.read_network_from_buffer(&xml, &weights)?;
-
-            // TODO: this is a temporary workaround. We need a more elegant way to
-            // specify the layout in the long run. However, without this newer
-            // versions of OpenVINO will fail due to parameter mismatch.
+                // Construct OpenVINO graph structures: `cnn_network` contains the graph
+                // structure, `exec_network` can perform inference.
+                //let core = self
+                //    .0
+                //    .as_mut()
+                //    .expect("openvino::Core was previously constructed");
+                //let mut cnn_network = core.read_network_from_buffer(&xml, &weights)?;
+
+                // TODO: this is a temporary workaround. We need a more elegant way to
+                // specify the layout in the long run. However, without this newer
+                // versions of OpenVINO will fail due to parameter mismatch.
-            for i in 0..cnn_network.get_inputs_len().unwrap() {
-                let name = cnn_network.get_input_name(i)?;
-                cnn_network.set_input_layout(&name, Layout::NHWC)?;
-            }
+                for i in 0..cnn_network.get_inputs_len().unwrap() {
+                    let name = cnn_network.get_input_name(i)?;
+                    cnn_network.set_input_layout(&name, Layout::NHWC)?;
+                }
 
-            let mut exec_network = openvino
-                .as_mut()
-                .expect("")
-                .load_network(&cnn_network, map_execution_target_to_string(graph.target))?;
-            let infer_request = exec_network
-                .create_infer_request()
-                .context("Can't create InferRequest")?;
-            let graph_execution_context = GraphExecutionContextInternalData {
-                cnn_network: cnn_network,
-                executable_network: Mutex::new(exec_network),
-                infer_request: infer_request,
-            };
-            return executions
-                .push(graph_execution_context)
-                .map(Resource::<GraphExecutionContext>::new_own)
-                .map_err(|_|anyhow!("Can't store execution context"));
+                let mut exec_network = openvino
+                    .as_mut()
+                    .expect("")
+                    .load_network(&cnn_network, map_execution_target_to_string(graph.target))?;
+                let infer_request = exec_network
+                    .create_infer_request()
+                    .context("Can't create InferRequest")?;
+                let graph_execution_context = GraphExecutionContextInternalData {
+                    cnn_network: cnn_network,
+                    //executable_network: Mutex::new(exec_network),
+                    infer_request: infer_request,
+                };
+                return executions
+                    .push(graph_execution_context)
+                    .map(Resource::<GraphExecutionContext>::new_own)
+                    .map_err(|_| anyhow!("Can't store execution context"));
             }
             Err(anyhow!("Can't create openvino backend"))
         }
 
-        fn get_output_internal(graph_execution: &mut GraphExecutionContextInternalData, input_name: String) -> Result<TensorInternalData, String> {
-            let index = input_name
-                .parse::<usize>()
-                .map_err(|err| format!("Can't parse {} to usize for input_name", input_name))?;
+        fn get_output_internal(
+            graph_execution: &mut GraphExecutionContextInternalData,
+            input_name: String,
+        ) -> Result<TensorInternalData, String> {
+            let index = input_name.parse::<usize>().map_err(|err| {
+                format!(
+                    "Can't parse {} to usize for input_name, err = {err}",
+                    input_name
+                )
+            })?;
             let output_name = graph_execution
                 .cnn_network
                 .get_output_name(index)
-                .map_err(|err| format!("Can't find output name for ID = {index}"))?;
+                .map_err(|err| format!("Can't find output name for ID = {index}, err = {err}"))?;
             let blob = graph_execution
                 .infer_request
                 .get_blob(&output_name)
-                .map_err(|err| format!("Can't get blob for output name = {output_name}"))?;
-            let tensor_desc = blob.tensor_desc()
-                .map_err(|err| format!("Can't get blob description"))?;
+                .map_err(|err| {
+                    format!("Can't get blob for output name = {output_name}, err = {err}")
+                })?;
+            let tensor_desc = blob
+                .tensor_desc()
+                .map_err(|err| format!("Can't get blob description, err = {err}"))?;
             let buffer = blob
                 .buffer()
                 .map_err(|err| format!("Can't get blob buffer, error = {err}"))?
@@ -171,7 +183,6 @@ pub mod ml {
             };
             Ok(tensor)
         }
-
     }
 
     impl graph::HostGraph for MLHostImpl {
@@ -180,24 +191,22 @@ pub mod ml {
         fn init_execution_context(
             &mut self,
             graph: Resource<Graph>,
         ) -> Result<Resource<GraphExecutionContext>, Resource<Error>> {
             let res = match self.graphs.get(graph.rep()) {
-                Some(graph) => {
-                    MLHostImpl::init_execution_context_internal(graph, &mut self.openvino, &mut self.executions)
-                        .map_err(|err|
-                            ErrorInternalData {
-                                code: ErrorCode::RuntimeError,
-                                message: err.to_string(),
-                            }
-                        )
-                }
-                None => {
-                    Err(ErrorInternalData {
-                        code: ErrorCode::RuntimeError,
-                        message: "Can't create graph execution context".to_string(),
-                    })
-                }
+                Some(graph) => MLHostImpl::init_execution_context_internal(
+                    graph,
+                    &mut self.openvino,
+                    &mut self.executions,
+                )
+                .map_err(|err| ErrorInternalData {
+                    code: ErrorCode::RuntimeError,
+                    message: err.to_string(),
+                }),
+                None => Err(ErrorInternalData {
+                    code: ErrorCode::RuntimeError,
+                    message: "Can't create graph execution context".to_string(),
+                }),
             };
             match res {
-                Ok(res) => { return Ok(res) }
+                Ok(res) => return Ok(res),
                 Err(e) => {
                     return Err(MLHostImpl::new_error(&mut self.errors, e.code, e.message));
                 }
@@ -207,25 +216,24 @@ pub mod ml {
         fn drop(&mut self, graph: Resource<Graph>) -> Result<(), anyhow::Error> {
             self.graphs
                 .remove(graph.rep())
-                .context(format!("Can't find graph with ID = {}", graph.rep()))?;
-            Ok(())
+                .context(format!("Can't find graph with ID = {}", graph.rep()))
+                .map(|_| ())
         }
     }
 
     impl errors::HostError for MLHostImpl {
-        fn new(
-            &mut self,
-            code: errors::ErrorCode,
-            message: String,
-        ) -> Resource<Error> {
+        fn new(&mut self, code: errors::ErrorCode, message: String) -> Resource<Error> {
             MLHostImpl::new_error(&mut self.errors, code, message)
         }
 
-        fn drop(&mut self, error: Resource<Error>) -> std::result::Result<(), anyhow::Error> {
-            if let Some(e) = self.errors.remove(error.rep()) {
-                return Ok(());
-            }
-            Err(anyhow!("Can't find error with ID = {}", error.rep()))
+        fn drop(
+            &mut self,
+            error: Resource<Error>,
+        ) -> std::result::Result<(), anyhow::Error> {
+            self.errors
+                .remove(error.rep())
+                .context(format!("Can't find error with ID = {}", error.rep()))
+                .map(|_| ())
         }
 
         fn code(&mut self, error: Resource<Error>) -> ErrorCode {
@@ -259,42 +267,34 @@ pub mod ml {
             .map(Resource::<Tensor>::new_own)
             .expect("Can't allocate tensor")
         }
-        fn dimensions(
-            &mut self,
-            tensor: Resource<Tensor>,
-        ) -> Vec<u32> {
+        fn dimensions(&mut self, tensor: Resource<Tensor>) -> Vec<u32> {
             if let Some(t) = self.tensors.get(tensor.rep()) {
                 return t.tensor_dimensions.clone();
             }
             panic!("Can't find tensor with ID = {}", tensor.rep());
         }
 
-        fn ty(
-            &mut self,
-            tensor: Resource<Tensor>,
-        ) -> tensor::TensorType {
+        fn ty(&mut self, tensor: Resource<Tensor>) -> tensor::TensorType {
             if let Some(t) = self.tensors.get(tensor.rep()) {
                 return t.tensor_type;
             }
             panic!("Can't find tensor with ID = {}", tensor.rep());
         }
 
-        fn data(
-            &mut self,
-            tensor: Resource<Tensor>,
-        ) -> tensor::TensorData {
+        fn data(&mut self, tensor: Resource<Tensor>) -> tensor::TensorData {
             if let Some(t) = self.tensors.get(tensor.rep()) {
                 return t.tensor_data.clone();
             }
             panic!("Can't find tensor with ID = {}", tensor.rep());
         }
 
-        fn drop(&mut self, tensor: Resource<Tensor>) -> std::result::Result<(), anyhow::Error> {
+        fn drop(
+            &mut self,
+            tensor: Resource<Tensor>,
+        ) -> std::result::Result<(), anyhow::Error> {
             self.tensors
                 .remove(tensor.rep())
                 .context(format!("Can't find tensor with ID = {}", tensor.rep()))
-                .map(|_|())
-
-
+                .map(|_| ())
         }
     }
 
@@ -321,27 +321,29 @@ pub mod ml {
                 .map(|&d| d as usize)
                 .collect::<Vec<_>>();
             let desc = TensorDesc::new(Layout::NHWC, &dimensions, precision);
-            let blob = openvino::Blob::new(&desc, &tensor_resource.tensor_data).expect("Error in Blob::new");
+            let blob = openvino::Blob::new(&desc, &tensor_resource.tensor_data)
+                .expect("Error in Blob::new");
             let execution_context: &mut GraphExecutionContextInternalData = self
                 .executions
                 .get_mut(graph_execution_context.rep())
-                .expect(format!(
-                    "Can't find graph execution context with ID = {}",
-                    graph_execution_context.rep()
-                ).as_str());
+                .expect(
+                    format!(
+                        "Can't find graph execution context with ID = {}",
+                        graph_execution_context.rep()
+                    )
+                    .as_str(),
+                );
             let input_name = execution_context
                 .cnn_network
                 .get_input_name(index)
                 .expect(format!("Can't find input with name = {}", index).as_str());
             match execution_context.infer_request.set_blob(&input_name, &blob) {
                 Ok(res) => Ok(res),
-                Err(err) => Err(
-                    self.new(
+                Err(err) => Err(self.new(
                     ErrorCode::RuntimeError,
                     format!("Inference error = {:?}", err.to_string()),
                 )),
             }
-
         }
 
         fn compute(
@@ -351,16 +353,21 @@ pub mod ml {
             &mut self,
             graph_execution_context: Resource<GraphExecutionContext>,
         ) -> Result<(), Resource<Error>> {
             let graph_execution = self
                 .executions
                 .get_mut(graph_execution_context.rep())
-                .ok_or(MLHostImpl::new_error(&mut self.errors,
-                    ErrorCode::RuntimeError,
-                    format!("Can't find graph execution context with ID = {}", graph_execution_context.rep())))?;
+                .ok_or(MLHostImpl::new_error(
+                    &mut self.errors,
+                    ErrorCode::RuntimeError,
+                    format!(
+                        "Can't find graph execution context with ID = {}",
+                        graph_execution_context.rep()
+                    ),
+                ))?;
             match graph_execution.infer_request.infer() {
                 Ok(..) => Ok(()),
-                Err(err) => Err(
-                    MLHostImpl::new_error(&mut self.errors,
-                        ErrorCode::RuntimeError,
-                        format!("Inference error = {:?}", err.to_string()))
-                ),
+                Err(err) => Err(MLHostImpl::new_error(
+                    &mut self.errors,
+                    ErrorCode::RuntimeError,
+                    format!("Inference error = {:?}", err.to_string()),
+                )),
             }
         }
 
@@ -368,28 +375,34 @@ pub mod ml {
         fn get_output(
             &mut self,
             graph_execution_context: Resource<GraphExecutionContext>,
             input_name: String,
-        ) -> Result<Resource<Tensor>, Resource<Error>>
-        {
-
+        ) -> Result<Resource<Tensor>, Resource<Error>> {
             let graph_execution = self
                 .executions
                 .get_mut(graph_execution_context.rep())
-                .ok_or(format!("Can't find graph execution context with ID = {}", graph_execution_context.rep())).unwrap();
+                .ok_or(format!(
+                    "Can't find graph execution context with ID = {}",
+                    graph_execution_context.rep()
+                ))
+                .unwrap();
 
             match MLHostImpl::get_output_internal(graph_execution, input_name) {
-                Ok(tensor) => {
-                    self.tensors
+                Ok(tensor) => self
+                    .tensors
                     .push(tensor)
                     .map(Resource::<Tensor>::new_own)
-                    .map_err(|_| self.new(ErrorCode::RuntimeError, format!("Can't create tensor for get_output")))
-                }
-                Err(err) => {
-                    Err(MLHostImpl::new_error(&mut self.errors,
-                        ErrorCode::RuntimeError,
-                        err))
-                }
+                    .map_err(|_| {
+                        MLHostImpl::new_error(
+                            &mut self.errors,
+                            ErrorCode::RuntimeError,
+                            format!("Can't create tensor for get_output"),
+                        )
+                    }),
+                Err(err) => Err(MLHostImpl::new_error(
+                    &mut self.errors,
+                    ErrorCode::RuntimeError,
+                    err,
+                )),
             }
-
         }
 
@@ -400,9 +413,7 @@ pub mod ml {
         fn drop(
             &mut self,
             execution: Resource<GraphExecutionContext>,
         ) -> std::result::Result<(), anyhow::Error> {
             let id = execution.rep();
             self.executions
                 .remove(id)
                 .context("Can't drop GraphExecutionContext with id = {id}")
-                .map(|_|())
-
-
+                .map(|_| ())
         }
     }
 
@@ -415,14 +426,18 @@ pub mod ml {
     impl graph::Host for MLHostImpl {
         fn load(
             &mut self,
             graph: Vec<GraphBuilder>,
             graph_encoding: GraphEncoding,
             target: ExecutionTarget,
         ) -> Result<Resource<Graph>, Resource<Error>> {
             if graph.len() != 2 {
-                return Err(MLHostImpl::new_error(&mut self.errors,
-                    ErrorCode::RuntimeError,
-                    format!("Expected 2 elements in graph builder vector")));
+                return Err(MLHostImpl::new_error(
+                    &mut self.errors,
+                    ErrorCode::RuntimeError,
+                    format!("Expected 2 elements in graph builder vector"),
+                ));
             }
             if graph_encoding != GraphEncoding::Openvino {
-                return Err(MLHostImpl::new_error(&mut self.errors,
-                    ErrorCode::RuntimeError,
format!("Only OpenVINO encoding is supported"))); + return Err(MLHostImpl::new_error( + &mut self.errors, + ErrorCode::RuntimeError, + format!("Only OpenVINO encoding is supported"), + )); } // Read the guest array. let graph_internal_data = GraphInternalData { @@ -453,7 +468,7 @@ pub mod ml { &mut self, _graph: String, ) -> Result, Resource> { - panic!("[graph::Host] fn load_by_name -> Not implemented"); + panic!("[graph::Host] fn load_by_name -> Not implemented"); } }