Commit

Fix lint errors
Yordan Madzhunkov committed Jun 25, 2024
1 parent ef66a81 commit a1b7461
Showing 5 changed files with 243 additions and 70 deletions.
22 changes: 9 additions & 13 deletions crates/core/tests/core-wasi-test/src/imagenet.rs
@@ -1,5 +1,4 @@
 use crate::ml::test::test::{graph, inference, tensor};
-use image2tensor;
 use image2tensor::convert_image_to_tensor_bytes;
 
 use crate::imagenet_classes;
@@ -48,7 +47,7 @@ pub fn imagenet_openvino_test(
     let target = map_string_to_execution_target(&target_as_string)?;
     let model = {
         let start_for_elapsed_macro = std::time::Instant::now();
-        let model: Vec<u8> = std::fs::read(&path.join("model.xml"))?;
+        let model: Vec<u8> = std::fs::read(path.join("model.xml"))?;
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
         eprintln!(
             "Loaded model from xml {} {}",
@@ -59,7 +58,7 @@ pub fn imagenet_openvino_test(
     };
     let weights = {
         let start_for_elapsed_macro = std::time::Instant::now();
-        let weights = std::fs::read(&path.join("model.bin"))?;
+        let weights = std::fs::read(path.join("model.bin"))?;
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
         eprintln!(
             "Loaded weigths {} {}",
@@ -101,7 +100,6 @@ pub fn imagenet_openvino_test(
         image2tensor::TensorType::F32,
         image2tensor::ColorOrder::BGR,
     )
-    .or_else(|e| Err(e))
    .unwrap();
 
     let tensor_id = {
@@ -119,18 +117,16 @@ pub fn imagenet_openvino_test(
     let input_name = "0";
     {
         let start_for_elapsed_macro = std::time::Instant::now();
-        let set_input_result =
-            inference::GraphExecutionContext::set_input(&context, input_name, tensor_id).unwrap();
+        inference::GraphExecutionContext::set_input(&context, input_name, tensor_id).unwrap();
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
         eprintln!(
-            "Input set with ID: {:?} {}",
-            set_input_result,
+            "Input set {}",
             elapsed_to_string("GraphExecutionContext::set_input", elapsed)
         );
     }
     {
         let start_for_elapsed_macro = std::time::Instant::now();
-        let _infered_result = inference::GraphExecutionContext::compute(&context).unwrap();
+        inference::GraphExecutionContext::compute(&context).unwrap();
         let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
         eprintln!(
             "Executed graph inference. {}",
@@ -167,12 +163,12 @@ pub fn imagenet_openvino_test(
     {
         let output_vec_f32 =
            unsafe { std::slice::from_raw_parts(output_data.as_ptr() as *const f32, 1001) };
-        let results = sort_results(&output_vec_f32);
-        for i in 0..3 {
+        let results = sort_results(output_vec_f32);
+        for res in results.iter().take(3) {
             println!(
                 "{:.2} -> {}",
-                results[i].weight,
-                imagenet_classes::IMAGENET_CLASSES[results[i].index],
+                res.weight,
+                imagenet_classes::IMAGENET_CLASSES[res.index],
             );
         }
     } else {
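The changes above correspond to standard clippy lints: needless_borrow (std::fs::read takes impl AsRef<Path>, so borrowing the PathBuf returned by path.join adds nothing), bind_instead_of_map (.or_else(|e| Err(e)) just rebuilds the same Err), unused let bindings on results that are only unwrapped, and needless_range_loop (indexing with for i in 0..3 instead of iterating). A minimal, self-contained sketch of the same before/after patterns — the names here are illustrative, not the test's actual code:

use std::path::Path;

// Illustrative stand-in for the test's classification result type.
struct InferenceResult {
    weight: f32,
    index: usize,
}

fn demo(dir: &Path) -> std::io::Result<()> {
    // needless_borrow: `std::fs::read` takes `impl AsRef<Path>`, so
    // `&dir.join(...)` was an unnecessary extra borrow.
    let _model = std::fs::read(dir.join("model.xml"))?;

    // bind_instead_of_map: a trailing `.or_else(|e| Err(e))` re-wraps the
    // same error and can simply be deleted.
    let value: i32 = "42".parse().unwrap();

    // needless_range_loop: iterate the collection instead of `for i in 0..3`.
    let results = [
        InferenceResult { weight: 0.90, index: 1 },
        InferenceResult { weight: 0.05, index: 2 },
        InferenceResult { weight: 0.01, index: 3 },
    ];
    for res in results.iter().take(3) {
        println!("{:.2} -> {} (parsed {})", res.weight, res.index, value);
    }
    Ok(())
}

fn main() {
    // A real model.xml is not required for the sketch; a missing file just
    // makes `demo` return an Err, which is ignored here.
    let _ = demo(Path::new("."));
}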
62 changes: 19 additions & 43 deletions crates/core/tests/ml_component.rs
@@ -1,10 +1,6 @@
 pub mod ml {
     wasmtime::component::bindgen!("ml" in "tests/core-wasi-test/wit");
 
-    use std::fmt::format;
-
-    //use anyhow::Ok;
-    //use std::result::Result::Ok;
     use spin_core::HostComponent;
 
     use anyhow::{anyhow, Context};
@@ -17,12 +13,9 @@ pub mod ml {
     use test::test::errors::ErrorCode;
     use test::test::graph::{ExecutionTarget, Graph, GraphBuilder, GraphEncoding};
     use test::test::inference::GraphExecutionContext;
-    use tokio::sync::Mutex;
-    use tokio::time::error::Elapsed;
     use wasmtime::component::Resource;
 
     use openvino::{Layout, Precision, TensorDesc};
-    use table;
 
     #[derive(Clone)]
     pub struct MLHostComponent;
@@ -83,10 +76,7 @@ pub mod ml {
             message: String,
         ) -> Resource<errors::Error> {
             errors
-                .push(ErrorInternalData {
-                    code: code,
-                    message: message,
-                })
+                .push(ErrorInternalData { code, message })
                 .map(Resource::<errors::Error>::new_own)
                 .expect("Can't allocate error")
         }
@@ -129,9 +119,9 @@ pub mod ml {
                 .create_infer_request()
                 .context("Can't create InferRequest")?;
             let graph_execution_context = GraphExecutionContextInternalData {
-                cnn_network: cnn_network,
+                cnn_network,
                 //executable_network: Mutex::new(exec_network),
-                infer_request: infer_request,
+                infer_request,
             };
             return executions
                 .push(graph_execution_context)
@@ -168,16 +158,14 @@ pub mod ml {
             let buffer = blob
                 .buffer()
                 .map_err(|err| format!("Can't get blob buffer, error = {err}"))?
-                .iter()
-                .map(|&d| d as u8)
-                .collect::<Vec<_>>();
+                .to_vec();
             let tensor_dimensions = tensor_desc
                 .dims()
                 .iter()
                 .map(|&d| d as u32)
                 .collect::<Vec<_>>();
             let tensor = TensorInternalData {
-                tensor_dimensions: tensor_dimensions,
+                tensor_dimensions,
                 tensor_type: map_precision_to_tensor_type(tensor_desc.precision()),
                 tensor_data: buffer,
             };
@@ -206,10 +194,8 @@ pub mod ml {
                 }),
             };
             match res {
-                Ok(res) => return Ok(res),
-                Err(e) => {
-                    return Err(MLHostImpl::new_error(&mut self.errors, e.code, e.message));
-                }
+                Ok(res) => Ok(res),
+                Err(e) => Err(MLHostImpl::new_error(&mut self.errors, e.code, e.message)),
             }
         }
 
@@ -258,9 +244,9 @@ pub mod ml {
             tensor_data: tensor::TensorData,
         ) -> Resource<tensor::Tensor> {
             let tensor = TensorInternalData {
-                tensor_dimensions: tensor_dimensions,
-                tensor_type: tensor_type,
-                tensor_data: tensor_data,
+                tensor_dimensions,
+                tensor_type,
+                tensor_data,
             };
             self.tensors
                 .push(tensor)
@@ -313,7 +299,7 @@ pub mod ml {
             let tensor_resource = self
                 .tensors
                 .get(tensor.rep())
-                .expect(format!("Can't find tensor with ID = {}", tensor.rep()).as_str());
+                .unwrap_or_else(|| panic!("Can't find tensor with ID = {}", tensor.rep()));
             let precision = map_tensor_type_to_precision(tensor_resource.tensor_type);
             let dimensions = tensor_resource
                 .tensor_dimensions
@@ -326,17 +312,11 @@ pub mod ml {
             let execution_context: &mut GraphExecutionContextInternalData = self
                 .executions
                 .get_mut(graph_execution_context.rep())
-                .expect(
-                    format!(
-                        "Can't find graph execution context with ID = {}",
-                        graph_execution_context.rep()
-                    )
-                    .as_str(),
-                );
+                .unwrap_or_else(|| panic!("Can't find tensor with ID = {}", tensor.rep()));
             let input_name = execution_context
                 .cnn_network
                 .get_input_name(index)
-                .expect(format!("Can't find input with name = {}", index).as_str());
+                .unwrap_or_else(|_| panic!("Can't find input with name = {}", index));
             match execution_context.infer_request.set_blob(&input_name, &blob) {
                 Ok(res) => Ok(res),
                 Err(err) => Err(self.new(
@@ -394,7 +374,7 @@ pub mod ml {
                         MLHostImpl::new_error(
                             &mut self.errors,
                             ErrorCode::RuntimeError,
-                            format!("Can't create tensor for get_output"),
+                            "Can't create tensor for get_output".to_string(),
                         )
                     }),
                     Err(err) => Err(MLHostImpl::new_error(
@@ -429,34 +409,30 @@ pub mod ml {
                 return Err(MLHostImpl::new_error(
                     &mut self.errors,
                     ErrorCode::RuntimeError,
-                    format!("Expected 2 elements in graph builder vector"),
+                    "Expected 2 elements in graph builder vector".to_string(),
                 ));
             }
             if graph_encoding != GraphEncoding::Openvino {
                 return Err(MLHostImpl::new_error(
                     &mut self.errors,
                     ErrorCode::RuntimeError,
-                    format!("Only OpenVINO encoding is supported"),
+                    "Only OpenVINO encoding is supported".to_string(),
                 ));
             }
             // Read the guest array.
             let graph_internal_data = GraphInternalData {
                 xml: graph[0].clone(),
                 weights: graph[1].clone(),
-                target: target,
+                target,
             };
             match self.graphs.push(graph_internal_data) {
-                Ok(graph_rep) => {
-                    return Ok(Resource::<Graph>::new_own(graph_rep));
-                }
+                Ok(graph_rep) => Ok(Resource::<Graph>::new_own(graph_rep)),
                 Err(err) => {
                     match self.errors.push(ErrorInternalData {
                         code: ErrorCode::RuntimeError,
                         message: format!("{:?}", err),
                     }) {
-                        Ok(error_rep) => {
-                            return Err(Resource::<errors::Error>::new_own(error_rep));
-                        }
+                        Ok(error_rep) => Err(Resource::<errors::Error>::new_own(error_rep)),
                         Err(err) => {
                             panic!("Can't create internal error for {:?}", err);
                         }
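The changes in this file are the same class of clippy cleanups: redundant_field_names (code: code collapses to code), expect_fun_call (.expect(format!(...).as_str()) builds the message even on success, while .unwrap_or_else(|| panic!(...)) formats only on failure), useless_format (format! with no arguments is just .to_string()), needless_return in tail position, and replacing a manual .iter().map(|&d| d as u8).collect::<Vec<_>>() over a byte buffer with .to_vec(). A compilable sketch of those patterns, with hypothetical types standing in for the host-component internals:

use std::collections::HashMap;

// Hypothetical stand-in for the host component's internal error type.
struct ErrorInternalData {
    code: u32,
    message: String,
}

// redundant_field_names: `code: code, message: message` becomes shorthand.
fn new_error(code: u32, message: String) -> ErrorInternalData {
    ErrorInternalData { code, message }
}

// expect_fun_call: `.expect(format!(...).as_str())` allocates the message
// even on the success path; `unwrap_or_else(|| panic!(...))` only formats
// the message when the lookup actually fails.
fn find_tensor(tensors: &HashMap<u32, Vec<u8>>, id: u32) -> &Vec<u8> {
    tensors
        .get(&id)
        .unwrap_or_else(|| panic!("Can't find tensor with ID = {}", id))
}

// useless_format: `format!("string literal")` with no arguments is just
// `.to_string()`.
fn unsupported() -> String {
    "Only OpenVINO encoding is supported".to_string()
}

// needless_return: the `match` is already the tail expression, so the
// `return` keyword inside each arm is dropped.
fn push(items: &mut Vec<u32>, item: u32) -> Result<usize, String> {
    items.push(item);
    match items.len() {
        0 => Err("push failed".to_string()),
        n => Ok(n - 1),
    }
}

fn main() {
    let mut tensors = HashMap::new();
    // A manual element-by-element copy of a byte buffer
    // (`.iter().map(|&d| d as u8).collect::<Vec<_>>()`) is just `.to_vec()`.
    let buffer: &[u8] = &[1, 2, 3];
    tensors.insert(7, buffer.to_vec());

    let e = new_error(1, unsupported());
    println!("{}", e.message);
    println!("{:?}", find_tensor(&tensors, 7));
    println!("{:?}", push(&mut vec![], e.code));
}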
2 changes: 1 addition & 1 deletion crates/doctor/src/test.rs
@@ -1,7 +1,7 @@
 #![cfg(test)]
 #![allow(clippy::expect_fun_call)]
 
-use std::{fs, io::Write, path::Path};
+use std::{io::Write, path::Path};
 
 use tempfile::{NamedTempFile, TempPath};
 use toml::Value;
2 changes: 0 additions & 2 deletions crates/locked-app/src/locked.rs
@@ -294,8 +294,6 @@ pub struct Variable {
 mod test {
     use super::*;
 
-    use crate::values::ValuesMapBuilder;
-
     #[test]
     fn locked_app_with_no_host_reqs_serialises_as_v0_and_v0_deserialises_as_v1() {
         let locked_app = LockedApp {