Commit: Format code

Yordan Madzhunkov committed Jun 20, 2024
1 parent 5fa7311 commit 594e63f
Showing 5 changed files with 251 additions and 195 deletions.
38 changes: 19 additions & 19 deletions crates/core/build.rs
@@ -1,47 +1,47 @@

use std::env;
use std::fs;
use std::path::PathBuf;
//extern crate curl;

use std::io::Write;
use curl::easy::Easy;
use std::io::Write;

fn main() {
let base_url = "https://raw.githubusercontent.com/blocksense-network/imagenet_openvino/db44329b8e2b3398c9cc34dd56d94f3ce6fd6e21/";//images/0.jpg
let base_url = "https://raw.githubusercontent.com/blocksense-network/imagenet_openvino/db44329b8e2b3398c9cc34dd56d94f3ce6fd6e21/"; //images/0.jpg

let imagenet_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../target/test-programs/imagenet");
let imagenet_path =
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../target/test-programs/imagenet");
let images_dir = imagenet_path.join("images");
fs::create_dir_all(images_dir).unwrap();
let files = [
"model.xml",
"model.bin",
"images/0.jpg",
"images/1.jpg",
];
let files = ["model.xml", "model.bin", "images/0.jpg", "images/1.jpg"];
for file in files {
try_download(&(base_url.to_owned()+file), &imagenet_path.join(file)).unwrap();
try_download(&(base_url.to_owned() + file), &imagenet_path.join(file)).unwrap();
}

println!("cargo:rerun-if-changed=build.rs");
}

fn try_download(url: &str, filename: &PathBuf) -> Result<(), anyhow::Error> {
let mut easy = Easy::new();
easy.url(url).map_err(|e| anyhow::anyhow!("Error {} when downloading {}", e.to_string(), url))?;
easy.url(url)
.map_err(|e| anyhow::anyhow!("Error {} when downloading {}", e.to_string(), url))?;

let mut dst = Vec::new();
{
let mut transfer = easy.transfer();
transfer.write_function(|data| {
dst.extend_from_slice(data);
Ok(data.len())
}).unwrap();
transfer.perform().map_err(|e| anyhow::anyhow!("Error {} when downloading {}", e.to_string(), url))?;
transfer
.write_function(|data| {
dst.extend_from_slice(data);
Ok(data.len())
})
.unwrap();
transfer
.perform()
.map_err(|e| anyhow::anyhow!("Error {} when downloading {}", e.to_string(), url))?;
}
{
let mut file = std::fs::File::create(filename)?;
file.write_all(dst.as_slice())?;
}
Ok(())
}
}
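
The scoped block around `easy.transfer()` in `try_download` is what lets the `write_function` closure borrow `dst` mutably and still release that borrow before the buffer is written to disk. A minimal sketch of the same pattern with the `curl` crate (error handling trimmed to `curl::Error`; this is an illustration, not the build script itself):

use curl::easy::Easy;

fn fetch(url: &str) -> Result<Vec<u8>, curl::Error> {
    let mut easy = Easy::new();
    easy.url(url)?;
    let mut body = Vec::new();
    {
        // The transfer borrows `body` through the closure; ending this scope
        // ends the borrow so `body` can be returned afterwards.
        let mut transfer = easy.transfer();
        transfer.write_function(|data| {
            body.extend_from_slice(data);
            Ok(data.len())
        })?;
        transfer.perform()?;
    }
    Ok(body)
}
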
115 changes: 82 additions & 33 deletions crates/core/tests/core-wasi-test/src/imagenet.rs
@@ -1,19 +1,21 @@


use crate::ml::test::test::{graph, tensor, inference};
use crate::ml::test::test::{graph, inference, tensor};
use image2tensor;
use image2tensor::convert_image_to_tensor_bytes;

use crate::Path;
use crate::imagenet_classes;
use crate::Path;

pub fn elapsed_to_string(fn_name: &str, elapsed: u128) -> String {
if elapsed < 1000 {
format!("`{}` took {} ns", fn_name, elapsed)
} else if elapsed < 1000 * 1000 {
format!("`{}` took {:.2} µs", fn_name, elapsed as f64 / 1000.0)
} else {
format!("`{}` took {:.2} ms", fn_name, elapsed as f64 / 1000.0 / 1000.0)
format!(
"`{}` took {:.2} ms",
fn_name,
elapsed as f64 / 1000.0 / 1000.0
)
}
}

@@ -33,97 +35,138 @@ fn map_string_to_execution_target(target: &str) -> Result<graph::ExecutionTarget
"CPU" => Ok(graph::ExecutionTarget::Cpu),
"GPU" => Ok(graph::ExecutionTarget::Gpu),
"TPU" => Ok(graph::ExecutionTarget::Tpu),
_ => {
Err(format!("Unknown execution target = {}", target))
}
_ => Err(format!("Unknown execution target = {}", target)),
}
}


pub fn imagenet_openvino_test(path_as_string: String, target_as_string: String, image_file: String) -> std::result::Result<(), Box<dyn std::error::Error>> {
pub fn imagenet_openvino_test(
path_as_string: String,
target_as_string: String,
image_file: String,
) -> std::result::Result<(), Box<dyn std::error::Error>> {
let path = Path::new(&path_as_string);
let target = map_string_to_execution_target(&target_as_string)?;
let model = {
let start_for_elapsed_macro = std::time::Instant::now();
let model: Vec<u8> = std::fs::read(&path.join("model.xml"))?;
let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
eprintln!("Loaded model from xml {} {}", bytes_to_string(model.len()), elapsed_to_string("fs::read", elapsed));
eprintln!(
"Loaded model from xml {} {}",
bytes_to_string(model.len()),
elapsed_to_string("fs::read", elapsed)
);
model
};
let weights = {
let start_for_elapsed_macro = std::time::Instant::now();
let weights = std::fs::read(&path.join("model.bin"))?;
let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
eprintln!("Loaded weights {} {}", bytes_to_string(weights.len()), elapsed_to_string("fs::read", elapsed));
eprintln!(
"Loaded weights {} {}",
bytes_to_string(weights.len()),
elapsed_to_string("fs::read", elapsed)
);
weights
};
let imagenet_graph = {
let start_for_elapsed_macro = std::time::Instant::now();
let imagenet_graph = graph::load(&[model, weights], graph::GraphEncoding::Openvino, target).unwrap();
let imagenet_graph =
graph::load(&[model, weights], graph::GraphEncoding::Openvino, target).unwrap();
let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
eprintln!("---- {:?} ----", target);
eprintln!("Loaded graph with ID: {:?} {}", imagenet_graph, elapsed_to_string("graph::load", elapsed));
eprintln!(
"Loaded graph with ID: {:?} {}",
imagenet_graph,
elapsed_to_string("graph::load", elapsed)
);
imagenet_graph
};
let context = {
let start_for_elapsed_macro = std::time::Instant::now();
let context = graph::Graph::init_execution_context(&imagenet_graph).unwrap();
let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
eprintln!("Created context with ID: {:?} {}", context, elapsed_to_string("Graph::init_execution_context", elapsed));
eprintln!(
"Created context with ID: {:?} {}",
context,
elapsed_to_string("Graph::init_execution_context", elapsed)
);
context
};

let tensor_dimensions:Vec<u32> = vec![1, 3, 224, 224];
let tensor_dimensions: Vec<u32> = vec![1, 3, 224, 224];
let tensor_data = convert_image_to_tensor_bytes(
&image_file,//"images/0.jpg",
&image_file, //"images/0.jpg",
tensor_dimensions[2],
tensor_dimensions[3],
image2tensor::TensorType::F32,
image2tensor::ColorOrder::BGR,
)
.or_else(|e| Err(e))
.unwrap();



let tensor_id = {
let start_for_elapsed_macro = std::time::Instant::now();
let tensor_type = tensor::TensorType::Fp32;
let tensor_id = tensor::Tensor::new(&tensor_dimensions, tensor_type, &tensor_data);
let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
eprintln!("Created tensor with ID: {:?} {}", tensor_id, elapsed_to_string("Tensor::new", elapsed));
eprintln!(
"Created tensor with ID: {:?} {}",
tensor_id,
elapsed_to_string("Tensor::new", elapsed)
);
tensor_id
};
let input_name = "0";
{
let start_for_elapsed_macro = std::time::Instant::now();
let set_input_result = inference::GraphExecutionContext::set_input(&context, input_name, tensor_id).unwrap();
let set_input_result =
inference::GraphExecutionContext::set_input(&context, input_name, tensor_id).unwrap();
let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
eprintln!("Input set with ID: {:?} {}", set_input_result, elapsed_to_string("GraphExecutionContext::set_input", elapsed));
eprintln!(
"Input set with ID: {:?} {}",
set_input_result,
elapsed_to_string("GraphExecutionContext::set_input", elapsed)
);
}
{
let start_for_elapsed_macro = std::time::Instant::now();
let _infered_result = inference::GraphExecutionContext::compute(&context).unwrap();
let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
eprintln!("Executed graph inference. {}", elapsed_to_string("GraphExecutionContext::compute", elapsed));
eprintln!(
"Executed graph inference. {}",
elapsed_to_string("GraphExecutionContext::compute", elapsed)
);
}
let output_result_id = {
let output_result_id = {
let start_for_elapsed_macro = std::time::Instant::now();
let output_result_id = inference::GraphExecutionContext::get_output(&context, input_name).unwrap();
let output_result_id =
inference::GraphExecutionContext::get_output(&context, input_name).unwrap();
let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
eprintln!("Obtaining output {}", elapsed_to_string("GraphExecutionContext::get_output", elapsed));
eprintln!(
"Obtaining output {}",
elapsed_to_string("GraphExecutionContext::get_output", elapsed)
);
output_result_id
};
let (output_data, output_dimensions, output_type) = {
let (output_data, output_dimensions, output_type) = {
let start_for_elapsed_macro = std::time::Instant::now();
let output_data = tensor::Tensor::data(&output_result_id);
let output_dimensions = tensor::Tensor::dimensions(&output_result_id);
let output_type = tensor::Tensor::ty(&output_result_id);
let elapsed = start_for_elapsed_macro.elapsed().as_nanos();
eprintln!("Copying data from tensor. {}", elapsed_to_string("Tensor::data+dimensions+type", elapsed));
eprintln!(
"Copying data from tensor. {}",
elapsed_to_string("Tensor::data+dimensions+type", elapsed)
);
(output_data, output_dimensions, output_type)
};
if output_dimensions.len() == 2 && output_dimensions[0] == 1 && output_dimensions[1] == 1001 && output_type == tensor::TensorType::Fp32 {
let output_vec_f32 = unsafe { std::slice::from_raw_parts(output_data.as_ptr() as *const f32, 1001) };
if output_dimensions.len() == 2
&& output_dimensions[0] == 1
&& output_dimensions[1] == 1001
&& output_type == tensor::TensorType::Fp32
{
let output_vec_f32 =
unsafe { std::slice::from_raw_parts(output_data.as_ptr() as *const f32, 1001) };
let results = sort_results(&output_vec_f32);
for i in 0..3 {
println!(
@@ -133,7 +176,10 @@ pub fn imagenet_openvino_test(path_as_string: String, target_as_string: String,
);
}
} else {
eprintln!("Output not as expected, output = {:?} {:?}", &output_dimensions, &output_type);
eprintln!(
"Output not as expected, output = {:?} {:?}",
&output_dimensions, &output_type
);
}
Ok(())
}
@@ -146,15 +192,18 @@ pub fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
.iter()
.skip(1)
.enumerate()
.map(|(c, p)| InferenceResult{ index: c, weight: *p})
.map(|(c, p)| InferenceResult {
index: c,
weight: *p,
})
.collect();
results.sort_by(|a, b| b.weight.partial_cmp(&a.weight).unwrap());
results
}

// A wrapper for class ID and match probabilities.
#[derive(Debug, PartialEq)]
pub struct InferenceResult{
pub struct InferenceResult {
pub index: usize,
pub weight: f32,
}
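
Stripped of the timing and logging added above, the inference flow this test exercises through the generated `graph` / `tensor` / `inference` bindings reduces to the sequence below (a sketch reusing the names from the diff; `path`, `target`, and `tensor_data` are assumed to be prepared as above, and errors are unwrapped for brevity):

let model = std::fs::read(path.join("model.xml"))?;
let weights = std::fs::read(path.join("model.bin"))?;
let g = graph::load(&[model, weights], graph::GraphEncoding::Openvino, target).unwrap();
let ctx = graph::Graph::init_execution_context(&g).unwrap();

let dims: Vec<u32> = vec![1, 3, 224, 224];
let input = tensor::Tensor::new(&dims, tensor::TensorType::Fp32, &tensor_data);
inference::GraphExecutionContext::set_input(&ctx, "0", input).unwrap();
inference::GraphExecutionContext::compute(&ctx).unwrap();

// The test reuses the input name "0" when fetching the output tensor.
let out = inference::GraphExecutionContext::get_output(&ctx, "0").unwrap();
let (data, out_dims, ty) = (
    tensor::Tensor::data(&out),
    tensor::Tensor::dimensions(&out),
    tensor::Tensor::ty(&out),
);
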
2 changes: 0 additions & 2 deletions crates/core/tests/core-wasi-test/src/imagenet_classes.rs
@@ -1019,5 +1019,3 @@ pub const IMAGENET_CLASSES: [&str; 1000] = [
"ear, spike, capitulum",
"toilet tissue, toilet paper, bathroom tissue"
];


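
The truncated `println!` in imagenet.rs indexes into this table to label the top results. A small illustrative helper (hypothetical, assuming the 1001-element OpenVINO output used above, where element 0 is a background class so `InferenceResult::index` lines up with `IMAGENET_CLASSES` directly):

fn print_top3(scores: &[f32]) {
    // sort_results() skips the background class at element 0, so `index`
    // can be used to look up the human-readable label directly.
    let results = sort_results(scores);
    for r in results.iter().take(3) {
        println!(
            "{:.4} -> {}",
            r.weight,
            imagenet_classes::IMAGENET_CLASSES[r.index]
        );
    }
}
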
10 changes: 2 additions & 8 deletions crates/core/tests/core-wasi-test/src/main.rs
@@ -24,22 +24,17 @@ mod ml {
});
}

mod imagenet_classes;
mod imagenet;
mod imagenet_classes;

use imagenet::imagenet_openvino_test;



use std::path::Path;

use crate::hello::test::test::gggg2::say_hello;



type Result = std::result::Result<(), Box<dyn std::error::Error>>;


fn main() -> Result {
let mut args = std::env::args();
let cmd = args.next().expect("cmd");
@@ -89,7 +84,6 @@ fn main() -> Result {
let target_as_string = args.next().expect("target");
let image_file_as_string = args.next().expect("image_file");
_ = imagenet_openvino_test(path_as_string, target_as_string, image_file_as_string);

}
"sleep" => {
let duration =
@@ -104,4 +98,4 @@
cmd => panic!("unknown cmd {cmd}"),
};
Ok(())
}
}
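
To see how the pieces connect end to end, here is a hypothetical direct call mirroring the three positional arguments the `imagenet` command reads; the paths below are assumptions based on where build.rs places the downloaded assets:

fn run_example() -> std::result::Result<(), Box<dyn std::error::Error>> {
    imagenet_openvino_test(
        "target/test-programs/imagenet".to_string(),               // directory holding model.xml / model.bin
        "CPU".to_string(),                                         // or "GPU" / "TPU"
        "target/test-programs/imagenet/images/0.jpg".to_string(),  // input image
    )
}
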