From 5fa7311d308cca003d8445354146ea69f6d2bc61 Mon Sep 17 00:00:00 2001
From: Yordan Madzhunkov
Date: Thu, 20 Jun 2024 16:46:05 +0300
Subject: [PATCH] Test pass

---
 Cargo.lock                                  |  46 +++
 crates/core/Cargo.toml                      |   2 +-
 crates/core/build.rs                        |   2 +-
 crates/core/tests/core-wasi-test/Cargo.toml |   2 +-
 crates/core/tests/hello_component.rs        |   4 +-
 crates/core/tests/ml_component.rs           | 385 ++++++++++----------
 6 files changed, 247 insertions(+), 194 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index b759f15929..694bad0b2c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1871,6 +1871,7 @@ dependencies = [
  "libz-sys",
  "openssl-sys",
  "pkg-config",
+ "rustls-ffi",
  "vcpkg",
  "windows-sys 0.52.0",
 ]
@@ -4881,6 +4882,27 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "num_enum"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9"
+dependencies = [
+ "num_enum_derive",
+]
+
+[[package]]
+name = "num_enum_derive"
+version = "0.5.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799"
+dependencies = [
+ "proc-macro-crate",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
 [[package]]
 name = "num_threads"
 version = "0.1.7"
@@ -6660,6 +6682,21 @@ dependencies = [
  "zeroize",
 ]
 
+[[package]]
+name = "rustls-ffi"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9da52707cca59e6eef8a78f3ad8d04024254a168ed1b41eb4dfa9616eace781a"
+dependencies = [
+ "libc",
+ "log",
+ "num_enum",
+ "rustls 0.20.9",
+ "rustls-pemfile 0.2.1",
+ "sct",
+ "webpki",
+]
+
 [[package]]
 name = "rustls-native-certs"
 version = "0.7.0"
@@ -6673,6 +6710,15 @@ dependencies = [
  "security-framework",
 ]
 
+[[package]]
+name = "rustls-pemfile"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5eebeaeb360c87bfb72e84abdb3447159c0eaececf1bef2aecd65a8be949d1c9"
+dependencies = [
+ "base64 0.13.1",
+]
+
 [[package]]
 name = "rustls-pemfile"
 version = "0.3.0"
diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml
index faf1af4157..0dd1959373 100644
--- a/crates/core/Cargo.toml
+++ b/crates/core/Cargo.toml
@@ -36,5 +36,5 @@ spin-componentize = { workspace = true }
 futures = "0.3"
 
 [build-dependencies]
-curl = "^0.4"
+curl = { version = "^0.4", features = ["rustls"] }
 anyhow = "^1"
\ No newline at end of file
diff --git a/crates/core/build.rs b/crates/core/build.rs
index c8445cdb51..9e136aa8cf 100644
--- a/crates/core/build.rs
+++ b/crates/core/build.rs
@@ -2,7 +2,7 @@ use std::env;
 use std::fs;
 use std::path::PathBuf;
 
-extern crate curl;
+//extern crate curl;
 
 use std::io::Write;
 use curl::easy::Easy;
diff --git a/crates/core/tests/core-wasi-test/Cargo.toml b/crates/core/tests/core-wasi-test/Cargo.toml
index e3fde5c310..6c590c7d56 100644
--- a/crates/core/tests/core-wasi-test/Cargo.toml
+++ b/crates/core/tests/core-wasi-test/Cargo.toml
@@ -7,7 +7,7 @@ edition = "2021"
 debug = true
 
 [dependencies]
-wit-bindgen = "0.13.0"
+wit-bindgen = "0.26.0"
 image2tensor = "0.3.1"
 
 [workspace]
diff --git a/crates/core/tests/hello_component.rs b/crates/core/tests/hello_component.rs
index 4abca99b75..3f94c243a4 100644
--- a/crates/core/tests/hello_component.rs
+++ b/crates/core/tests/hello_component.rs
@@ -25,8 +25,8 @@ pub mod hello {
     pub struct HelloHostImpl {}
 
     impl gggg2::Host for HelloHostImpl {
-        fn say_hello(&mut self, x: String) -> wasmtime::Result<String> {
-            Ok(format!("Hello bace {x}!"))
+        fn say_hello(&mut self, x: String) -> String {
+            format!("Hello bace {x}!")
         }
     }
 }
diff --git a/crates/core/tests/ml_component.rs b/crates/core/tests/ml_component.rs
index 19456cac0f..bfb28f3a26 100644
--- a/crates/core/tests/ml_component.rs
+++ b/crates/core/tests/ml_component.rs
@@ -1,6 +1,11 @@
 pub mod ml {
     wasmtime::component::bindgen!("ml" in "tests/core-wasi-test/wit");
+
+    use std::fmt::format;
+
+    //use anyhow::Ok;
+    //use std::result::Result::Ok;
 
     use spin_core::HostComponent;
 
     use anyhow::{anyhow, Context};
@@ -14,6 +19,7 @@
     use test::test::graph::{ExecutionTarget, Graph, GraphBuilder, GraphEncoding};
     use test::test::inference::GraphExecutionContext;
     use tokio::sync::Mutex;
+    use tokio::time::error::Elapsed;
     use wasmtime::component::Resource;
 
     use openvino::{Layout, Precision, TensorDesc};
@@ -71,28 +77,30 @@
         pub errors: table::Table<ErrorInternalData>,
     }
 
-    impl graph::HostGraph for MLHostImpl {
-        fn init_execution_context(
-            &mut self,
-            graph: Resource<Graph>,
-        ) -> Result<
-            Result<Resource<GraphExecutionContext>, Resource<errors::Error>>,
-            anyhow::Error,
-        > {
-            // Construct the context if none is present; this is done lazily (i.e.
-            // upon actually loading a model) because it may fail to find and load
-            // the OpenVINO libraries. The laziness limits the extent of the error
-            // only to wasi-nn users, not all WASI users.
-            if self.openvino.is_none() {
-                self.openvino.replace(openvino::Core::new(None)?);
-            }
-            if self.openvino.is_some() {
-                if let Some(graph) = self.graphs.get(graph.rep()) {
-                    let mut cnn_network = self
-                        .openvino
-                        .as_mut()
-                        .expect("")
-                        .read_network_from_buffer(&graph.xml, &graph.weights)?;
+
+    impl MLHostImpl {
+        fn new_error(
+            errors: &mut table::Table<ErrorInternalData>,
+            code: ErrorCode,
+            message: String,
+        ) -> Resource<errors::Error> {
+            errors
+                .push(ErrorInternalData { code, message })
+                .map(Resource::<errors::Error>::new_own)
+                .expect("Can't allocate error")
+        }
+
+        fn init_execution_context_internal(
+            graph: &GraphInternalData,
+            openvino: &mut Option<openvino::Core>,
+            executions: &mut table::Table<GraphExecutionContextInternalData>,
+        ) -> Result<Resource<GraphExecutionContext>, anyhow::Error> {
+            if openvino.is_none() {
+                openvino.replace(openvino::Core::new(None)?);
+            }
+            if openvino.is_some() {
+                let mut cnn_network = openvino
+                    .as_mut()
+                    .context("Can't create OpenVINO graph without backend")?
+                    .read_network_from_buffer(&graph.xml, &graph.weights)?;
                 // Construct OpenVINO graph structures: `cnn_network` contains the graph
                 // structure, `exec_network` can perform inference.
@@ -110,47 +118,90 @@
                 cnn_network.set_input_layout(&name, Layout::NHWC)?;
             }
 
-            let mut exec_network = self
-                .openvino
+            let mut exec_network = openvino
                 .as_mut()
                 .expect("")
                 .load_network(&cnn_network, map_execution_target_to_string(graph.target))?;
             let infer_request = exec_network
                 .create_infer_request()
-                .expect("Can't create InferRequest");
+                .context("Can't create InferRequest")?;
             let graph_execution_context = GraphExecutionContextInternalData {
                 cnn_network: cnn_network,
                 executable_network: Mutex::new(exec_network),
                 infer_request: infer_request,
             };
+                return executions
+                    .push(graph_execution_context)
+                    .map(Resource::<GraphExecutionContext>::new_own)
+                    .map_err(|_| anyhow!("Can't store execution context"));
+            }
+            Err(anyhow!("Can't create OpenVINO backend"))
+        }
 
-            match self
-                .executions
-                .push(graph_execution_context)
-                .map(Resource::<GraphExecutionContext>::new_own)
-            {
-                Ok(res) => {
-                    return Ok(Ok(res));
-                }
-                Err(_) => {
-                    match self.errors.push(ErrorInternalData {
-                        code: ErrorCode::RuntimeError,
-                        message: "Can't create graph execution context".to_string(),
-                    }) {
-                        Ok(id) => {
-                            return Ok(Err(Resource::<errors::Error>::new_own(id)));
-                        }
-                        Err(_) => {
-                            return Err(anyhow!("Can't allocate error"));
-                        }
-                    }
-                }
+        fn get_output_internal(
+            graph_execution: &mut GraphExecutionContextInternalData,
+            input_name: String,
+        ) -> Result<TensorInternalData, String> {
+            let index = input_name
+                .parse::<usize>()
+                .map_err(|_| format!("Can't parse {} to usize for input_name", input_name))?;
+            let output_name = graph_execution
+                .cnn_network
+                .get_output_name(index)
+                .map_err(|_| format!("Can't find output name for ID = {index}"))?;
+
+            let blob = graph_execution
+                .infer_request
+                .get_blob(&output_name)
+                .map_err(|_| format!("Can't get blob for output name = {output_name}"))?;
+            let tensor_desc = blob
+                .tensor_desc()
+                .map_err(|_| "Can't get blob description".to_string())?;
+            let buffer = blob
+                .buffer()
+                .map_err(|err| format!("Can't get blob buffer, error = {err}"))?
+                .iter()
+                .map(|&d| d as u8)
+                .collect::<Vec<u8>>();
+            let tensor_dimensions = tensor_desc
+                .dims()
+                .iter()
+                .map(|&d| d as u32)
+                .collect::<Vec<u32>>();
+            let tensor = TensorInternalData {
+                tensor_dimensions,
+                tensor_type: map_precision_to_tensor_type(tensor_desc.precision()),
+                tensor_data: buffer,
+            };
+            Ok(tensor)
+        }
+    }
+
+    impl graph::HostGraph for MLHostImpl {
+        fn init_execution_context(
+            &mut self,
+            graph: Resource<Graph>,
+        ) -> Result<Resource<GraphExecutionContext>, Resource<errors::Error>> {
+            let res = match self.graphs.get(graph.rep()) {
+                Some(graph) => MLHostImpl::init_execution_context_internal(
+                    graph,
+                    &mut self.openvino,
+                    &mut self.executions,
+                )
+                .map_err(|err| ErrorInternalData {
+                    code: ErrorCode::RuntimeError,
+                    message: err.to_string(),
+                }),
+                None => Err(ErrorInternalData {
+                    code: ErrorCode::RuntimeError,
+                    message: "Can't create graph execution context".to_string(),
+                }),
+            };
+            match res {
+                Ok(res) => Ok(res),
+                Err(e) => Err(MLHostImpl::new_error(&mut self.errors, e.code, e.message)),
             }
-            Err(anyhow!(
-                "[graph::HostGraph] fn init_execution_context -> Not implemented"
-            ))
         }
 
         fn drop(&mut self, graph: Resource<Graph>) -> Result<(), anyhow::Error> {
@@ -165,45 +216,30 @@
         fn new(
             &mut self,
             code: errors::ErrorCode,
-            data: String,
-        ) -> Result<Resource<errors::Error>, anyhow::Error> {
-            self.errors
-                .push(ErrorInternalData {
-                    code: code,
-                    message: data,
-                })
-                .map(Resource::<errors::Error>::new_own)
-                .map_err(|_| anyhow!("Can't allocate error"))
+            message: String,
+        ) -> Resource<errors::Error> {
+            MLHostImpl::new_error(&mut self.errors, code, message)
         }
 
-        fn drop(&mut self, error: Resource<errors::Error>) -> Result<(), anyhow::Error> {
-            self.errors
-                .remove(error.rep())
-                .ok_or(anyhow!(format!(
-                    "Can't find error with ID = {}",
-                    error.rep()
-                )))
-                .map(|_| ())
+        fn drop(&mut self, error: Resource<errors::Error>) -> std::result::Result<(), anyhow::Error> {
+            if self.errors.remove(error.rep()).is_some() {
+                return Ok(());
+            }
+            Err(anyhow!("Can't find error with ID = {}", error.rep()))
         }
 
-        fn code(&mut self, error: Resource<errors::Error>) -> Result<ErrorCode, anyhow::Error> {
-            self.errors
-                .get(error.rep())
-                .ok_or(anyhow!(format!(
-                    "Can't find error with ID = {}",
-                    error.rep()
-                )))
-                .map(|e| e.code)
+        fn code(&mut self, error: Resource<errors::Error>) -> ErrorCode {
+            if let Some(e) = self.errors.get(error.rep()) {
+                return e.code;
+            }
+            panic!("Can't find error with ID = {}", error.rep());
         }
 
-        fn data(&mut self, error: Resource<errors::Error>) -> Result<String, anyhow::Error> {
-            self.errors
-                .get(error.rep())
-                .ok_or(anyhow!(format!(
-                    "Can't find error with ID = {}",
-                    error.rep()
-                )))
-                .map(|e| e.message.clone())
+        fn data(&mut self, error: Resource<errors::Error>) -> String {
+            if let Some(e) = self.errors.get(error.rep()) {
+                return e.message.clone();
+            }
+            panic!("Can't find error with ID = {}", error.rep());
         }
     }
     impl tensor::HostTensor for MLHostImpl {
@@ -212,7 +248,7 @@
         fn new(
             &mut self,
             tensor_dimensions: tensor::TensorDimensions,
             tensor_type: tensor::TensorType,
             tensor_data: tensor::TensorData,
-        ) -> Result<Resource<tensor::Tensor>, anyhow::Error> {
+        ) -> Resource<tensor::Tensor> {
             let tensor = TensorInternalData {
                 tensor_dimensions: tensor_dimensions,
                 tensor_type: tensor_type,
                 tensor_data: tensor_data,
             };
@@ -221,49 +257,44 @@
             self.tensors
                 .push(tensor)
                 .map(Resource::<tensor::Tensor>::new_own)
-                .map_err(|_| anyhow!("Can't allocate tensor"))
+                .expect("Can't allocate tensor")
         }
 
         fn dimensions(
             &mut self,
             tensor: Resource<tensor::Tensor>,
-        ) -> Result<Vec<u32>, anyhow::Error> {
-            self.tensors
-                .get(tensor.rep())
-                .ok_or(anyhow!(format!(
-                    "Can't find tensor with ID = {}",
-                    tensor.rep()
-                )))
-                .map(|t| t.tensor_dimensions.clone())
+        ) -> Vec<u32> {
+            if let Some(t) = self.tensors.get(tensor.rep()) {
+                return t.tensor_dimensions.clone();
+            }
+            panic!("Can't find tensor with ID = {}", tensor.rep());
         }
 
         fn ty(
             &mut self,
             tensor: Resource<tensor::Tensor>,
-        ) -> Result<tensor::TensorType, anyhow::Error> {
-            self.tensors
-                .get(tensor.rep())
-                .ok_or(anyhow!(format!(
-                    "Can't find tensor with ID = {}",
-                    tensor.rep()
-                )))
-                .map(|t| t.tensor_type)
+        ) -> tensor::TensorType {
+            if let Some(t) = self.tensors.get(tensor.rep()) {
+                return t.tensor_type;
+            }
+            panic!("Can't find tensor with ID = {}", tensor.rep());
         }
 
         fn data(
             &mut self,
             tensor: Resource<tensor::Tensor>,
-        ) -> Result<tensor::TensorData, anyhow::Error> {
-            self.tensors
-                .get(tensor.rep())
-                .ok_or(anyhow!(format!(
-                    "Can't find tensor with ID = {}",
-                    tensor.rep()
-                )))
-                .map(|t| t.tensor_data.clone())
+        ) -> tensor::TensorData {
+            if let Some(t) = self.tensors.get(tensor.rep()) {
+                return t.tensor_data.clone();
+            }
+            panic!("Can't find tensor with ID = {}", tensor.rep());
         }
 
-        fn drop(&mut self, tensor: Resource<tensor::Tensor>) -> Result<(), anyhow::Error> {
+        fn drop(&mut self, tensor: Resource<tensor::Tensor>) -> std::result::Result<(), anyhow::Error> {
             self.tensors
                 .remove(tensor.rep())
-                .context(format!("Can't find tensor with ID = {}", tensor.rep()))?;
-            Ok(())
+                .context(format!("Can't find tensor with ID = {}", tensor.rep()))
+                .map(|_| ())
         }
     }
 
     impl inference::HostGraphExecutionContext for MLHostImpl {
@@ -273,16 +304,16 @@
         fn set_input(
            &mut self,
             graph_execution_context: Resource<GraphExecutionContext>,
             input_name: String,
             tensor: Resource<tensor::Tensor>,
-        ) -> Result<Result<(), Resource<errors::Error>>, anyhow::Error> {
+        ) -> Result<(), Resource<errors::Error>> {
             let index = input_name
                 .parse()
-                .context("Can't parse {} to usize for input_name")?;
+                .expect("Can't parse input_name to usize");
             // Construct the blob structure. TODO: there must be some good way to
             // discover the layout here; `desc` should not have to default to NHWC.
             let tensor_resource = self
                 .tensors
                 .get(tensor.rep())
-                .context(format!("Can't find tensor with ID = {}", tensor.rep()))?;
+                .expect(format!("Can't find tensor with ID = {}", tensor.rep()).as_str());
             let precision = map_tensor_type_to_precision(tensor_resource.tensor_type);
             let dimensions = tensor_resource
                 .tensor_dimensions
                 .iter()
@@ -290,45 +321,46 @@
                 .map(|&d| d as usize)
                 .collect::<Vec<usize>>();
             let desc = TensorDesc::new(Layout::NHWC, &dimensions, precision);
-            let blob = openvino::Blob::new(&desc, &tensor_resource.tensor_data)?;
+            let blob = openvino::Blob::new(&desc, &tensor_resource.tensor_data)
+                .expect("Error in Blob::new");
             let execution_context: &mut GraphExecutionContextInternalData = self
                 .executions
                 .get_mut(graph_execution_context.rep())
-                .context(format!(
-                    "Can't find graph execution context with ID = {}",
-                    graph_execution_context.rep()
-                ))?;
+                .expect(
+                    format!(
+                        "Can't find graph execution context with ID = {}",
+                        graph_execution_context.rep()
+                    )
+                    .as_str(),
+                );
             let input_name = execution_context
                 .cnn_network
                 .get_input_name(index)
-                .context(format!("Can't find input with name = {}", index))?;
+                .expect(format!("Can't find input with name = {}", index).as_str());
-            let res = match execution_context.infer_request.set_blob(&input_name, &blob) {
+            match execution_context.infer_request.set_blob(&input_name, &blob) {
                 Ok(res) => Ok(res),
-                Err(err) => Err(self.new(
-                    ErrorCode::RuntimeError,
-                    format!("Inference error = {:?}", err.to_string()),
-                )?),
-            };
-            Ok(res)
+                Err(err) => Err(self.new(
+                    ErrorCode::RuntimeError,
+                    format!("Inference error = {}", err),
+                )),
+            }
         }
 
         fn compute(
             &mut self,
             graph_execution_context: Resource<GraphExecutionContext>,
-        ) -> Result<Result<(), Resource<errors::Error>>, anyhow::Error> {
+        ) -> Result<(), Resource<errors::Error>> {
             let graph_execution = self
                 .executions
                 .get_mut(graph_execution_context.rep())
-                .ok_or(anyhow!(format!(
-                    "Can't find graph execution context with ID = {}",
-                    graph_execution_context.rep()
-                )))?;
+                .ok_or_else(|| {
+                    MLHostImpl::new_error(
+                        &mut self.errors,
+                        ErrorCode::RuntimeError,
+                        format!(
+                            "Can't find graph execution context with ID = {}",
+                            graph_execution_context.rep()
+                        ),
+                    )
+                })?;
 
             match graph_execution.infer_request.infer() {
-                Ok(..) => Ok(Ok(())),
-                Err(err) => Ok(Err(self.new(
-                    ErrorCode::RuntimeError,
-                    format!("Inference error = {:?}", err.to_string()),
-                )?)),
+                Ok(..) => Ok(()),
+                Err(err) => Err(MLHostImpl::new_error(
+                    &mut self.errors,
+                    ErrorCode::RuntimeError,
+                    format!("Inference error = {}", err),
+                )),
             }
         }
 
         fn get_output(
@@ -336,70 +368,41 @@
             &mut self,
             graph_execution_context: Resource<GraphExecutionContext>,
             input_name: String,
-        ) -> Result<Result<Resource<tensor::Tensor>, Resource<errors::Error>>, anyhow::Error>
+        ) -> Result<Resource<tensor::Tensor>, Resource<errors::Error>>
         {
-            let index = input_name
-                .parse::<usize>()
-                .context("Can't parse {} to usize for input_name")?;
+
             let graph_execution = self
                 .executions
                 .get_mut(graph_execution_context.rep())
-                .ok_or(anyhow!(format!(
-                    "Can't find graph execution context with ID = {}",
-                    graph_execution_context.rep()
-                )))?;
-            let output_name = graph_execution
-                .cnn_network
-                .get_output_name(index)
-                .context("Can't find output name for ID = {index}")?;
-            let blob = graph_execution
-                .infer_request
-                .get_blob(&output_name)
-                .context("Can't get blob for output name = {output_name}")?;
-            let tensor_desc = blob.tensor_desc().context("Can't get blob description")?;
-            let buffer = blob
-                .buffer()
-                .context("Can't get blob buffer")?
-                .iter()
-                .map(|&d| d as u8)
-                .collect::<Vec<u8>>();
-            let tensor_dimensions = tensor_desc
-                .dims()
-                .iter()
-                .map(|&d| d as u32)
-                .collect::<Vec<u32>>();
-            let tensor = TensorInternalData {
-                tensor_dimensions: tensor_dimensions,
-                tensor_type: map_precision_to_tensor_type(tensor_desc.precision()),
-                tensor_data: buffer,
-            };
-            match self
-                .tensors
-                .push(tensor)
-                .map(Resource::<tensor::Tensor>::new_own)
-            {
-                Ok(t) => {
-                    return Ok(Ok(t));
-                }
-                Err(_) => {
-                    return Ok(Err(self.new(
-                        ErrorCode::RuntimeError,
-                        format!("Can't create tensor for get_output"),
-                    )?))
-                }
-            }
+                .expect(
+                    format!(
+                        "Can't find graph execution context with ID = {}",
+                        graph_execution_context.rep()
+                    )
+                    .as_str(),
+                );
+
+            match MLHostImpl::get_output_internal(graph_execution, input_name) {
+                Ok(tensor) => self
+                    .tensors
+                    .push(tensor)
+                    .map(Resource::<tensor::Tensor>::new_own)
+                    .map_err(|_| {
+                        self.new(
+                            ErrorCode::RuntimeError,
+                            "Can't create tensor for get_output".to_string(),
+                        )
+                    }),
+                Err(err) => Err(MLHostImpl::new_error(
+                    &mut self.errors,
+                    ErrorCode::RuntimeError,
+                    err,
+                )),
+            }
         }
 
         fn drop(
             &mut self,
             execution: Resource<GraphExecutionContext>,
-        ) -> Result<(), anyhow::Error> {
+        ) -> std::result::Result<(), anyhow::Error> {
             let id = execution.rep();
             self.executions
                 .remove(id)
-                .context("{Can't drow GraphExecutionContext with id = {id}")?;
-            Ok(())
+                .context(format!("Can't drop GraphExecutionContext with ID = {id}"))
+                .map(|_| ())
         }
     }
 
     impl graph::Host for MLHostImpl {
@@ -410,12 +413,16 @@
         fn load(
             &mut self,
             graph: Vec<GraphBuilder>,
             graph_encoding: GraphEncoding,
             target: ExecutionTarget,
-        ) -> Result<Result<Resource<Graph>, Resource<errors::Error>>, anyhow::Error> {
+        ) -> Result<Resource<Graph>, Resource<errors::Error>> {
             if graph.len() != 2 {
-                return Err(anyhow!("Expected 2 elements in graph builder vector"));
+                return Err(MLHostImpl::new_error(
+                    &mut self.errors,
+                    ErrorCode::RuntimeError,
+                    "Expected 2 elements in graph builder vector".to_string(),
+                ));
             }
             if graph_encoding != GraphEncoding::Openvino {
-                return Err(anyhow!("Only OpenVINO encoding is supported"));
+                return Err(MLHostImpl::new_error(
+                    &mut self.errors,
+                    ErrorCode::RuntimeError,
+                    "Only OpenVINO encoding is supported".to_string(),
+                ));
             }
             // Read the guest array.
         let graph_internal_data = GraphInternalData {
@@ -425,7 +432,7 @@
         };
         match self.graphs.push(graph_internal_data) {
             Ok(graph_rep) => {
-                return Ok(Ok(Resource::<Graph>::new_own(graph_rep)));
+                return Ok(Resource::<Graph>::new_own(graph_rep));
             }
             Err(err) => {
                 match self.errors.push(ErrorInternalData {
                     code: ErrorCode::RuntimeError,
                     message: format!("{:?}", err),
                 }) {
                     Ok(error_rep) => {
-                        return Ok(Err(Resource::<errors::Error>::new_own(error_rep)));
+                        return Err(Resource::<errors::Error>::new_own(error_rep));
                     }
                     Err(err) => {
-                        return Err(anyhow!("Can't create internal error for {:?}", err));
+                        panic!("Can't create internal error for {:?}", err);
                    }
                 }
             }
         }
     }
         fn load_by_name(
             &mut self,
             _graph: String,
-        ) -> Result<Result<Resource<Graph>, Resource<errors::Error>>, anyhow::Error> {
-            Err(anyhow!("[graph::Host] fn load_by_name -> Not implemented"))
+        ) -> Result<Resource<Graph>, Resource<errors::Error>> {
+            panic!("[graph::Host] fn load_by_name -> Not implemented");
         }
     }