diff --git a/Cargo.lock b/Cargo.lock
index eec0eea85a26..d446e72ab678 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3877,6 +3877,7 @@ version = "23.0.0"
 dependencies = [
  "anyhow",
  "cap-std",
+ "libtest-mimic",
  "openvino",
  "ort",
  "test-programs-artifacts",
diff --git a/Cargo.toml b/Cargo.toml
index b2b8fcf8dbc7..4a3f1158044d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -112,7 +112,7 @@ cranelift-codegen = { workspace = true }
 cranelift-reader = { workspace = true }
 toml = { workspace = true }
 similar = { workspace = true }
-libtest-mimic = "0.7.0"
+libtest-mimic = { workspace = true }
 capstone = { workspace = true }
 object = { workspace = true, features = ['std'] }
 wasmtime-test-macros = { path = "crates/test-macros" }
@@ -319,6 +319,7 @@ humantime = "2.0.0"
 postcard = { version = "1.0.8", default-features = false, features = ['alloc'] }
 criterion = { version = "0.5.0", default-features = false, features = ["html_reports", "rayon"] }
 rustc-hash = "1.1.0"
+libtest-mimic = "0.7.0"
 
 # =============================================================================
 #
diff --git a/crates/test-programs/src/bin/nn_image_classification.rs b/crates/test-programs/src/bin/nn_image_classification.rs
index af2b40c1ff03..5815503c3f76 100644
--- a/crates/test-programs/src/bin/nn_image_classification.rs
+++ b/crates/test-programs/src/bin/nn_image_classification.rs
@@ -1,17 +1,17 @@
-use anyhow::Result;
+use anyhow::{Context, Result};
 use std::fs;
 use test_programs::nn::{classify, sort_results};
 use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding};
 
 pub fn main() -> Result<()> {
     let xml = fs::read("fixture/model.xml")
-        .expect("the model file to be mapped to the fixture directory");
+        .context("the model file to be mapped to the fixture directory")?;
     let weights = fs::read("fixture/model.bin")
-        .expect("the weights file to be mapped to the fixture directory");
+        .context("the weights file to be mapped to the fixture directory")?;
     let graph = GraphBuilder::new(GraphEncoding::Openvino, ExecutionTarget::CPU)
         .build_from_bytes([&xml, &weights])?;
     let tensor = fs::read("fixture/tensor.bgr")
-        .expect("the tensor file to be mapped to the fixture directory");
+        .context("the tensor file to be mapped to the fixture directory")?;
     let results = classify(graph, tensor)?;
     let top_five = &sort_results(&results)[..5];
     println!("found results, sorted top 5: {:?}", top_five);
diff --git a/crates/test-programs/src/bin/nn_image_classification_named.rs b/crates/test-programs/src/bin/nn_image_classification_named.rs
index cac723d085fe..9b75a5afb4c6 100644
--- a/crates/test-programs/src/bin/nn_image_classification_named.rs
+++ b/crates/test-programs/src/bin/nn_image_classification_named.rs
@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{Context, Result};
 use std::fs;
 use test_programs::nn::{classify, sort_results};
 use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding};
@@ -7,7 +7,7 @@ pub fn main() -> Result<()> {
     let graph = GraphBuilder::new(GraphEncoding::Openvino, ExecutionTarget::CPU)
         .build_from_cache("fixtures")?;
     let tensor = fs::read("fixture/tensor.bgr")
-        .expect("the tensor file to be mapped to the fixture directory");
+        .context("the tensor file to be mapped to the fixture directory")?;
     let results = classify(graph, tensor)?;
     let top_five = &sort_results(&results)[..5];
     println!("found results, sorted top 5: {:?}", top_five);
diff --git a/crates/test-programs/src/bin/nn_image_classification_onnx.rs b/crates/test-programs/src/bin/nn_image_classification_onnx.rs
index df8f961c7516..abb77d0e7339 100644
--- a/crates/test-programs/src/bin/nn_image_classification_onnx.rs
+++ b/crates/test-programs/src/bin/nn_image_classification_onnx.rs
@@ -1,15 +1,15 @@
-use anyhow::Result;
+use anyhow::{Context, Result};
 use std::fs;
 use test_programs::nn::{classify, sort_results};
 use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding};
 
 pub fn main() -> Result<()> {
     let model = fs::read("fixture/model.onnx")
-        .expect("the model file to be mapped to the fixture directory");
+        .context("the model file to be mapped to the fixture directory")?;
     let graph =
         GraphBuilder::new(GraphEncoding::Onnx, ExecutionTarget::CPU).build_from_bytes([&model])?;
     let tensor = fs::read("fixture/000000062808.rgb")
-        .expect("the tensor file to be mapped to the fixture directory");
+        .context("the tensor file to be mapped to the fixture directory")?;
     let results = classify(graph, tensor)?;
     let top_five = &sort_results(&results)[..5];
     // 963 is meat loaf, meatloaf.
diff --git a/crates/test-programs/src/bin/nn_image_classification_winml.rs b/crates/test-programs/src/bin/nn_image_classification_winml.rs
index 179d15ded079..0dc7e8843525 100644
--- a/crates/test-programs/src/bin/nn_image_classification_winml.rs
+++ b/crates/test-programs/src/bin/nn_image_classification_winml.rs
@@ -1,14 +1,13 @@
-use anyhow::Result;
+use anyhow::{Context, Result};
 use std::fs;
 use test_programs::nn::{classify, sort_results};
-
 use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding};
 
 pub fn main() -> Result<()> {
     let graph = GraphBuilder::new(GraphEncoding::Onnx, ExecutionTarget::CPU)
         .build_from_cache("mobilenet")?;
     let tensor = fs::read("fixture/kitten.rgb")
-        .expect("the tensor file to be mapped to the fixture directory");
+        .context("the tensor file to be mapped to the fixture directory")?;
     let results = classify(graph, tensor)?;
     let top_five = &sort_results(&results)[..5];
     println!("found results, sorted top 5: {:?}", top_five);
diff --git a/crates/wasi-nn/Cargo.toml b/crates/wasi-nn/Cargo.toml
index 43dba88519c8..7390c1e33146 100644
--- a/crates/wasi-nn/Cargo.toml
+++ b/crates/wasi-nn/Cargo.toml
@@ -29,15 +29,14 @@ openvino = { version = "0.6.0", features = [
   "runtime-linking",
 ], optional = true }
 
-ort = { version = "2.0.0-rc.0", default-features = false, features = ["copy-dylibs", "download-binaries"], optional = true }
+ort = { version = "2.0.0-rc.0", default-features = false, features = [
+  "copy-dylibs",
+  "download-binaries",
+], optional = true }
 
 [target.'cfg(windows)'.dependencies.windows]
 version = "0.52"
-features = [
-  "AI_MachineLearning",
-  "Storage_Streams",
-  "Foundation_Collections",
-]
+features = ["AI_MachineLearning", "Storage_Streams", "Foundation_Collections"]
 optional = true
 
 [build-dependencies]
@@ -45,6 +44,7 @@ walkdir = { workspace = true }
 
 [dev-dependencies]
 cap-std = { workspace = true }
+libtest-mimic = { workspace = true }
 test-programs-artifacts = { workspace = true }
 wasi-common = { workspace = true, features = ["sync"] }
 wasmtime = { workspace = true, features = ["cranelift"] }
@@ -57,3 +57,7 @@ openvino = ["dep:openvino"]
 onnx = ["dep:ort"]
 # winml is only available on Windows 10 1809 and later.
 winml = ["dep:windows"]
+
+[[test]]
+name = "test-programs"
+harness = false
diff --git a/crates/wasi-nn/src/lib.rs b/crates/wasi-nn/src/lib.rs
index e66c59fb2a05..71d089d07489 100644
--- a/crates/wasi-nn/src/lib.rs
+++ b/crates/wasi-nn/src/lib.rs
@@ -4,7 +4,6 @@ mod registry;
 pub mod backend;
 pub use ctx::{preload, WasiNnCtx};
 pub use registry::{GraphRegistry, InMemoryRegistry};
-pub mod testing;
 pub mod wit;
 pub mod witx;
 
diff --git a/crates/wasi-nn/src/testing.rs b/crates/wasi-nn/src/testing.rs
deleted file mode 100644
index da0360898da5..000000000000
--- a/crates/wasi-nn/src/testing.rs
+++ /dev/null
@@ -1,173 +0,0 @@
-//! This is testing-specific code--it is public only so that it can be
-//! accessible both in unit and integration tests.
-//!
-//! This module checks:
-//! - that OpenVINO can be found in the environment
-//! - that WinML is available
-//! - that some ML model artifacts can be downloaded and cached.
-
-#[allow(unused_imports)]
-use anyhow::{anyhow, Context, Result};
-use std::{
-    env, fs,
-    path::{Path, PathBuf},
-    process::Command,
-    sync::Mutex,
-};
-
-#[cfg(all(feature = "winml", target_arch = "x86_64", target_os = "windows"))]
-use windows::AI::MachineLearning::{LearningModelDevice, LearningModelDeviceKind};
-
-/// Return the directory in which the test artifacts are stored.
-pub fn artifacts_dir() -> PathBuf {
-    PathBuf::from(env!("OUT_DIR")).join("fixtures")
-}
-
-/// Early-return from a test if the test environment is not met. If the `CI`
-/// or `FORCE_WASINN_TEST_CHECK` environment variables are set, though, this
-/// will return an error instead.
-#[macro_export]
-macro_rules! check_test {
-    () => {
-        if let Err(e) = $crate::testing::check() {
-            if std::env::var_os("CI").is_some()
-                || std::env::var_os("FORCE_WASINN_TEST_CHECK").is_some()
-            {
-                return Err(e);
-            } else {
-                println!("> ignoring test: {}", e);
-                return Ok(());
-            }
-        }
-    };
-}
-
-/// Return `Ok` if all checks pass.
-pub fn check() -> Result<()> {
-    #[cfg(all(
-        feature = "openvino",
-        target_arch = "x86_64",
-        any(target_os = "linux", target_os = "windows")
-    ))]
-    {
-        check_openvino_is_installed()?;
-    }
-    #[cfg(feature = "openvino")]
-    check_openvino_artifacts_are_available()?;
-
-    #[cfg(any(feature = "onnx", all(feature = "winml", target_os = "windows")))]
-    check_onnx_artifacts_are_available()?;
-
-    #[cfg(all(feature = "winml", target_os = "windows"))]
-    {
-        check_winml_is_available()?;
-    }
-    Ok(())
-}
-
-/// Protect `check_openvino_artifacts_are_available` from concurrent access;
-/// when running tests in parallel, we want to avoid two threads attempting to
-/// create the same directory or download the same file.
-static ARTIFACTS: Mutex<()> = Mutex::new(());
-
-/// Return `Ok` if we find a working OpenVINO installation.
-#[cfg(all(
-    feature = "openvino",
-    target_arch = "x86_64",
-    any(target_os = "linux", target_os = "windows")
-))]
-fn check_openvino_is_installed() -> Result<()> {
-    match std::panic::catch_unwind(|| println!("> found openvino version: {}", openvino::version()))
-    {
-        Ok(_) => Ok(()),
-        Err(e) => Err(anyhow!("unable to find an OpenVINO installation: {:?}", e)),
-    }
-}
-
-/// Return `Ok` if we find the cached MobileNet test artifacts; this will
-/// download the artifacts if necessary.
-#[cfg(feature = "openvino")]
-fn check_openvino_artifacts_are_available() -> Result<()> {
-    let _exclusively_retrieve_artifacts = ARTIFACTS.lock().unwrap();
-    const BASE_URL: &str =
-        "https://github.com/intel/openvino-rs/raw/main/crates/openvino/tests/fixtures/mobilenet";
-    let artifacts_dir = artifacts_dir();
-    if !artifacts_dir.is_dir() {
-        fs::create_dir(&artifacts_dir)?;
-    }
-    for (from, to) in [
-        ("mobilenet.bin", "model.bin"),
-        ("mobilenet.xml", "model.xml"),
-        ("tensor-1x224x224x3-f32.bgr", "tensor.bgr"),
-    ] {
-        let remote_url = [BASE_URL, from].join("/");
-        let local_path = artifacts_dir.join(to);
-        if !local_path.is_file() {
-            download(&remote_url, &local_path)
-                .with_context(|| "unable to retrieve test artifact")?;
-        } else {
-            println!("> using cached artifact: {}", local_path.display())
-        }
-    }
-    Ok(())
-}
-
-#[cfg(all(feature = "winml", target_os = "windows"))]
-fn check_winml_is_available() -> Result<()> {
-    match std::panic::catch_unwind(|| {
-        println!(
-            "> WinML learning device is available: {:?}",
-            LearningModelDevice::Create(LearningModelDeviceKind::Default)
-        )
-    }) {
-        Ok(_) => Ok(()),
-        Err(e) => Err(anyhow!("WinML learning device is not available: {:?}", e)),
-    }
-}
-
-#[cfg(any(feature = "onnx", all(feature = "winml", target_os = "windows")))]
-fn check_onnx_artifacts_are_available() -> Result<()> {
-    let _exclusively_retrieve_artifacts = ARTIFACTS.lock().unwrap();
-
-    const ONNX_BASE_URL: &str =
-        "https://github.com/onnx/models/raw/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/mobilenet/model/mobilenetv2-10.onnx?download=";
-
-    let artifacts_dir = artifacts_dir();
-    if !artifacts_dir.is_dir() {
-        fs::create_dir(&artifacts_dir)?;
-    }
-
-    for (from, to) in [(ONNX_BASE_URL.to_string(), "model.onnx")] {
-        let local_path = artifacts_dir.join(to);
-        if !local_path.is_file() {
-            download(&from, &local_path).with_context(|| "unable to retrieve test artifact")?;
-        } else {
-            println!("> using cached artifact: {}", local_path.display())
-        }
-    }
-
-    // Copy image from source tree to artifact directory.
-    let image_path = env::current_dir()?
-        .join("tests")
-        .join("fixtures")
-        .join("000000062808.rgb");
-    let dest_path = artifacts_dir.join("000000062808.rgb");
-    fs::copy(&image_path, &dest_path)?;
-    Ok(())
-}
-
-/// Retrieve the bytes at the `from` URL and place them in the `to` file.
-fn download(from: &str, to: &Path) -> anyhow::Result<()> {
-    let mut curl = Command::new("curl");
-    curl.arg("--location").arg(from).arg("--output").arg(to);
-    println!("> downloading: {:?}", &curl);
-    let result = curl.output().unwrap();
-    if !result.status.success() {
-        panic!(
-            "curl failed: {}\n{}",
-            result.status,
-            String::from_utf8_lossy(&result.stderr)
-        );
-    }
-    Ok(())
-}
diff --git a/crates/wasi-nn/tests/all.rs b/crates/wasi-nn/tests/all.rs
deleted file mode 100644
index 2c41f17db2e3..000000000000
--- a/crates/wasi-nn/tests/all.rs
+++ /dev/null
@@ -1,122 +0,0 @@
-//! Run the wasi-nn tests in `crates/test-programs`.
-
-use anyhow::Result;
-use std::path::Path;
-use test_programs_artifacts::*;
-use wasi_common::sync::{Dir, WasiCtxBuilder};
-use wasi_common::WasiCtx;
-use wasmtime::{Config, Engine, Linker, Module, Store};
-use wasmtime_wasi_nn::{backend, testing, Backend, InMemoryRegistry, WasiNnCtx};
-
-const PREOPENED_DIR_NAME: &str = "fixture";
-
-/// Run a wasi-nn test program. This is modeled after
-/// `crates/wasi/tests/all/main.rs` but still uses the older preview1 API for
-/// file reads.
-fn run(path: &str, backend: Backend, preload_model: bool) -> Result<()> {
-    wasmtime_wasi_nn::check_test!();
-    let path = Path::new(path);
-    let engine = Engine::new(&Config::new())?;
-    let mut linker = Linker::new(&engine);
-    wasmtime_wasi_nn::witx::add_to_linker(&mut linker, |s: &mut Ctx| &mut s.wasi_nn)?;
-    wasi_common::sync::add_to_linker(&mut linker, |s: &mut Ctx| &mut s.wasi)?;
-    let module = Module::from_file(&engine, path)?;
-    let mut store = Store::new(
-        &engine,
-        Ctx::new(&testing::artifacts_dir(), preload_model, backend)?,
-    );
-    let instance = linker.instantiate(&mut store, &module)?;
-    let start = instance.get_typed_func::<(), ()>(&mut store, "_start")?;
-    start.call(&mut store, ())?;
-    Ok(())
-}
-
-/// The host state for running wasi-nn tests.
-struct Ctx {
-    wasi: WasiCtx,
-    wasi_nn: WasiNnCtx,
-}
-
-impl Ctx {
-    fn new(preopen_dir: &Path, preload_model: bool, mut backend: Backend) -> Result<Self> {
-        // Create the WASI context.
-        let preopen_dir = Dir::open_ambient_dir(preopen_dir, cap_std::ambient_authority())?;
-        let mut builder = WasiCtxBuilder::new();
-        builder
-            .inherit_stdio()
-            .preopened_dir(preopen_dir, PREOPENED_DIR_NAME)?;
-        let wasi = builder.build();
-
-        let mut registry = InMemoryRegistry::new();
-        let mobilenet_dir = testing::artifacts_dir();
-        if preload_model {
-            registry.load((backend).as_dir_loadable().unwrap(), &mobilenet_dir)?;
-        }
-        let wasi_nn = WasiNnCtx::new([backend.into()], registry.into());
-
-        Ok(Self { wasi, wasi_nn })
-    }
-}
-
-// Check that every wasi-nn test in `crates/test-programs` has its
-// manually-added `#[test]` function.
-macro_rules! assert_test_exists {
-    ($name:ident) => {
-        #[allow(unused_imports)]
-        use self::$name as _;
-    };
-}
-foreach_nn!(assert_test_exists);
-
-#[cfg_attr(
-    not(all(
-        target_arch = "x86_64",
-        any(target_os = "linux", target_os = "windows")
-    )),
-    ignore
-)]
-#[test]
-fn nn_image_classification() {
-    let backend = Backend::from(backend::openvino::OpenvinoBackend::default());
-    run(NN_IMAGE_CLASSIFICATION, backend, false).unwrap()
-}
-
-#[cfg_attr(
-    not(all(
-        target_arch = "x86_64",
-        any(target_os = "linux", target_os = "windows")
-    )),
-    ignore
-)]
-#[test]
-fn nn_image_classification_named() {
-    let backend = Backend::from(backend::openvino::OpenvinoBackend::default());
-    run(NN_IMAGE_CLASSIFICATION_NAMED, backend, true).unwrap()
-}
-
-#[cfg_attr(not(all(feature = "winml", target_os = "windows")), ignore)]
-#[test]
-fn nn_image_classification_winml() {
-    #[cfg(all(feature = "winml", target_os = "windows"))]
-    {
-        let backend = Backend::from(backend::winml::WinMLBackend::default());
-        run(NN_IMAGE_CLASSIFICATION_ONNX, backend, true).unwrap()
-    }
-}
-
-#[cfg_attr(
-    not(all(
-        feature = "onnx",
-        any(target_arch = "x86_64", target_arch = "aarch64"),
-        any(target_os = "linux", target_os = "windows", target_os = "macos")
-    )),
-    ignore
-)]
-#[test]
-fn nn_image_classification_onnx() {
-    #[cfg(feature = "onnx")]
-    {
-        let backend = Backend::from(backend::onnxruntime::OnnxBackend::default());
-        run(NN_IMAGE_CLASSIFICATION_ONNX, backend, false).unwrap()
-    }
-}
diff --git a/crates/wasi-nn/tests/check/mod.rs b/crates/wasi-nn/tests/check/mod.rs
new file mode 100644
index 000000000000..ffdc099b3009
--- /dev/null
+++ b/crates/wasi-nn/tests/check/mod.rs
@@ -0,0 +1,49 @@
+//! This is testing-specific code, kept in its own module so that it can be
+//! shared by this crate's integration tests.
+//!
+//! This module checks:
+//! - that OpenVINO can be found in the environment
+//! - that WinML is available
+//! - that some ML model artifacts can be downloaded and cached.
+
+#[allow(unused_imports)]
+use anyhow::{anyhow, Context, Result};
+use std::{
+    env,
+    path::{Path, PathBuf},
+    process::Command,
+    sync::Mutex,
+};
+
+#[cfg(any(feature = "onnx", feature = "winml"))]
+pub mod onnx;
+#[cfg(feature = "openvino")]
+pub mod openvino;
+#[cfg(all(feature = "winml", target_os = "windows"))]
+pub mod winml;
+
+/// Protect `are_artifacts_available` from concurrent access; when running tests
+/// in parallel, we want to avoid two threads attempting to create the same
+/// directory or download the same file.
+pub static DOWNLOAD_LOCK: Mutex<()> = Mutex::new(());
+
+/// Return the directory in which the test artifacts are stored.
+pub fn artifacts_dir() -> PathBuf {
+    PathBuf::from(env!("OUT_DIR")).join("fixtures")
+}
+
+/// Retrieve the bytes at the `from` URL and place them in the `to` file.
+fn download(from: &str, to: &Path) -> anyhow::Result<()> {
+    let mut curl = Command::new("curl");
+    curl.arg("--location").arg(from).arg("--output").arg(to);
+    println!("> downloading: {:?}", &curl);
+    let result = curl.output().unwrap();
+    if !result.status.success() {
+        panic!(
+            "curl failed: {}\n{}",
+            result.status,
+            String::from_utf8_lossy(&result.stderr)
+        );
+    }
+    Ok(())
+}
diff --git a/crates/wasi-nn/tests/check/onnx.rs b/crates/wasi-nn/tests/check/onnx.rs
new file mode 100644
index 000000000000..b5a451e0373f
--- /dev/null
+++ b/crates/wasi-nn/tests/check/onnx.rs
@@ -0,0 +1,38 @@
+#![allow(unused)]
+
+use super::{artifacts_dir, download, DOWNLOAD_LOCK};
+use anyhow::{Context, Result};
+use std::sync::Mutex;
+use std::{env, fs};
+
+/// Return `Ok` if we find the cached MobileNet test artifacts; this will
+/// download the artifacts if necessary.
+pub fn are_artifacts_available() -> Result<()> {
+    let _exclusively_retrieve_artifacts = DOWNLOAD_LOCK.lock().unwrap();
+
+    const ONNX_BASE_URL: &str =
+        "https://github.com/onnx/models/raw/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/mobilenet/model/mobilenetv2-10.onnx?download=";
+
+    let artifacts_dir = artifacts_dir();
+    if !artifacts_dir.is_dir() {
+        fs::create_dir(&artifacts_dir)?;
+    }
+
+    for (from, to) in [(ONNX_BASE_URL.to_string(), "model.onnx")] {
+        let local_path = artifacts_dir.join(to);
+        if !local_path.is_file() {
+            download(&from, &local_path).with_context(|| "unable to retrieve test artifact")?;
+        } else {
+            println!("> using cached artifact: {}", local_path.display())
+        }
+    }
+
+    // Copy image from source tree to artifact directory.
+    let image_path = env::current_dir()?
+        .join("tests")
+        .join("fixtures")
+        .join("000000062808.rgb");
+    let dest_path = artifacts_dir.join("000000062808.rgb");
+    fs::copy(&image_path, &dest_path)?;
+    Ok(())
+}
diff --git a/crates/wasi-nn/tests/check/openvino.rs b/crates/wasi-nn/tests/check/openvino.rs
new file mode 100644
index 000000000000..fccdc678d09e
--- /dev/null
+++ b/crates/wasi-nn/tests/check/openvino.rs
@@ -0,0 +1,42 @@
+use super::{artifacts_dir, download, DOWNLOAD_LOCK};
+use anyhow::{bail, Context, Result};
+use std::fs;
+
+/// Return `Ok` if we find a working OpenVINO installation.
+pub fn is_installed() -> Result<()> {
+    match std::panic::catch_unwind(|| println!("> found openvino version: {}", openvino::version()))
+    {
+        Ok(_) => Ok(()),
+        Err(e) => bail!(
+            "unable to find an OpenVINO installation: {:?}",
+            e.downcast_ref::<String>()
+        ),
+    }
+}
+
+/// Return `Ok` if we find the cached MobileNet test artifacts; this will
+/// download the artifacts if necessary.
+pub fn are_artifacts_available() -> Result<()> {
+    let _exclusively_retrieve_artifacts = DOWNLOAD_LOCK.lock().unwrap();
+    const BASE_URL: &str =
+        "https://github.com/intel/openvino-rs/raw/main/crates/openvino/tests/fixtures/mobilenet";
+    let artifacts_dir = artifacts_dir();
+    if !artifacts_dir.is_dir() {
+        fs::create_dir(&artifacts_dir)?;
+    }
+    for (from, to) in [
+        ("mobilenet.bin", "model.bin"),
+        ("mobilenet.xml", "model.xml"),
+        ("tensor-1x224x224x3-f32.bgr", "tensor.bgr"),
+    ] {
+        let remote_url = [BASE_URL, from].join("/");
+        let local_path = artifacts_dir.join(to);
+        if !local_path.is_file() {
+            download(&remote_url, &local_path)
+                .with_context(|| "unable to retrieve test artifact")?;
+        } else {
+            println!("> using cached artifact: {}", local_path.display())
+        }
+    }
+    Ok(())
+}
diff --git a/crates/wasi-nn/tests/check/winml.rs b/crates/wasi-nn/tests/check/winml.rs
new file mode 100644
index 000000000000..fc45337a6025
--- /dev/null
+++ b/crates/wasi-nn/tests/check/winml.rs
@@ -0,0 +1,15 @@
+use anyhow::{anyhow, Result};
+use windows::AI::MachineLearning::{LearningModelDevice, LearningModelDeviceKind};
+
+/// Return `Ok` if we can use WinML.
+pub fn is_available() -> Result<()> {
+    match std::panic::catch_unwind(|| {
+        println!(
+            "> WinML learning device is available: {:?}",
+            LearningModelDevice::Create(LearningModelDeviceKind::Default)
+        )
+    }) {
+        Ok(_) => Ok(()),
+        Err(e) => Err(anyhow!("WinML learning device is not available: {:?}", e)),
+    }
+}
diff --git a/crates/wasi-nn/tests/exec/mod.rs b/crates/wasi-nn/tests/exec/mod.rs
new file mode 100644
index 000000000000..23840e7a5d3a
--- /dev/null
+++ b/crates/wasi-nn/tests/exec/mod.rs
@@ -0,0 +1,52 @@
+use crate::check::artifacts_dir;
+use anyhow::Result;
+use std::path::Path;
+use wasi_common::sync::{Dir, WasiCtxBuilder};
+use wasi_common::WasiCtx;
+use wasmtime::{Config, Engine, Linker, Module, Store};
+use wasmtime_wasi_nn::{Backend, InMemoryRegistry, WasiNnCtx};
+
+const PREOPENED_DIR_NAME: &str = "fixture";
+
+/// Run a wasi-nn test program. This is modeled after
+/// `crates/wasi/tests/all/main.rs` but still uses the older preview1 API
+/// for file reads.
+pub fn run(path: &str, backend: Backend, preload_model: bool) -> Result<()> {
+    let path = Path::new(path);
+    let engine = Engine::new(&Config::new())?;
+    let mut linker = Linker::new(&engine);
+    wasmtime_wasi_nn::witx::add_to_linker(&mut linker, |s: &mut Ctx| &mut s.wasi_nn)?;
+    wasi_common::sync::add_to_linker(&mut linker, |s: &mut Ctx| &mut s.wasi)?;
+    let module = Module::from_file(&engine, path)?;
+    let mut store = Store::new(&engine, Ctx::new(&artifacts_dir(), preload_model, backend)?);
+    let instance = linker.instantiate(&mut store, &module)?;
+    let start = instance.get_typed_func::<(), ()>(&mut store, "_start")?;
+    start.call(&mut store, ())?;
+    Ok(())
+}
+
+/// The host state for running wasi-nn tests.
+struct Ctx {
+    wasi: WasiCtx,
+    wasi_nn: WasiNnCtx,
+}
+
+impl Ctx {
+    fn new(preopen_dir: &Path, preload_model: bool, mut backend: Backend) -> Result<Self> {
+        let preopen_dir = Dir::open_ambient_dir(preopen_dir, cap_std::ambient_authority())?;
+        let mut builder = WasiCtxBuilder::new();
+        builder
+            .inherit_stdio()
+            .preopened_dir(preopen_dir, PREOPENED_DIR_NAME)?;
+        let wasi = builder.build();
+
+        let mut registry = InMemoryRegistry::new();
+        let mobilenet_dir = artifacts_dir();
+        if preload_model {
+            registry.load((backend).as_dir_loadable().unwrap(), &mobilenet_dir)?;
+        }
+        let wasi_nn = WasiNnCtx::new([backend.into()], registry.into());
+
+        Ok(Self { wasi, wasi_nn })
+    }
+}
diff --git a/crates/wasi-nn/tests/test-programs.rs b/crates/wasi-nn/tests/test-programs.rs
new file mode 100644
index 000000000000..6dfb89a90e58
--- /dev/null
+++ b/crates/wasi-nn/tests/test-programs.rs
@@ -0,0 +1,199 @@
+//! Run the wasi-nn tests in `crates/test-programs`.
+//!
+//! It may be difficult to run all tests on all platforms; we check the
+//! pre-requisites for each test dynamically (see [`check`]). Using
+//! `libtest-mimic` then allows us to dynamically ignore tests that cannot run
+//! on the current machine.
+//!
+//! There are two modes these tests run in:
+//! - "ignore if unavailable" mode: if the checks for a test fail (e.g., the
+//!   backend is not installed, test artifacts cannot download, we're on the
+//!   wrong platform), the test is ignored.
+//! - "fail if unavailable" mode: when the `CI` or `FORCE_WASINN_TEST_CHECK`
+//!   environment variables are set, any checks that fail cause the test to
+//!   fail early.
+
+mod check;
+mod exec;
+
+use anyhow::Result;
+use libtest_mimic::{Arguments, Trial};
+use std::{borrow::Cow, env};
+use test_programs_artifacts::*;
+use wasmtime_wasi_nn::{backend, Backend};
+
+fn main() -> Result<()> {
+    if cfg!(miri) {
+        return Ok(());
+    }
+
+    // Gather a list of the test-program names.
+    let mut programs = Vec::new();
+    macro_rules! add_to_list {
+        ($name:ident) => {
+            programs.push(stringify!($name));
+        };
+    }
+    foreach_nn!(add_to_list);
+
+    // Make ignored tests turn into failures.
+    let error_on_failed_check =
+        env::var_os("CI").is_some() || env::var_os("FORCE_WASINN_TEST_CHECK").is_some();
+
+    // Inform `libtest-mimic` how to run each test program.
+    let arguments = Arguments::from_args();
+    let mut trials = Vec::new();
+    for program in programs {
+        // Either ignore the test if it cannot run (i.e., downgrade `Fail` to
+        // `Ignore`) or pre-emptively fail it if `error_on_failed_check` is set.
+        let (run_test, mut check) = check_test_program(program);
+        if !error_on_failed_check {
+            check = check.downgrade_failure(); // Downgrade `Fail` to `Ignore`.
+        }
+        let should_ignore = check.is_ignore();
+        if arguments.nocapture && should_ignore {
+            println!("> ignoring {program}: {}", check.reason());
+        }
+        let trial = Trial::test(program, move || {
+            run_test().map_err(|e| format!("{:?}", e).into())
+        })
+        .with_ignored_flag(should_ignore);
+        trials.push(trial);
+    }
+
+    // Run the tests.
+    libtest_mimic::run(&arguments, trials).exit()
+}
+
+/// Return the test program to run and a check that must pass for the test to
+/// run.
+fn check_test_program(name: &str) -> (fn() -> Result<()>, IgnoreCheck) {
+    use IgnoreCheck::*;
+    match name {
+        "nn_image_classification" => (
+            nn_image_classification,
+            if !cfg!(target_arch = "x86_64") {
+                Fail("requires x86_64".into())
+            } else if !cfg!(target_os = "linux") && !cfg!(target_os = "windows") {
+                Fail("requires linux or windows".into())
+            } else if let Err(e) = check::openvino::is_installed() {
+                Fail(e.to_string().into())
+            } else {
+                Run
+            },
+        ),
+        "nn_image_classification_named" => (
+            nn_image_classification_named,
+            if !cfg!(target_arch = "x86_64") {
+                Fail("requires x86_64".into())
+            } else if !cfg!(target_os = "linux") && !cfg!(target_os = "windows") {
+                Fail("requires linux or windows".into())
+            } else if let Err(e) = check::openvino::is_installed() {
+                Fail(e.to_string().into())
+            } else {
+                Run
+            },
+        ),
+        "nn_image_classification_onnx" => (
+            nn_image_classification_onnx,
+            #[cfg(feature = "onnx")]
+            if !cfg!(target_arch = "x86_64") && !cfg!(target_arch = "aarch64") {
+                Fail("requires x86_64 or aarch64".into())
+            } else if !cfg!(target_os = "linux")
+                && !cfg!(target_os = "windows")
+                && !cfg!(target_os = "macos")
+            {
+                Fail("requires linux, windows, or macos".into())
+            } else {
+                Run
+            },
+            #[cfg(not(feature = "onnx"))]
+            Ignore("requires the `onnx` feature".into()),
+        ),
+        "nn_image_classification_winml" => (
+            nn_image_classification_winml,
+            #[cfg(all(feature = "winml", target_os = "windows"))]
+            if !cfg!(target_arch = "x86_64") {
+                Fail("requires x86_64".into())
+            } else if !cfg!(target_os = "windows") {
+                Fail("requires windows".into())
+            } else if let Err(e) = check::winml::is_available() {
+                Fail(e.to_string().into())
+            } else {
+                Run
+            },
+            #[cfg(not(all(feature = "winml", target_os = "windows")))]
+            Ignore("requires the `winml` feature on windows".into()),
+        ),
+        _ => panic!("unknown test program: {} (add to this `match`)", name),
+    }
+}
+
+fn nn_image_classification() -> Result<()> {
+    check::openvino::is_installed()?;
+    check::openvino::are_artifacts_available()?;
+    let backend = Backend::from(backend::openvino::OpenvinoBackend::default());
+    exec::run(NN_IMAGE_CLASSIFICATION, backend, false)
+}
+
+fn nn_image_classification_named() -> Result<()> {
+    check::openvino::is_installed()?;
+    check::openvino::are_artifacts_available()?;
+    let backend = Backend::from(backend::openvino::OpenvinoBackend::default());
+    exec::run(NN_IMAGE_CLASSIFICATION_NAMED, backend, true)
+}
+
+#[cfg(feature = "onnx")]
+fn nn_image_classification_onnx() -> Result<()> {
+    check::onnx::are_artifacts_available()?;
+    let backend = Backend::from(backend::onnxruntime::OnnxBackend::default());
+    exec::run(NN_IMAGE_CLASSIFICATION_ONNX, backend, false)
+}
+
+#[cfg(not(feature = "onnx"))]
+fn nn_image_classification_onnx() -> Result<()> {
+    anyhow::bail!("this test requires the `onnx` feature")
+}
+
+#[cfg(all(feature = "winml", target_os = "windows"))]
+fn nn_image_classification_winml() -> Result<()> {
+    check::winml::is_available()?;
+    check::onnx::are_artifacts_available()?;
+    let backend = Backend::from(backend::winml::WinMLBackend::default());
+    exec::run(NN_IMAGE_CLASSIFICATION_ONNX, backend, false)
+}
+
+#[cfg(not(all(feature = "winml", target_os = "windows")))]
+fn nn_image_classification_winml() -> Result<()> {
+    anyhow::bail!("this test requires the `winml` feature and only runs on windows")
+}
+
+/// Helper for keeping track of what tests should do when pre-test checks fail.
+#[derive(Clone)]
+enum IgnoreCheck {
+    Run,
+    Ignore(Cow<'static, str>),
+    Fail(Cow<'static, str>),
+}
+
+impl IgnoreCheck {
+    fn reason(&self) -> &str {
+        match self {
+            IgnoreCheck::Run => panic!("cannot get reason for `Run`"),
+            IgnoreCheck::Ignore(reason) => reason,
+            IgnoreCheck::Fail(reason) => reason,
+        }
+    }
+
+    fn downgrade_failure(self) -> Self {
+        if let IgnoreCheck::Fail(reason) = self {
+            IgnoreCheck::Ignore(reason)
+        } else {
+            self
+        }
+    }
+
+    fn is_ignore(&self) -> bool {
+        matches!(self, IgnoreCheck::Ignore(_))
+    }
+}
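For reference, here is a minimal, self-contained sketch of the `libtest-mimic` pattern that the new `tests/test-programs.rs` harness adopts: a check runs at test-harness startup and decides whether each trial is ignored or failed. It assumes `libtest-mimic = "0.7"` and a `[[test]]` target with `harness = false`; the test name and the always-failing check below are illustrative placeholders, not wasi-nn code.

```rust
use libtest_mimic::{Arguments, Failed, Trial};

fn main() {
    let arguments = Arguments::from_args();

    // A runtime pre-check, standing in for calls like
    // `check::openvino::is_installed()`; it fails here to show the ignore path.
    let check: Result<(), String> = Err("backend not installed".to_string());

    // "Fail if unavailable" mode: on CI, a failed check fails the test
    // instead of ignoring it.
    let error_on_failed_check = std::env::var_os("CI").is_some();
    let should_ignore = check.is_err() && !error_on_failed_check;

    let trials = vec![Trial::test("example_test", move || -> Result<(), Failed> {
        check.map_err(Failed::from)?;
        Ok(()) // a real trial would execute the Wasm test program here
    })
    .with_ignored_flag(should_ignore)];

    libtest_mimic::run(&arguments, trials).exit()
}
```

With `harness = false`, Cargo runs this `main` directly, so the same binary still understands the usual `cargo test` flags (filtering, `--nocapture`, and so on) via `Arguments::from_args()`, while the ignore/fail decision moves from compile-time `#[cfg_attr(..., ignore)]` attributes to runtime checks.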