Add device type #108

Merged 2 commits on May 17, 2024
8 changes: 5 additions & 3 deletions crates/openvino/src/core.rs
@@ -4,11 +4,12 @@
 use crate::error::LoadingError;
 use crate::{cstr, drop_using_function, try_unsafe, util::Result};
 use crate::{model::CompiledModel, Model};
-use crate::{SetupError, Tensor};
+use crate::{DeviceType, SetupError, Tensor};
 use openvino_sys::{
     self, ov_core_compile_model, ov_core_create, ov_core_create_with_config, ov_core_free,
     ov_core_read_model, ov_core_read_model_from_memory_buffer, ov_core_t,
 };
+use std::ffi::CString;

 /// See [`Core`](https://docs.openvino.ai/2023.3/api/c_cpp_api/group__ov__core__c__api.html).
 pub struct Core {
@@ -68,13 +69,14 @@ impl Core {
     }

     /// Compile a model to `CompiledModel`.
-    pub fn compile_model(&mut self, model: &Model, device: &str) -> Result<CompiledModel> {
+    pub fn compile_model(&mut self, model: &Model, device: DeviceType) -> Result<CompiledModel> {
+        let device: CString = device.into();
         let mut compiled_model = std::ptr::null_mut();
         let num_property_args = 0;
         try_unsafe!(ov_core_compile_model(
             self.ptr,
             model.as_ptr(),
-            cstr!(device),
+            device.as_ptr(),
             num_property_args,
             std::ptr::addr_of_mut!(compiled_model)
         ))?;
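For callers, the breaking change is the second parameter: `compile_model` now takes a typed `DeviceType` instead of a raw `&str`. A minimal sketch of the updated call site, assuming a `core: Core` and a loaded `model: Model` are already in scope:

    // Previously: core.compile_model(&model, "CPU")?
    let compiled = core.compile_model(&model, DeviceType::CPU)?;
    // A free-form device string such as "GPU.1" still works through the
    // From<&str> impl added below; unrecognized names become DeviceType::Other.
    let compiled = core.compile_model(&model, DeviceType::from("GPU.1"))?;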
87 changes: 87 additions & 0 deletions crates/openvino/src/device_type.rs
@@ -0,0 +1,87 @@
+use std::borrow::Cow;
+use std::convert::Infallible;
+use std::ffi::CString;
+use std::fmt::{Display, Formatter};
+use std::str::FromStr;
+
+/// `DeviceType` represents accelerator devices.
+#[derive(Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]
+pub enum DeviceType<'a> {
+    /// [CPU Device](https://docs.openvino.ai/2024/openvino-workflow/running-inference/inference-devices-and-modes/cpu-device.html)
+    CPU,
+    /// [GPU Device](https://docs.openvino.ai/2024/openvino-workflow/running-inference/inference-devices-and-modes/gpu-device.html)
+    GPU,
+    /// [NPU Device](https://docs.openvino.ai/2024/openvino-workflow/running-inference/inference-devices-and-modes/npu-device.html)
+    NPU,
+    /// [GNA Device](https://docs.openvino.ai/2023.3/openvino_docs_OV_UG_supported_plugins_GNA.html)
+    #[deprecated = "Deprecated since OpenVINO 2024.0; use NPU device instead"]
+    GNA,
+    /// Arbitrary device.
+    Other(Cow<'a, str>),
+}
+
+impl DeviceType<'_> {
+    /// Creates a device type with owned string data.
+    pub fn to_owned(&self) -> DeviceType<'static> {
+        match self {
+            DeviceType::CPU => DeviceType::CPU,
+            DeviceType::GPU => DeviceType::GPU,
+            DeviceType::NPU => DeviceType::NPU,
+            #[allow(deprecated)]
+            DeviceType::GNA => DeviceType::GNA,
+            DeviceType::Other(s) => DeviceType::Other(Cow::Owned(s.clone().into_owned())),
+        }
+    }
+}
+
+impl AsRef<str> for DeviceType<'_> {
+    fn as_ref(&self) -> &str {
+        match self {
+            DeviceType::CPU => "CPU",
+            DeviceType::GPU => "GPU",
+            DeviceType::NPU => "NPU",
+            #[allow(deprecated)]
+            DeviceType::GNA => "GNA",
+            DeviceType::Other(s) => s,
+        }
+    }
+}
+
+impl<'a> From<&'a DeviceType<'a>> for &'a str {
+    fn from(value: &'a DeviceType) -> Self {
+        value.as_ref()
+    }
+}
+
+impl<'a> From<DeviceType<'a>> for CString {
+    fn from(value: DeviceType) -> Self {
+        CString::new(value.as_ref()).expect("a valid C string")
+    }
+}
+
+impl<'a> From<&'a str> for DeviceType<'a> {
+    fn from(s: &'a str) -> Self {
+        match s {
+            "CPU" => DeviceType::CPU,
+            "GPU" => DeviceType::GPU,
+            "NPU" => DeviceType::NPU,
+            #[allow(deprecated)]
+            "GNA" => DeviceType::GNA,
+            s => DeviceType::Other(Cow::Borrowed(s)),
+        }
+    }
+}
+
+impl FromStr for DeviceType<'static> {
+    type Err = Infallible;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        Ok(DeviceType::from(s).to_owned())
+    }
+}
+
+impl Display for DeviceType<'_> {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        f.write_str(self.into())
+    }
+}
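The conversions are designed to round-trip: `From<&str>` borrows, `FromStr` and `to_owned` upgrade to a `'static` value, and `Display`/`CString` turn the device back into the string the C API expects. A small sketch of how they compose (not part of the diff; "MULTI:CPU,GPU" is just an illustrative OpenVINO device spec):

    use openvino::DeviceType;
    use std::borrow::Cow;
    use std::ffi::CString;

    // An unrecognized name becomes DeviceType::Other, borrowing the input.
    let borrowed = DeviceType::from("MULTI:CPU,GPU");
    assert_eq!(borrowed, DeviceType::Other(Cow::Borrowed("MULTI:CPU,GPU")));

    // FromStr is infallible, so unwrap never panics; the result owns its data.
    let owned: DeviceType<'static> = "GPU".parse().unwrap();
    assert_eq!(owned.to_string(), "GPU");

    // The CString conversion is what compile_model now performs internally.
    let ffi: CString = owned.into();
    assert_eq!(ffi.as_bytes(), b"GPU");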
2 changes: 2 additions & 0 deletions crates/openvino/src/lib.rs
@@ -26,6 +26,7 @@
 )]

 mod core;
+mod device_type;
 mod dimension;
 mod element_type;
 mod error;
@@ -42,6 +43,7 @@ mod tensor;
 mod util;

 pub use crate::core::Core;
+pub use device_type::DeviceType;
 pub use dimension::Dimension;
 pub use element_type::ElementType;
 pub use error::{InferenceError, LoadingError, SetupError};
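The re-export makes the new type reachable from the crate root, which is what the updated tests below rely on:

    use openvino::{Core, DeviceType};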
6 changes: 4 additions & 2 deletions crates/openvino/tests/classify-alexnet.rs
@@ -5,7 +5,9 @@ mod util;

 use anyhow::Ok;
 use fixtures::alexnet::Fixture;
-use openvino::{prepostprocess, Core, ElementType, Layout, ResizeAlgorithm, Shape, Tensor};
+use openvino::{
+    prepostprocess, Core, DeviceType, ElementType, Layout, ResizeAlgorithm, Shape, Tensor,
+};
 use std::fs;
 use util::{Prediction, Predictions};

@@ -45,7 +47,7 @@ fn classify_alexnet() -> anyhow::Result<()> {
     let new_model = pre_post_process.build_new_model()?;

     // Compile the model and infer the results.
-    let mut executable_model = core.compile_model(&new_model, "CPU")?;
+    let mut executable_model = core.compile_model(&new_model, DeviceType::CPU)?;
     let mut infer_request = executable_model.create_infer_request()?;
     infer_request.set_tensor("data", &tensor)?;
     infer_request.infer()?;
6 changes: 4 additions & 2 deletions crates/openvino/tests/classify-inception.rs
@@ -5,7 +5,9 @@ mod util;

 use anyhow::Ok;
 use fixtures::inception::Fixture;
-use openvino::{prepostprocess, Core, ElementType, Layout, ResizeAlgorithm, Shape, Tensor};
+use openvino::{
+    prepostprocess, Core, DeviceType, ElementType, Layout, ResizeAlgorithm, Shape, Tensor,
+};
 use std::fs;
 use util::{Prediction, Predictions};

@@ -42,7 +44,7 @@ fn classify_inception() -> anyhow::Result<()> {
     let new_model = pre_post_process.build_new_model()?;

     // Compile the model and infer the results.
-    let mut executable_model = core.compile_model(&new_model, "CPU")?;
+    let mut executable_model = core.compile_model(&new_model, DeviceType::CPU)?;
     let mut infer_request = executable_model.create_infer_request()?;
     infer_request.set_tensor("input", &tensor)?;
     infer_request.infer()?;
6 changes: 4 additions & 2 deletions crates/openvino/tests/classify-mobilenet.rs
@@ -5,7 +5,9 @@ mod fixtures;
 mod util;

 use fixtures::mobilenet::Fixture;
-use openvino::{prepostprocess, Core, ElementType, Layout, ResizeAlgorithm, Shape, Tensor};
+use openvino::{
+    prepostprocess, Core, DeviceType, ElementType, Layout, ResizeAlgorithm, Shape, Tensor,
+};
 use std::fs;
 use util::{Prediction, Predictions};

@@ -45,7 +47,7 @@ fn classify_mobilenet() -> anyhow::Result<()> {
     let new_model = pre_post_process.build_new_model()?;

     // Compile the model and infer the results.
-    let mut executable_model = core.compile_model(&new_model, "CPU")?;
+    let mut executable_model = core.compile_model(&new_model, DeviceType::CPU)?;
     let mut infer_request = executable_model.create_infer_request()?;
     infer_request.set_tensor("input", &tensor)?;
     infer_request.infer()?;
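All three tests hardcode `DeviceType::CPU`, but because `FromStr` is infallible the device could just as easily come from configuration. A hypothetical sketch, reusing the `core` and `new_model` bindings from the tests (the `OPENVINO_DEVICE` environment variable is an invention for illustration, not part of this PR):

    use openvino::DeviceType;
    use std::str::FromStr;

    // Fall back to CPU when the (hypothetical) variable is unset.
    let device = match std::env::var("OPENVINO_DEVICE") {
        Ok(name) => DeviceType::from_str(&name).unwrap(), // Err = Infallible
        Err(_) => DeviceType::CPU,
    };
    let mut executable_model = core.compile_model(&new_model, device)?;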