// llama_core/error.rs

//! Error types for the Llama Core library.

use thiserror::Error;

5/// Error types for the Llama Core library.
6#[derive(Error, Debug)]
7pub enum LlamaCoreError {
8    /// Errors in General operation.
9    #[error("{0}")]
10    Operation(String),
11    /// Errors in Context initialization.
12    #[error("Failed to initialize computation context. Reason: {0}")]
13    InitContext(String),
14    /// Errors thrown by the wasi-nn-ggml plugin and runtime.
15    #[error("{0}")]
16    Backend(#[from] BackendError),
17    /// Errors in file not found.
18    #[error("File not found.")]
19    FileNotFound,
20}
22/// Error types for wasi-nn errors.
23#[derive(Error, Debug)]
24pub enum BackendError {
25    /// Errors in setting the input tensor.
26    #[error("{0}")]
27    SetInput(String),
28    /// Errors in the model inference.
29    #[error("{0}")]
30    Compute(String),
31    /// Errors in the model inference in the stream mode.
32    #[error("{0}")]
33    ComputeSingle(String),
34    /// Errors in getting the output tensor.
35    #[error("{0}")]
36    GetOutput(String),
37    /// Errors in getting the output tensor in the stream mode.
38    #[error("{0}")]
39    GetOutputSingle(String),
40    /// Errors in cleaning up the computation context in the stream mode.
41    #[error("{0}")]
42    FinishSingle(String),
43}