Skip to content

Commit

Permalink
refactor: error handling
Browse files Browse the repository at this point in the history
  • Loading branch information
McPatate committed Jul 10, 2024
1 parent 1f70756 commit 1f0afff
Show file tree
Hide file tree
Showing 5 changed files with 48 additions and 23 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

7 changes: 5 additions & 2 deletions crates/lsp-ai/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,16 @@ tokio = { version = "1.36.0", features = ["rt-multi-thread", "time"] }
indexmap = "2.2.5"
async-trait = "0.1.78"
tree-sitter = "0.22"
utils-tree-sitter = { path = "../utils-tree-sitter", features = ["all"], version = "0.1.0" }
utils-tree-sitter = { path = "../utils-tree-sitter", features = [
"all",
], version = "0.1.0" }
splitter-tree-sitter = { path = "../splitter-tree-sitter", version = "0.1.0" }
text-splitter = { version = "0.13.3" }
md5 = "0.7.0"
thiserror = "1"

[build-dependencies]
cc="*"
cc = "*"

[features]
default = []
Expand Down
46 changes: 27 additions & 19 deletions crates/lsp-ai/src/config.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,21 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;

// Typed error for configuration parsing and validation. `thiserror` derives
// `Display` (from the `#[error]` strings) and `std::error::Error`.
#[derive(thiserror::Error, Debug)]
pub(crate) enum ConfigError {
// The `completion` section is absent from the config but a
// completion-specific setting was requested.
#[error("completion is disabled")]
CompletionDisabled,
// A model name referenced by the config has no entry in the
// `models` map; carries the missing model's name.
#[error("`{0}` model not found in `models` config")]
ModelNotFound(String),
// No `initializationOptions` were supplied and lsp-ai has no
// built-in default configuration to fall back on.
#[error("lsp-ai does not currently provide a default configuration. Please pass a configuration. See https://github.com/SilasMarvin/lsp-ai for configuration options and examples")]
NoDefaultConfig,
// The server configuration value was not a JSON object.
#[error("server configuration must be a valid JSON object")]
NotJson,
// Wrapper for serde_json failures; `#[from]` lets `?` convert them
// automatically.
#[error("serde json error: {0}")]
SerdeJson(#[from] serde_json::Error),
}

pub(crate) type Kwargs = HashMap<String, Value>;

const fn max_requests_per_second_default() -> f32 {
Expand Down Expand Up @@ -315,15 +328,15 @@ pub struct Config {
}

impl Config {
pub fn new(mut args: Value) -> Result<Self> {
pub fn new(mut args: Value) -> Result<Self, ConfigError> {
// Validate that the models specified are there so we can unwrap
let configuration_args = args
.as_object_mut()
.context("Server configuration must be a JSON object")?
.ok_or(ConfigError::NotJson)?
.remove("initializationOptions");
let valid_args = match configuration_args {
Some(configuration_args) => serde_json::from_value(configuration_args)?,
None => anyhow::bail!("lsp-ai does not currently provide a default configuration. Please pass a configuration. See https://github.com/SilasMarvin/lsp-ai for configuration options and examples"),
None => return Err(ConfigError::NoDefaultConfig),
};
let client_params: ValidClientParams = serde_json::from_value(args)?;
Ok(Self {
Expand All @@ -344,24 +357,19 @@ impl Config {
self.config.completion.as_ref().map(|x| &x.post_process)
}

pub fn get_completion_transformer_max_requests_per_second(&self) -> anyhow::Result<f32> {
pub fn get_completion_transformer_max_requests_per_second(&self) -> Result<f32, ConfigError> {
let completion_model = &self
.config
.completion
.as_ref()
.ok_or(ConfigError::CompletionDisabled)?
.model;
match &self
.config
.models
.get(
&self
.config
.completion
.as_ref()
.context("Completions is not enabled")?
.model,
)
.with_context(|| {
format!(
"`{}` model not found in `models` config",
&self.config.completion.as_ref().unwrap().model
)
})? {
.get(completion_model)
.ok_or_else(|| ConfigError::ModelNotFound(completion_model.to_owned()))?
{
#[cfg(feature = "llama_cpp")]
ValidModel::LLaMACPP(llama_cpp) => Ok(llama_cpp.max_requests_per_second),
ValidModel::OpenAI(open_ai) => Ok(open_ai.max_requests_per_second),
Expand Down
13 changes: 13 additions & 0 deletions crates/lsp-ai/src/error.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
use crate::config::ConfigError;

// Top-level application error. Each variant wraps a lower-level error via
// `#[from]`, so `?` converts automatically; `thiserror` derives `Display`
// (from the `#[error]` strings) and `std::error::Error`.
#[derive(thiserror::Error, Debug)]
pub(crate) enum Error {
// Configuration parsing/validation failure (see config::ConfigError).
#[error("config error: {0}")]
Config(#[from] ConfigError),
// Underlying I/O failure.
#[error("io error: {0}")]
Io(#[from] std::io::Error),
// JSON (de)serialization failure.
#[error("serde json error: {0}")]
SerdeJson(#[from] serde_json::Error),
}

// Crate-wide result alias that defaults the error type to `Error`.
pub(crate) type Result<T> = std::result::Result<T, Error>;
4 changes: 2 additions & 2 deletions crates/lsp-ai/src/main.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
use anyhow::Result;

use lsp_server::{Connection, ExtractError, Message, Notification, Request, RequestId};
use lsp_types::{
request::Completion, CompletionOptions, DidChangeTextDocumentParams, DidOpenTextDocumentParams,
Expand All @@ -16,6 +14,7 @@ use tracing_subscriber::{EnvFilter, FmtSubscriber};
mod config;
mod crawl;
mod custom_requests;
mod error;
mod memory_backends;
mod memory_worker;
mod splitters;
Expand All @@ -27,6 +26,7 @@ mod utils;

use config::Config;
use custom_requests::generation::Generation;
use error::Result;
use memory_backends::MemoryBackend;
use transformer_backends::TransformerBackend;
use transformer_worker::{CompletionRequest, GenerationRequest, WorkerRequest};
Expand Down

0 comments on commit 1f0afff

Please sign in to comment.