diff --git a/crates/lsp-ai/src/config.rs b/crates/lsp-ai/src/config.rs
index 89abfa4..30618d4 100644
--- a/crates/lsp-ai/src/config.rs
+++ b/crates/lsp-ai/src/config.rs
@@ -14,12 +14,12 @@ const fn true_default() -> bool {
 }
 
 #[derive(Clone, Debug, Deserialize, Serialize)]
-pub struct PostProcess {
-    pub extractor: Option<String>,
+pub(crate) struct PostProcess {
+    pub(crate) extractor: Option<String>,
     #[serde(default = "true_default")]
-    pub remove_duplicate_start: bool,
+    pub(crate) remove_duplicate_start: bool,
     #[serde(default = "true_default")]
-    pub remove_duplicate_end: bool,
+    pub(crate) remove_duplicate_end: bool,
 }
 
 impl Default for PostProcess {
@@ -34,7 +34,7 @@ impl Default for PostProcess {
 
 #[derive(Debug, Clone, Deserialize)]
 #[serde(tag = "type")]
-pub enum ValidSplitter {
+pub(crate) enum ValidSplitter {
     #[serde(rename = "tree_sitter")]
     TreeSitter(TreeSitter),
     #[serde(rename = "text_splitter")]
@@ -56,11 +56,11 @@ const fn chunk_overlap_default() -> usize {
 }
 
 #[derive(Debug, Clone, Deserialize)]
-pub struct TreeSitter {
+pub(crate) struct TreeSitter {
     #[serde(default = "chunk_size_default")]
-    pub chunk_size: usize,
+    pub(crate) chunk_size: usize,
     #[serde(default = "chunk_overlap_default")]
-    pub chunk_overlap: usize,
+    pub(crate) chunk_overlap: usize,
 }
 
 impl Default for TreeSitter {
@@ -73,39 +73,39 @@
 }
 
 #[derive(Debug, Clone, Deserialize)]
-pub struct TextSplitter {
+pub(crate) struct TextSplitter {
     #[serde(default = "chunk_size_default")]
-    pub chunk_size: usize,
+    pub(crate) chunk_size: usize,
 }
 
 #[derive(Debug, Clone, Deserialize, Default)]
-pub struct EmbeddingPrefix {
+pub(crate) struct EmbeddingPrefix {
     #[serde(default)]
-    pub storage: String,
+    pub(crate) storage: String,
     #[serde(default)]
-    pub retrieval: String,
+    pub(crate) retrieval: String,
 }
 
 #[derive(Debug, Clone, Deserialize)]
-pub struct OllamaEmbeddingModel {
+pub(crate) struct OllamaEmbeddingModel {
     // The generate endpoint, default: 'http://localhost:11434/api/embeddings'
-    pub endpoint: Option<String>,
+    pub(crate) endpoint: Option<String>,
     // The model name
-    pub model: String,
+    pub(crate) model: String,
     // The prefix to apply to the embeddings
     #[serde(default)]
-    pub prefix: EmbeddingPrefix,
+    pub(crate) prefix: EmbeddingPrefix,
 }
 
 #[derive(Debug, Clone, Deserialize)]
 #[serde(tag = "type")]
-pub enum ValidEmbeddingModel {
+pub(crate) enum ValidEmbeddingModel {
     #[serde(rename = "ollama")]
     Ollama(OllamaEmbeddingModel),
 }
 
 #[derive(Debug, Clone, Copy, Deserialize)]
-pub enum VectorDataType {
+pub(crate) enum VectorDataType {
     #[serde(rename = "f32")]
     F32,
     #[serde(rename = "binary")]
@@ -114,11 +114,11 @@
 
 #[derive(Debug, Clone, Deserialize)]
 pub(crate) struct VectorStore {
-    pub crawl: Option<Crawl>,
+    pub(crate) crawl: Option<Crawl>,
     #[serde(default)]
-    pub splitter: ValidSplitter,
-    pub embedding_model: ValidEmbeddingModel,
-    pub data_type: VectorDataType,
+    pub(crate) splitter: ValidSplitter,
+    pub(crate) embedding_model: ValidEmbeddingModel,
+    pub(crate) data_type: VectorDataType,
 }
 
 #[derive(Debug, Clone, Deserialize)]
@@ -265,20 +265,20 @@ const fn n_ctx_default() -> u32 {
 #[cfg(feature = "llama_cpp")]
 #[derive(Clone, Debug, Deserialize)]
 #[serde(deny_unknown_fields)]
-pub struct LLaMACPP {
+pub(crate) struct LLaMACPP {
     // Which model to use
-    pub repository: Option<String>,
-    pub name: Option<String>,
-    pub file_path: Option<String>,
+    pub(crate) repository: Option<String>,
+    pub(crate) name: Option<String>,
+    pub(crate) file_path: Option<String>,
     // The layers to put on the GPU
     #[serde(default = "n_gpu_layers_default")]
-    pub n_gpu_layers: u32,
+    pub(crate) n_gpu_layers: u32,
     // The context size
     #[serde(default = "n_ctx_default")]
-    pub n_ctx: u32,
+    pub(crate) n_ctx: u32,
     // The maximum requests per second
     #[serde(default = "max_requests_per_second_default")]
-    pub max_requests_per_second: f32,
+    pub(crate) max_requests_per_second: f32,
 }
 
 #[derive(Clone, Debug, Deserialize)]
@@ -349,7 +349,7 @@ pub(crate) struct Completion {
 }
 
 #[derive(Clone, Debug, Deserialize)]
-pub struct Chat {
+pub(crate) struct Chat {
     // The trigger text
     pub(crate) trigger: String,
     // The name to display in the editor
@@ -362,7 +362,7 @@ pub struct Chat {
 }
 
 #[derive(Clone, Debug, Deserialize)]
-pub struct Action {
+pub(crate) struct Action {
     // The name to display in the editor
     pub(crate) action_display_name: String,
     // The model key to use
@@ -395,13 +395,13 @@ pub(crate) struct ValidClientParams {
 }
 
 #[derive(Clone, Debug)]
-pub struct Config {
+pub(crate) struct Config {
     pub(crate) config: ValidConfig,
     pub(crate) client_params: ValidClientParams,
 }
 
 impl Config {
-    pub fn new(mut args: Value) -> Result<Self> {
+    pub(crate) fn new(mut args: Value) -> Result<Self> {
         // Validate that the models specified are there so we can unwrap
         let configuration_args = args
            .as_object_mut()
@@ -422,23 +422,19 @@ impl Config {
 
     // Helpers for the backends ///////////
     ///////////////////////////////////////
-    pub fn get_chats(&self) -> &Vec<Chat> {
+    pub(crate) fn get_chats(&self) -> &Vec<Chat> {
         &self.config.chats
     }
 
-    pub fn get_actions(&self) -> &Vec<Action> {
+    pub(crate) fn get_actions(&self) -> &Vec<Action> {
         &self.config.actions
     }
 
-    pub fn is_completions_enabled(&self) -> bool {
-        self.config.completion.is_some()
-    }
-
-    pub fn get_completions_post_process(&self) -> Option<&PostProcess> {
+    pub(crate) fn get_completions_post_process(&self) -> Option<&PostProcess> {
         self.config.completion.as_ref().map(|x| &x.post_process)
     }
 
-    pub fn get_completion_transformer_max_requests_per_second(&self) -> anyhow::Result<f32> {
+    pub(crate) fn get_completion_transformer_max_requests_per_second(&self) -> anyhow::Result<f32> {
         match &self
             .config
             .models
@@ -470,7 +466,7 @@ impl Config {
 // For teesting use only
 #[cfg(test)]
 impl Config {
-    pub fn default_with_file_store_without_models() -> Self {
+    pub(crate) fn default_with_file_store_without_models() -> Self {
         Self {
             config: ValidConfig {
                 memory: ValidMemoryBackend::FileStore(FileStore { crawl: None }),
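Aside on the `PostProcess` defaults above: `#[serde(default = "true_default")]` makes both de-duplication flags opt-out, so an empty post-process table still enables them. A minimal, self-contained sketch of that behavior, assuming `serde` and `serde_json` as dependencies (the struct is reduced to the two flags for brevity):

```rust
use serde::Deserialize;

const fn true_default() -> bool {
    true
}

// Reduced to the two boolean flags for illustration.
#[derive(Debug, Deserialize)]
struct PostProcess {
    #[serde(default = "true_default")]
    remove_duplicate_start: bool,
    #[serde(default = "true_default")]
    remove_duplicate_end: bool,
}

fn main() {
    // An empty table still deserializes; both flags default to true.
    let p: PostProcess = serde_json::from_str("{}").unwrap();
    assert!(p.remove_duplicate_start && p.remove_duplicate_end);

    // An explicit false overrides the default.
    let p: PostProcess = serde_json::from_str(r#"{"remove_duplicate_end": false}"#).unwrap();
    assert!(p.remove_duplicate_start && !p.remove_duplicate_end);
}
```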
diff --git a/crates/lsp-ai/src/crawl.rs b/crates/lsp-ai/src/crawl.rs
index 1a15f07..2341380 100644
--- a/crates/lsp-ai/src/crawl.rs
+++ b/crates/lsp-ai/src/crawl.rs
@@ -22,7 +22,7 @@ impl Crawl {
     }
 
     #[instrument(skip(self, f))]
-    pub fn maybe_do_crawl(
+    pub(crate) fn maybe_do_crawl(
         &mut self,
         triggered_file: Option<String>,
         mut f: impl FnMut(&config::Crawl, &str) -> anyhow::Result<bool>,
diff --git a/crates/lsp-ai/src/custom_requests/generation_stream.rs b/crates/lsp-ai/src/custom_requests/generation_stream.rs
index a09ed30..7bdb1dc 100644
--- a/crates/lsp-ai/src/custom_requests/generation_stream.rs
+++ b/crates/lsp-ai/src/custom_requests/generation_stream.rs
@@ -5,12 +5,12 @@
 pub(crate) enum GenerationStream {}
 
 #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
 #[serde(rename_all = "camelCase")]
-pub struct GenerationStreamParams {
-    pub partial_result_token: ProgressToken,
+pub(crate) struct GenerationStreamParams {
+    pub(crate) partial_result_token: ProgressToken,
     // This field was "mixed-in" from TextDocumentPositionParams
     #[serde(flatten)]
-    pub text_document_position: TextDocumentPositionParams,
+    pub(crate) text_document_position: TextDocumentPositionParams,
 }
 
 #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)]
diff --git a/crates/lsp-ai/src/embedding_models/mod.rs b/crates/lsp-ai/src/embedding_models/mod.rs
index d125848..8f2cb84 100644
--- a/crates/lsp-ai/src/embedding_models/mod.rs
+++ b/crates/lsp-ai/src/embedding_models/mod.rs
@@ -2,7 +2,7 @@ use crate::config::ValidEmbeddingModel;
 
 mod ollama;
 
-pub fn normalize(mut vector: Vec<f32>) -> Vec<f32> {
+fn normalize(mut vector: Vec<f32>) -> Vec<f32> {
     let magnitude = (vector.iter().map(|&x| x * x).sum::<f32>()).sqrt();
 
     if magnitude != 0.0 {
@@ -15,13 +15,13 @@ pub fn normalize(mut vector: Vec<f32>) -> Vec<f32> {
 }
 
 #[derive(Clone, Copy)]
-pub enum EmbeddingPurpose {
+pub(crate) enum EmbeddingPurpose {
     Storage,
     Retrieval,
 }
 
 #[async_trait::async_trait]
-pub trait EmbeddingModel {
+pub(crate) trait EmbeddingModel {
     async fn embed(
         &self,
         batch: Vec<&str>,
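For reference, the now-private `normalize` above is plain L2 normalization: divide every component by the vector's Euclidean magnitude, leaving an all-zero vector untouched to avoid division by zero. The loop body past the visible context lines is assumed, but this is the standard definition, shown here with a worked value:

```rust
fn normalize(mut vector: Vec<f32>) -> Vec<f32> {
    let magnitude = (vector.iter().map(|&x| x * x).sum::<f32>()).sqrt();

    if magnitude != 0.0 {
        // Scale each component so the result has unit length.
        for x in vector.iter_mut() {
            *x /= magnitude;
        }
    }
    vector
}

fn main() {
    // |(3, 4)| = 5, so the result is (0.6, 0.8), a unit vector.
    assert_eq!(normalize(vec![3.0, 4.0]), vec![0.6, 0.8]);
    // The zero vector passes through unchanged rather than dividing by zero.
    assert_eq!(normalize(vec![0.0, 0.0]), vec![0.0, 0.0]);
}
```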
diff --git a/crates/lsp-ai/src/embedding_models/ollama.rs b/crates/lsp-ai/src/embedding_models/ollama.rs
index 2a7b362..a1b030d 100644
--- a/crates/lsp-ai/src/embedding_models/ollama.rs
+++ b/crates/lsp-ai/src/embedding_models/ollama.rs
@@ -8,29 +8,29 @@ use crate::config;
 
 use super::{normalize, EmbeddingModel, EmbeddingPurpose};
 
 #[derive(Deserialize)]
-pub struct Embed {
+pub(crate) struct Embed {
     embedding: Vec<f32>,
 }
 
 #[derive(Deserialize)]
-pub struct EmbedError {
+pub(crate) struct EmbedError {
     error: Value,
 }
 
 #[derive(Deserialize)]
 #[serde(untagged)]
-pub enum EmbedResponse {
+pub(crate) enum EmbedResponse {
     Success(Embed),
     Error(EmbedError),
     Other(HashMap<String, Value>),
 }
 
-pub struct Ollama {
+pub(crate) struct Ollama {
     config: config::OllamaEmbeddingModel,
 }
 
 impl Ollama {
-    pub fn new(config: config::OllamaEmbeddingModel) -> Self {
+    pub(crate) fn new(config: config::OllamaEmbeddingModel) -> Self {
         Self { config }
     }
 }
diff --git a/crates/lsp-ai/src/memory_backends/file_store.rs b/crates/lsp-ai/src/memory_backends/file_store.rs
index 28a9243..92f95ca 100644
--- a/crates/lsp-ai/src/memory_backends/file_store.rs
+++ b/crates/lsp-ai/src/memory_backends/file_store.rs
@@ -28,7 +28,7 @@ impl AdditionalFileStoreParams {
 }
 
 #[derive(Clone)]
-pub struct File {
+pub(crate) struct File {
     rope: Rope,
     tree: Option<Tree>,
 }
@@ -38,11 +38,11 @@ impl File {
         Self { rope, tree }
     }
 
-    pub fn rope(&self) -> &Rope {
+    pub(crate) fn rope(&self) -> &Rope {
         &self.rope
     }
 
-    pub fn tree(&self) -> Option<&Tree> {
+    pub(crate) fn tree(&self) -> Option<&Tree> {
         self.tree.as_ref()
     }
 }
@@ -278,15 +278,15 @@ impl FileStore {
         })
     }
 
-    pub fn file_map(&self) -> &RwLock<HashMap<String, File>> {
+    pub(crate) fn file_map(&self) -> &RwLock<HashMap<String, File>> {
         &self.file_map
     }
 
-    pub fn contains_file(&self, uri: &str) -> bool {
+    pub(crate) fn contains_file(&self, uri: &str) -> bool {
         self.file_map.read().contains_key(uri)
     }
 
-    pub fn position_to_byte(&self, position: &TextDocumentPositionParams) -> anyhow::Result<usize> {
+    pub(crate) fn position_to_byte(&self, position: &TextDocumentPositionParams) -> anyhow::Result<usize> {
         let file_map = self.file_map.read();
         let uri = position.text_document.uri.to_string();
         let file = file_map
@@ -494,7 +494,7 @@ impl MemoryBackend for FileStore {
 // For testing use only
 #[cfg(test)]
 impl FileStore {
-    pub fn default_with_filler_file() -> anyhow::Result<Self> {
+    pub(crate) fn default_with_filler_file() -> anyhow::Result<Self> {
         let config = Config::default_with_file_store_without_models();
         let file_store_config = if let config::ValidMemoryBackend::FileStore(file_store_config) =
             config.config.memory.clone()
diff --git a/crates/lsp-ai/src/memory_backends/mod.rs b/crates/lsp-ai/src/memory_backends/mod.rs
index 126649b..f9dfb8e 100644
--- a/crates/lsp-ai/src/memory_backends/mod.rs
+++ b/crates/lsp-ai/src/memory_backends/mod.rs
@@ -11,7 +11,7 @@ mod postgresml;
 mod vector_store;
 
 #[derive(Clone, Debug)]
-pub enum PromptType {
+pub(crate) enum PromptType {
     ContextAndCode,
     FIM,
 }
@@ -33,20 +33,20 @@ impl From<&Value> for MemoryRunParams {
 }
 
 #[derive(Debug)]
-pub struct ContextAndCodePrompt {
-    pub context: String,
-    pub code: String,
-    pub selected_text: Option<String>,
+pub(crate) struct ContextAndCodePrompt {
+    pub(crate) context: String,
+    pub(crate) code: String,
+    pub(crate) selected_text: Option<String>,
 }
 
 #[derive(Debug)]
-pub struct FIMPrompt {
-    pub prompt: String,
-    pub suffix: String,
+pub(crate) struct FIMPrompt {
+    pub(crate) prompt: String,
+    pub(crate) suffix: String,
 }
 
 #[derive(Debug)]
-pub enum Prompt {
+pub(crate) enum Prompt {
     FIM(FIMPrompt),
     ContextAndCode(ContextAndCodePrompt),
 }
@@ -96,10 +96,7 @@ impl<'a> TryFrom<&'a Prompt> for &'a FIMPrompt {
 }
 
 #[async_trait::async_trait]
-pub trait MemoryBackend {
-    async fn init(&self) -> anyhow::Result<()> {
-        Ok(())
-    }
+pub(crate) trait MemoryBackend {
     fn opened_text_document(&self, params: DidOpenTextDocumentParams) -> anyhow::Result<()>;
     fn code_action_request(
         &self,
@@ -144,7 +141,7 @@ impl TryFrom<ValidMemoryBackend> for Box<dyn MemoryBackend + Send + Sync> {
 // easier to just pass in a default prompt.
 #[cfg(test)]
 impl Prompt {
-    pub fn default_with_cursor() -> Self {
+    pub(crate) fn default_with_cursor() -> Self {
         Self::ContextAndCode(ContextAndCodePrompt {
             context: r#"def test_context():\n    pass"#.to_string(),
             code: r#"def test_code():\n    "#.to_string(),
@@ -152,14 +149,14 @@ impl Prompt {
         })
     }
 
-    pub fn default_fim() -> Self {
+    pub(crate) fn default_fim() -> Self {
         Self::FIM(FIMPrompt {
             prompt: r#"def test_context():\n    pass"#.to_string(),
             suffix: r#"def test_code():\n    "#.to_string(),
         })
     }
 
-    pub fn default_without_cursor() -> Self {
+    pub(crate) fn default_without_cursor() -> Self {
         Self::ContextAndCode(ContextAndCodePrompt {
             context: r#"def test_context():\n    pass"#.to_string(),
             code: r#"def test_code():\n    "#.to_string(),
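`FIMPrompt` above keeps the text before and after the cursor separate because fill-in-the-middle models want them wrapped in model-specific sentinel tokens. A hypothetical sketch of how a backend can assemble the final prompt; `assemble_fim` and the StarCoder-style `<fim_prefix>`/`<fim_suffix>`/`<fim_middle>` tokens are illustrative, not taken from this codebase:

```rust
struct FIMPrompt {
    prompt: String, // text before the cursor
    suffix: String, // text after the cursor
}

// Assemble a fill-in-the-middle prompt in prefix-suffix-middle order.
// Each model defines its own sentinel tokens, usually supplied via
// configuration; these are common StarCoder-style names.
fn assemble_fim(p: &FIMPrompt) -> String {
    format!(
        "<fim_prefix>{}<fim_suffix>{}<fim_middle>",
        p.prompt, p.suffix
    )
}

fn main() {
    let p = FIMPrompt {
        prompt: "def add(a, b):\n    ".to_string(),
        suffix: "\n\nprint(add(1, 2))".to_string(),
    };
    // The model generates the "middle" that completes the function body.
    println!("{}", assemble_fim(&p));
}
```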
diff --git a/crates/lsp-ai/src/memory_backends/postgresml/mod.rs b/crates/lsp-ai/src/memory_backends/postgresml/mod.rs
index a003d7c..ab3ecd4 100644
--- a/crates/lsp-ai/src/memory_backends/postgresml/mod.rs
+++ b/crates/lsp-ai/src/memory_backends/postgresml/mod.rs
@@ -79,7 +79,7 @@ pub(crate) struct PostgresML {
 
 impl PostgresML {
     #[instrument]
-    pub fn new(
+    pub(crate) fn new(
         mut postgresml_config: config::PostgresML,
         configuration: Config,
     ) -> anyhow::Result<Self> {
diff --git a/crates/lsp-ai/src/memory_backends/vector_store.rs b/crates/lsp-ai/src/memory_backends/vector_store.rs
index 9d6ed2e..f0dc51a 100644
--- a/crates/lsp-ai/src/memory_backends/vector_store.rs
+++ b/crates/lsp-ai/src/memory_backends/vector_store.rs
@@ -329,7 +329,7 @@ impl VectorStoreInner {
     }
 }
 
-pub struct VectorStore {
+pub(crate) struct VectorStore {
     file_store: Arc<FileStore>,
     crawl: Option<Arc<Mutex<Crawl>>>,
     splitter: Arc<Box<dyn Splitter + Send + Sync>>,
@@ -340,7 +340,7 @@ pub struct VectorStore {
 }
 
 impl VectorStore {
-    pub fn new(
+    pub(crate) fn new(
         mut vector_store_config: config::VectorStore,
         config: Config,
     ) -> anyhow::Result<Self> {
@@ -703,7 +703,7 @@ impl MemoryBackend for VectorStore {
         // Get the embedding
         let embedding = self
             .embedding_model
-            .embed(vec![&query], EmbeddingPurpose::Storage)
+            .embed(vec![&query], EmbeddingPurpose::Retrieval)
             .await?
             .into_iter()
             .nth(0)
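The last hunk above is the one behavioral fix in this patch: query embeddings were being computed with `EmbeddingPurpose::Storage`, while stored chunks and search queries are supposed to carry asymmetric prefixes (the `storage`/`retrieval` fields of `EmbeddingPrefix` in config.rs). Models in the E5 family, for instance, expect "passage: " on indexed text and "query: " on searches; embedding a query with the storage prefix silently degrades similarity search. A sketch of how such prefixes are plausibly applied — `apply_prefix` is a hypothetical helper, and only the field names follow the config above:

```rust
#[derive(Clone, Copy)]
enum EmbeddingPurpose {
    Storage,
    Retrieval,
}

struct EmbeddingPrefix {
    storage: String,   // e.g. "passage: " for E5-style models
    retrieval: String, // e.g. "query: " for E5-style models
}

// Prepend the role-appropriate prefix before sending text to the model.
fn apply_prefix(prefix: &EmbeddingPrefix, purpose: EmbeddingPurpose, text: &str) -> String {
    match purpose {
        EmbeddingPurpose::Storage => format!("{}{}", prefix.storage, text),
        EmbeddingPurpose::Retrieval => format!("{}{}", prefix.retrieval, text),
    }
}

fn main() {
    let prefix = EmbeddingPrefix {
        storage: "passage: ".into(),
        retrieval: "query: ".into(),
    };
    // Index-time text gets the storage prefix...
    assert_eq!(
        apply_prefix(&prefix, EmbeddingPurpose::Storage, "fn main() {}"),
        "passage: fn main() {}"
    );
    // ...while searches must get the retrieval prefix, which is what the hunk fixes.
    assert_eq!(
        apply_prefix(&prefix, EmbeddingPurpose::Retrieval, "main function"),
        "query: main function"
    );
}
```

Chunks are embedded once at index time under `Storage`; every search goes through `Retrieval`, so the two sides now match.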
diff --git a/crates/lsp-ai/src/splitters/mod.rs b/crates/lsp-ai/src/splitters/mod.rs
index 5592586..6e7a56a 100644
--- a/crates/lsp-ai/src/splitters/mod.rs
+++ b/crates/lsp-ai/src/splitters/mod.rs
@@ -6,13 +6,13 @@ mod text_splitter;
 mod tree_sitter;
 
 #[derive(Debug, Serialize)]
-pub struct ByteRange {
-    pub start_byte: usize,
-    pub end_byte: usize,
+pub(crate) struct ByteRange {
+    pub(crate) start_byte: usize,
+    pub(crate) end_byte: usize,
 }
 
 impl ByteRange {
-    pub fn new(start_byte: usize, end_byte: usize) -> Self {
+    pub(crate) fn new(start_byte: usize, end_byte: usize) -> Self {
         Self {
             start_byte,
             end_byte,
@@ -21,9 +21,9 @@ impl ByteRange {
 }
 
 #[derive(Serialize)]
-pub struct Chunk {
-    pub text: String,
-    pub range: ByteRange,
+pub(crate) struct Chunk {
+    pub(crate) text: String,
+    pub(crate) range: ByteRange,
 }
 
 impl Chunk {
@@ -32,7 +32,7 @@ impl Chunk {
     }
 }
 
-pub trait Splitter {
+pub(crate) trait Splitter {
     fn split(&self, file: &File) -> Vec<Chunk>;
     fn split_file_contents(&self, uri: &str, contents: &str) -> Vec<Chunk>;
 
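The `Splitter` trait above returns `Chunk`s pairing extracted text with the byte range it came from, so vector-store hits can be mapped back to file locations. A naive fixed-window implementation to illustrate the contract; the real splitters here are tree-sitter- and text-splitter-based and cut on syntactic boundaries rather than raw byte windows:

```rust
struct ByteRange {
    start_byte: usize,
    end_byte: usize,
}

struct Chunk {
    text: String,
    range: ByteRange,
}

// Naive fixed-window splitter. Real implementations cut on syntactic or
// semantic boundaries (and respect UTF-8 character boundaries), but the
// Chunk/ByteRange contract is the same.
fn split_file_contents(contents: &str, chunk_size: usize) -> Vec<Chunk> {
    contents
        .as_bytes()
        .chunks(chunk_size)
        .enumerate()
        .map(|(i, window)| {
            let start_byte = i * chunk_size;
            Chunk {
                text: String::from_utf8_lossy(window).into_owned(),
                range: ByteRange {
                    start_byte,
                    end_byte: start_byte + window.len(),
                },
            }
        })
        .collect()
}

fn main() {
    let chunks = split_file_contents("fn main() { println!(\"hi\"); }", 10);
    for c in &chunks {
        println!("{}..{}: {:?}", c.range.start_byte, c.range.end_byte, c.text);
    }
}
```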
diff --git a/crates/lsp-ai/src/template.rs b/crates/lsp-ai/src/template.rs
index c98634f..bb79fd5 100644
--- a/crates/lsp-ai/src/template.rs
+++ b/crates/lsp-ai/src/template.rs
@@ -11,7 +11,7 @@ fn template_name_from_template_string(template: &str) -> String {
     xxhash_rust::xxh3::xxh3_64(template.as_bytes()).to_string()
 }
 
-pub fn apply_chat_template(
+pub(crate) fn apply_chat_template(
     template: &str,
     chat_messages: Vec<ChatMessage>,
     bos_token: &str,
diff --git a/crates/lsp-ai/src/transformer_backends/anthropic.rs b/crates/lsp-ai/src/transformer_backends/anthropic.rs
index aab123e..0809a31 100644
--- a/crates/lsp-ai/src/transformer_backends/anthropic.rs
+++ b/crates/lsp-ai/src/transformer_backends/anthropic.rs
@@ -57,7 +57,7 @@ struct AnthropicChatMessage {
 }
 
 #[derive(Deserialize, Serialize)]
-pub struct ChatError {
+pub(crate) struct ChatError {
     error: Value,
 }
 
diff --git a/crates/lsp-ai/src/transformer_backends/llama_cpp/mod.rs b/crates/lsp-ai/src/transformer_backends/llama_cpp/mod.rs
index a1e1ac1..850b1ca 100644
--- a/crates/lsp-ai/src/transformer_backends/llama_cpp/mod.rs
+++ b/crates/lsp-ai/src/transformer_backends/llama_cpp/mod.rs
@@ -23,23 +23,23 @@ const fn max_new_tokens_default() -> usize {
 
 // NOTE: We cannot deny unknown fields as the provided parameters may contain other fields relevant to other processes
 #[derive(Debug, Deserialize)]
-pub struct LLaMACPPRunParams {
-    pub fim: Option<FIM>,
+pub(crate) struct LLaMACPPRunParams {
+    pub(crate) fim: Option<FIM>,
     messages: Option<Vec<ChatMessage>>,
     chat_template: Option<String>, // A Jinja template
     chat_format: Option<String>,   // The name of a template in llamacpp
     #[serde(default = "max_new_tokens_default")]
-    pub max_tokens: usize,
+    pub(crate) max_tokens: usize,
     // TODO: Explore other arguments
 }
 
-pub struct LLaMACPP {
+pub(crate) struct LLaMACPP {
     model: Model,
 }
 
 impl LLaMACPP {
     #[instrument]
-    pub fn new(configuration: config::LLaMACPP) -> anyhow::Result<Self> {
+    pub(crate) fn new(configuration: config::LLaMACPP) -> anyhow::Result<Self> {
         let model_path = match (
             &configuration.file_path,
             &configuration.repository,
diff --git a/crates/lsp-ai/src/transformer_backends/llama_cpp/model.rs b/crates/lsp-ai/src/transformer_backends/llama_cpp/model.rs
index 79e87a5..4a024ba 100644
--- a/crates/lsp-ai/src/transformer_backends/llama_cpp/model.rs
+++ b/crates/lsp-ai/src/transformer_backends/llama_cpp/model.rs
@@ -17,14 +17,14 @@ use super::LLaMACPPRunParams;
 
 static BACKEND: Lazy<LlamaBackend> = Lazy::new(|| LlamaBackend::init().unwrap());
 
-pub struct Model {
+pub(crate) struct Model {
     model: LlamaModel,
     n_ctx: NonZeroU32,
 }
 
 impl Model {
     #[instrument]
-    pub fn new(model_path: PathBuf, config: &config::LLaMACPP) -> anyhow::Result<Self> {
+    pub(crate) fn new(model_path: PathBuf, config: &config::LLaMACPP) -> anyhow::Result<Self> {
         // Initialize the model_params
         let model_params = LlamaModelParams::default().with_n_gpu_layers(config.n_gpu_layers);
 
@@ -42,7 +42,7 @@ impl Model {
     }
 
     #[instrument(skip(self))]
-    pub fn complete(&self, prompt: &str, params: LLaMACPPRunParams) -> anyhow::Result<String> {
+    pub(crate) fn complete(&self, prompt: &str, params: LLaMACPPRunParams) -> anyhow::Result<String> {
         info!("Completing with llama.cpp with prompt:\n{prompt}");
 
         // initialize the context
@@ -129,7 +129,7 @@ impl Model {
     }
 
     #[instrument(skip(self))]
-    pub fn apply_chat_template(
+    pub(crate) fn apply_chat_template(
         &self,
         messages: Vec<ChatMessage>,
         template: Option<String>,
@@ -144,13 +144,13 @@ impl Model {
     }
 
     #[instrument(skip(self))]
-    pub fn get_eos_token(&self) -> anyhow::Result<String> {
+    pub(crate) fn get_eos_token(&self) -> anyhow::Result<String> {
         let token = self.model.token_eos();
         Ok(self.model.token_to_str(token, Special::Tokenize)?)
     }
 
     #[instrument(skip(self))]
-    pub fn get_bos_token(&self) -> anyhow::Result<String> {
+    pub(crate) fn get_bos_token(&self) -> anyhow::Result<String> {
         let token = self.model.token_bos();
         Ok(self.model.token_to_str(token, Special::Tokenize)?)
     }
diff --git a/crates/lsp-ai/src/transformer_backends/mod.rs b/crates/lsp-ai/src/transformer_backends/mod.rs
index f962564..4358ac6 100644
--- a/crates/lsp-ai/src/transformer_backends/mod.rs
+++ b/crates/lsp-ai/src/transformer_backends/mod.rs
@@ -19,7 +19,7 @@ mod ollama;
 mod open_ai;
 
 #[async_trait::async_trait]
-pub trait TransformerBackend {
+pub(crate) trait TransformerBackend {
     async fn do_completion(
         &self,
         prompt: &Prompt,
@@ -38,6 +38,7 @@ pub trait TransformerBackend {
         params: Value,
     ) -> anyhow::Result<DoGenerationResponse>;
 
+    #[allow(dead_code)]
     async fn do_generate_stream(
         &self,
         request: &GenerationStreamRequest,
diff --git a/crates/lsp-ai/src/transformer_backends/ollama.rs b/crates/lsp-ai/src/transformer_backends/ollama.rs
index 678b486..befaebc 100644
--- a/crates/lsp-ai/src/transformer_backends/ollama.rs
+++ b/crates/lsp-ai/src/transformer_backends/ollama.rs
@@ -69,7 +69,7 @@ enum OllamaChatResponse {
 
 impl Ollama {
     #[instrument]
-    pub fn new(configuration: config::Ollama) -> Self {
+    pub(crate) fn new(configuration: config::Ollama) -> Self {
         Self { configuration }
     }
 
diff --git a/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs b/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs
index 0f45254..c75b580 100644
--- a/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs
+++ b/crates/lsp-ai/src/transformer_backends/open_ai/mod.rs
@@ -106,7 +106,7 @@ pub(crate) enum OpenAIChatResponse {
 
 impl OpenAI {
     #[instrument]
-    pub fn new(configuration: config::OpenAI) -> Self {
+    pub(crate) fn new(configuration: config::OpenAI) -> Self {
         Self { configuration }
     }
 
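`get_bos_token`/`get_eos_token` above exist because Jinja-style chat templates commonly reference the model's special tokens, and `apply_chat_template` (template.rs, earlier) takes the template string plus those tokens. A sketch of rendering a ChatML-style template, assuming the `minijinja` crate as the template engine; the template text and message values are illustrative, not lsp-ai's actual rendering code:

```rust
use minijinja::{context, Environment};

fn main() -> Result<(), minijinja::Error> {
    // A ChatML-style chat template; real templates come from model configuration.
    let template =
        "{% for m in messages %}<|im_start|>{{ m.role }}\n{{ m.content }}<|im_end|>\n{% endfor %}";

    let mut env = Environment::new();
    env.add_template("chat", template)?;

    let rendered = env.get_template("chat")?.render(context! {
        messages => vec![
            context! { role => "system", content => "You are a code assistant." },
            context! { role => "user", content => "Complete this function." },
        ],
    })?;
    println!("{rendered}");
    Ok(())
}
```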
diff --git a/crates/lsp-ai/src/transformer_worker.rs b/crates/lsp-ai/src/transformer_worker.rs
index 94f29bd..4e7b420 100644
--- a/crates/lsp-ai/src/transformer_worker.rs
+++ b/crates/lsp-ai/src/transformer_worker.rs
@@ -54,37 +54,37 @@ impl GenerationRequest {
 // The generate stream is not yet ready but we don't want to remove it
 #[allow(dead_code)]
 #[derive(Clone, Debug)]
-pub struct GenerationStreamRequest {
+pub(crate) struct GenerationStreamRequest {
     id: RequestId,
     params: GenerationStreamParams,
 }
 
 impl GenerationStreamRequest {
-    pub fn new(id: RequestId, params: GenerationStreamParams) -> Self {
+    pub(crate) fn new(id: RequestId, params: GenerationStreamParams) -> Self {
         Self { id, params }
     }
 }
 
 #[derive(Clone, Debug)]
-pub struct CodeActionRequest {
+pub(crate) struct CodeActionRequest {
     id: RequestId,
     params: CodeActionParams,
 }
 
 impl CodeActionRequest {
-    pub fn new(id: RequestId, params: CodeActionParams) -> Self {
+    pub(crate) fn new(id: RequestId, params: CodeActionParams) -> Self {
         Self { id, params }
     }
 }
 
 #[derive(Clone, Debug)]
-pub struct CodeActionResolveRequest {
+pub(crate) struct CodeActionResolveRequest {
     id: RequestId,
     params: CodeAction,
 }
 
 impl CodeActionResolveRequest {
-    pub fn new(id: RequestId, params: CodeAction) -> Self {
+    pub(crate) fn new(id: RequestId, params: CodeAction) -> Self {
         Self { id, params }
     }
 }
@@ -112,16 +112,17 @@ impl WorkerRequest {
     }
 }
 
-pub struct DoCompletionResponse {
-    pub insert_text: String,
+pub(crate) struct DoCompletionResponse {
+    pub(crate) insert_text: String,
 }
 
-pub struct DoGenerationResponse {
-    pub generated_text: String,
+pub(crate) struct DoGenerationResponse {
+    pub(crate) generated_text: String,
 }
 
-pub struct DoGenerationStreamResponse {
-    pub generated_text: String,
+#[allow(dead_code)]
+pub(crate) struct DoGenerationStreamResponse {
+    pub(crate) generated_text: String,
 }
 
 fn post_process_start(response: String, front: &str) -> String {
diff --git a/crates/lsp-ai/src/utils.rs b/crates/lsp-ai/src/utils.rs
index b5d6fcd..5aa44f5 100644
--- a/crates/lsp-ai/src/utils.rs
+++ b/crates/lsp-ai/src/utils.rs
@@ -77,7 +77,7 @@ pub(crate) fn parse_tree(
         .with_context(|| format!("parsing tree failed for {uri}"))
 }
 
-pub fn format_file_chunk(uri: &str, excerpt: &str, root_uri: Option<&str>) -> String {
+pub(crate) fn format_file_chunk(uri: &str, excerpt: &str, root_uri: Option<&str>) -> String {
     let path = match root_uri {
         Some(root_uri) => {
            if uri.starts_with(root_uri) {
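Finally, `post_process_start` (context in the transformer_worker.rs hunk above) pairs with the `remove_duplicate_start`/`remove_duplicate_end` flags from `PostProcess`: when a model echoes the text adjacent to the cursor back into its completion, the duplicated span is trimmed. The function body is not part of this diff, so the following is only a plausible sketch of the start-side trim:

```rust
// Trim the longest prefix of `response` that duplicates a suffix of `front`
// (the text immediately before the cursor). Illustrative only: the real
// body is not shown in this diff, and this version assumes ASCII input.
fn post_process_start(response: String, front: &str) -> String {
    for start in 0..front.len() {
        let overlap = &front[start..];
        if response.starts_with(overlap) {
            return response[overlap.len()..].to_string();
        }
    }
    response
}

fn main() {
    // The model repeated "let x = " before the actual completion; it gets trimmed.
    assert_eq!(post_process_start("let x = 42;".into(), "let x = "), "42;");
    // No overlap: the response passes through unchanged.
    assert_eq!(post_process_start("42;".into(), "let x = "), "42;");
}
```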