-
Notifications
You must be signed in to change notification settings - Fork 18
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat(integrations): add ollama support (#214)
- Loading branch information
Showing
8 changed files
with
312 additions
and
33 deletions.
There are no files selected for viewing
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
//! # [Swiftide] Indexing with Ollama | ||
//! | ||
//! This example demonstrates how to index the Swiftide codebase itself. | ||
//! Note that for it to work correctly you need to have ollama running on the default local port. | ||
//! | ||
//! The pipeline will: | ||
//! - Loads the readme from the project | ||
//! - Chunk the code into pieces of 10 to 2048 bytes | ||
//! - Run metadata QA on each chunk with Ollama; generating questions and answers and adding metadata | ||
//! - Embed the chunks in batches of 10, Metadata is embedded by default | ||
//! - Store the nodes in Memory Storage | ||
//! | ||
//! [Swiftide]: https://github.com/bosun-ai/swiftide | ||
//! [examples]: https://github.com/bosun-ai/swiftide/blob/master/examples | ||
use swiftide::{ | ||
indexing, | ||
indexing::loaders::FileLoader, | ||
indexing::persist::MemoryStorage, | ||
indexing::transformers::{ChunkMarkdown, Embed, MetadataQAText}, | ||
integrations, | ||
}; | ||
|
||
#[tokio::main] | ||
async fn main() -> Result<(), Box<dyn std::error::Error>> { | ||
tracing_subscriber::fmt::init(); | ||
|
||
let ollama_client = integrations::ollama::Ollama::default() | ||
.with_default_prompt_model("llama3.1") | ||
.to_owned(); | ||
|
||
let fastembed = integrations::fastembed::FastEmbed::try_default()?; | ||
let memory_store = MemoryStorage::default(); | ||
|
||
indexing::Pipeline::from_loader(FileLoader::new("README.md")) | ||
.then_chunk(ChunkMarkdown::from_chunk_range(10..2048)) | ||
.then(MetadataQAText::new(ollama_client.clone())) | ||
.then_in_batch(10, Embed::new(fastembed)) | ||
.then_store_with(memory_store.clone()) | ||
.run() | ||
.await?; | ||
|
||
println!("Example results:"); | ||
println!( | ||
"{}", | ||
memory_store | ||
.get_all_values() | ||
.await | ||
.into_iter() | ||
.flat_map(|n| n.metadata.into_values().map(|v| v.to_string())) | ||
.collect::<Vec<_>>() | ||
.join("\n") | ||
); | ||
Ok(()) | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,136 @@ | ||
//! This module provides integration with `Ollama`'s API, enabling the use of language models within the Swiftide project. | ||
//! It includes the `Ollama` struct for managing API clients and default options for prompt models. | ||
//! The module is conditionally compiled based on the "ollama" feature flag. | ||
use derive_builder::Builder; | ||
use std::sync::Arc; | ||
|
||
mod simple_prompt; | ||
|
||
/// The `Ollama` struct encapsulates a `Ollama` client that implements [`swiftide::traits::SimplePrompt`]
///
/// There is also a builder available.
///
/// Note that a model always needs to be set, either with
/// [`Ollama::with_default_prompt_model`] or via the builder.
/// You can find available models in the Ollama documentation.
///
/// Under the hood it uses [`ollama_rs`] with its default connection settings, so a
/// locally running Ollama instance is expected to be reachable. See the Ollama
/// documentation for details.
#[derive(Debug, Builder, Clone)]
#[builder(setter(into, strip_option))]
pub struct Ollama {
    /// The `Ollama` client, wrapped in an `Arc` for thread-safe reference counting.
    /// Defaults to a client built by `default_client()`; replace it via the custom
    /// builder setter [`OllamaBuilder::client`].
    #[builder(default = "default_client()", setter(custom))]
    client: Arc<ollama_rs::Ollama>,
    /// Default options for prompt models.
    #[builder(default)]
    default_options: Options,
}
|
||
impl Default for Ollama { | ||
fn default() -> Self { | ||
Self { | ||
client: default_client(), | ||
default_options: Options::default(), | ||
} | ||
} | ||
} | ||
|
||
/// The `Options` struct holds configuration options for the `Ollama` client.
/// It includes optional fields for specifying the prompt model.
#[derive(Debug, Default, Clone, Builder)]
#[builder(setter(into, strip_option))]
pub struct Options {
    /// The default prompt model to use, if specified.
    /// `None` until set via [`Ollama::with_default_prompt_model`] or one of the builders.
    #[builder(default)]
    pub prompt_model: Option<String>,
}
|
||
impl Options { | ||
/// Creates a new `OptionsBuilder` for constructing `Options` instances. | ||
pub fn builder() -> OptionsBuilder { | ||
OptionsBuilder::default() | ||
} | ||
} | ||
|
||
impl Ollama { | ||
/// Creates a new `OllamaBuilder` for constructing `Ollama` instances. | ||
pub fn builder() -> OllamaBuilder { | ||
OllamaBuilder::default() | ||
} | ||
|
||
/// Sets a default prompt model to use when prompting | ||
pub fn with_default_prompt_model(&mut self, model: impl Into<String>) -> &mut Self { | ||
self.default_options = Options { | ||
prompt_model: Some(model.into()), | ||
}; | ||
self | ||
} | ||
} | ||
|
||
impl OllamaBuilder { | ||
/// Sets the `Ollama` client for the `Ollama` instance. | ||
/// | ||
/// # Parameters | ||
/// - `client`: The `Ollama` client to set. | ||
/// | ||
/// # Returns | ||
/// A mutable reference to the `OllamaBuilder`. | ||
pub fn client(&mut self, client: ollama_rs::Ollama) -> &mut Self { | ||
self.client = Some(Arc::new(client)); | ||
self | ||
} | ||
|
||
/// Sets the default prompt model for the `Ollama` instance. | ||
/// | ||
/// # Parameters | ||
/// - `model`: The prompt model to set. | ||
/// | ||
/// # Returns | ||
/// A mutable reference to the `OllamaBuilder`. | ||
pub fn default_prompt_model(&mut self, model: impl Into<String>) -> &mut Self { | ||
if let Some(options) = self.default_options.as_mut() { | ||
options.prompt_model = Some(model.into()); | ||
} else { | ||
self.default_options = Some(Options { | ||
prompt_model: Some(model.into()), | ||
}); | ||
} | ||
self | ||
} | ||
} | ||
|
||
fn default_client() -> Arc<ollama_rs::Ollama> { | ||
ollama_rs::Ollama::default().into() | ||
} | ||
|
||
#[cfg(test)]
mod test {
    use super::*;

    /// The builder should store the configured default prompt model.
    #[test]
    fn test_default_prompt_model() {
        // Renamed from `openai` (copy-paste residue from the OpenAI integration tests).
        let ollama = Ollama::builder()
            .default_prompt_model("llama3.1")
            .build()
            .unwrap();
        assert_eq!(
            ollama.default_options.prompt_model,
            Some("llama3.1".to_string())
        );
    }

    /// `Ollama::default()` starts without a model; setting one afterwards works.
    #[test]
    fn test_building_via_default() {
        let mut client = Ollama::default();

        assert!(client.default_options.prompt_model.is_none());

        client.with_default_prompt_model("llama3.1");
        assert_eq!(
            client.default_options.prompt_model,
            Some("llama3.1".to_string())
        );
    }
}
Oops, something went wrong.