clippy and CI
santiagomed committed Oct 5, 2023
1 parent 1c4a1d2 commit 05b20a2
Showing 3 changed files with 28 additions and 19 deletions.
7 changes: 3 additions & 4 deletions .github/workflows/ci.yml
@@ -16,9 +16,8 @@ jobs:
 
     steps:
       - uses: actions/checkout@v3
-      - name: Build
-        run: cargo build --verbose
-      - name: Run tests
-        run: cargo test --verbose
+      - uses: davidB/rust-cargo-make@v1
+      - name: Run CI flow
+        run: cargo make ci
         env:
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
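
Note on the workflow change: the hard-coded build and test steps are replaced by a single cargo-make invocation, so the same CI flow can be reproduced locally with `cargo make ci`. The commit does not show the Makefile.toml that defines the `ci` task; a minimal sketch of what it might look like (task names and flags below are assumptions, not the repository's actual config):

    # Makefile.toml -- hypothetical sketch, not part of this commit.
    # cargo-make reads this file from the crate root.
    [tasks.ci]
    dependencies = ["clippy-check", "test"]

    [tasks.clippy-check]
    command = "cargo"
    args = ["clippy", "--all-targets", "--", "-D", "warnings"]

    [tasks.test]
    command = "cargo"
    args = ["test", "--verbose"]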
32 changes: 21 additions & 11 deletions src/llm/mod.rs
@@ -2,6 +2,8 @@ pub mod bert;
 pub mod openai;
 pub mod request;
 
+use std::fmt::Display;
+
 use anyhow::Result;
 use async_openai::types::CreateChatCompletionResponse;
 use candle_core::{Device, Result as CandleResult, Tensor};
@@ -67,17 +69,6 @@ impl From<CreateChatCompletionResponse> for LLMResponse {
 }
 
 impl LLMResponse {
-    /// Get the response content from an LLMResponse
-    pub fn to_string(&self) -> String {
-        match self {
-            LLMResponse::OpenAI(response) => {
-                ToString::to_string(&response.choices[0].message.content.as_ref().unwrap())
-            }
-            LLMResponse::Bert(response) => response.iter().map(|x| x.to_string()).collect::<Vec<String>>().join(", "),
-            LLMResponse::Empty => "".to_string(),
-        }
-    }
-
     /// Get the role of the response from an LLMResponse, if supported by the LLM.
     pub fn get_role(&self) -> String {
         match self {
@@ -88,6 +79,25 @@ impl LLMResponse {
     }
 }
 
+impl Display for LLMResponse {
+    /// Display the response content from an LLMResponse
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            LLMResponse::OpenAI(response) => {
+                write!(f, "{}", response.choices[0].message.content.as_ref().unwrap())
+            }
+            LLMResponse::Bert(response) => {
+                write!(
+                    f,
+                    "{}",
+                    response.iter().map(|x| x.to_string()).collect::<Vec<String>>().join(", ")
+                )
+            }
+            LLMResponse::Empty => write!(f, ""),
+        }
+    }
+}
+
 impl Default for LLMResponse {
     /// Default LLMResponse is Empty
     fn default() -> Self {
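
Note on the refactor above: an inherent `to_string` method shadows the one supplied by the `ToString` trait, which is what clippy's `inherent_to_string` lint flags. Implementing `Display` instead keeps `.to_string()` available through the blanket `impl<T: Display> ToString for T` and also lets the value be used directly in `format!`/`println!`. A self-contained sketch of the pattern (the `Text` variant is a hypothetical stand-in; the real enum wraps async-openai and candle types):

    use std::fmt;

    // Hypothetical stand-in for the crate's enum, just to show the pattern.
    enum LLMResponse {
        Text(String),
        Empty,
    }

    impl fmt::Display for LLMResponse {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self {
                LLMResponse::Text(s) => write!(f, "{}", s),
                LLMResponse::Empty => Ok(()),
            }
        }
    }

    fn main() {
        let r = LLMResponse::Text("hello".into());
        println!("{}", r);                  // Display works in format macros...
        assert_eq!(r.to_string(), "hello"); // ...and ToString comes for free.
        assert_eq!(LLMResponse::Empty.to_string(), "");
    }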
8 changes: 4 additions & 4 deletions src/prompt/mod.rs
@@ -101,8 +101,8 @@ impl<'p> TemplateEngine<'p> {
     pub fn render(&self) -> Result<Box<dyn Prompt>> {
         let rendered = self.handlebars.render_template(&self.template, &HashMap::<String, String>::new())?;
         match serde_json::from_str::<ChatPrompt>(&rendered) {
-            Ok(chat) => return Ok(Box::new(chat)),
-            Err(_) => return Ok(Box::new(rendered)),
+            Ok(chat) => Ok(Box::new(chat)),
+            Err(_) => Ok(Box::new(rendered)),
         }
     }

@@ -131,8 +131,8 @@ impl<'p> TemplateEngine<'p> {
     {
         let rendered = self.handlebars.render_template(&self.template, &data)?;
         match serde_json::from_str::<ChatPrompt>(&rendered) {
-            Ok(chat) => return Ok(Box::new(chat)),
-            Err(_) => return Ok(Box::new(rendered)),
+            Ok(chat) => Ok(Box::new(chat)),
+            Err(_) => Ok(Box::new(rendered)),
         }
     }
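
Note: both hunks in this file are the same clippy fix, `clippy::needless_return`. When a match is the last expression in a function body, its arms already produce the function's value, so the explicit `return` is redundant. In miniature (a hypothetical function, not from this repository):

    // The match sits in tail position, so each arm is the return value;
    // writing `return "zero"` here would trip clippy::needless_return.
    fn classify(n: i32) -> &'static str {
        match n {
            0 => "zero",
            x if x > 0 => "positive",
            _ => "negative",
        }
    }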
