refactor: replace adaptor strings with enum
McPatate committed Feb 6, 2024
1 parent 11d2906 commit 307ee39
Showing 3 changed files with 40 additions and 34 deletions.
1 change: 0 additions & 1 deletion .gitignore
@@ -1,4 +1,3 @@
.vscode/
dist/
target/
.DS_Store
67 changes: 37 additions & 30 deletions crates/llm-ls/src/adaptors.rs
@@ -196,47 +196,54 @@ fn parse_openai_text(text: &str) -> Result<Vec<Generation>> {
    }
}

-pub(crate) const TGI: &str = "tgi";
-pub(crate) const HUGGING_FACE: &str = "huggingface";
-pub(crate) const OLLAMA: &str = "ollama";
-pub(crate) const OPENAI: &str = "openai";
-pub(crate) const DEFAULT_ADAPTOR: &str = HUGGING_FACE;
+#[derive(Debug, Default, Deserialize, Serialize)]
+#[serde(rename_all = "lowercase")]
+pub(crate) enum Adaptor {
+    #[default]
+    HuggingFace,
+    Ollama,
+    OpenAi,
+    Tgi,
+}
+
+impl Display for Adaptor {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::HuggingFace => write!(f, "huggingface"),
+            Self::Ollama => write!(f, "ollama"),
+            Self::OpenAi => write!(f, "openai"),
+            Self::Tgi => write!(f, "tgi"),
+        }
+    }
+}

pub fn adapt_body(prompt: String, params: &CompletionParams) -> Result<Value> {
-    match params
-        .adaptor
-        .as_ref()
-        .unwrap_or(&DEFAULT_ADAPTOR.to_string())
-        .as_str()
-    {
-        TGI => Ok(build_tgi_body(prompt, &params.request_params)),
-        HUGGING_FACE => Ok(build_api_body(prompt, &params.request_params)),
-        OLLAMA => Ok(build_ollama_body(prompt, params)),
-        OPENAI => Ok(build_openai_body(prompt, params)),
-        adaptor => Err(Error::UnknownAdaptor(adaptor.to_owned())),
+    match params.adaptor.as_ref().unwrap_or(&Adaptor::default()) {
+        Adaptor::HuggingFace => Ok(build_api_body(prompt, &params.request_params)),
+        Adaptor::Ollama => Ok(build_ollama_body(prompt, params)),
+        Adaptor::OpenAi => Ok(build_openai_body(prompt, params)),
+        Adaptor::Tgi => Ok(build_tgi_body(prompt, &params.request_params)),
    }
}

pub fn adapt_headers(
-    adaptor: Option<&String>,
+    adaptor: Option<&Adaptor>,
    api_token: Option<&String>,
    ide: Ide,
) -> Result<HeaderMap> {
-    match adaptor.unwrap_or(&DEFAULT_ADAPTOR.to_string()).as_str() {
-        TGI => build_tgi_headers(api_token, ide),
-        HUGGING_FACE => build_api_headers(api_token, ide),
-        OLLAMA => build_ollama_headers(),
-        OPENAI => build_openai_headers(api_token, ide),
-        adaptor => Err(Error::UnknownAdaptor(adaptor.to_owned())),
+    match adaptor.unwrap_or(&Adaptor::default()) {
+        Adaptor::HuggingFace => build_api_headers(api_token, ide),
+        Adaptor::Ollama => build_ollama_headers(),
+        Adaptor::OpenAi => build_openai_headers(api_token, ide),
+        Adaptor::Tgi => build_tgi_headers(api_token, ide),
    }
}

-pub fn parse_generations(adaptor: Option<&String>, text: &str) -> Result<Vec<Generation>> {
-    match adaptor.unwrap_or(&DEFAULT_ADAPTOR.to_string()).as_str() {
-        TGI => parse_tgi_text(text),
-        HUGGING_FACE => parse_api_text(text),
-        OLLAMA => parse_ollama_text(text),
-        OPENAI => parse_openai_text(text),
-        adaptor => Err(Error::UnknownAdaptor(adaptor.to_owned())),
+pub fn parse_generations(adaptor: Option<&Adaptor>, text: &str) -> Result<Vec<Generation>> {
+    match adaptor.unwrap_or(&Adaptor::default()) {
+        Adaptor::HuggingFace => parse_api_text(text),
+        Adaptor::Ollama => parse_ollama_text(text),
+        Adaptor::OpenAi => parse_openai_text(text),
+        Adaptor::Tgi => parse_tgi_text(text),
    }
}
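For context: the refactor keeps the wire format and the log output stable. `#[serde(rename_all = "lowercase")]` makes the enum accept exactly the lowercase names the removed constants used, `#[default]` keeps Hugging Face as the fallback previously provided by `DEFAULT_ADAPTOR`, and the `Display` impl prints the same strings. The standalone sketch below is not part of the commit; it assumes `serde` (with the `derive` feature) and `serde_json` are available, and derives `PartialEq` only so the assertions compile.

use serde::{Deserialize, Serialize};
use std::fmt::Display;

// PartialEq is added only for the assertions below; the commit itself derives
// Debug, Default, Deserialize and Serialize.
#[derive(Debug, Default, Deserialize, Serialize, PartialEq)]
#[serde(rename_all = "lowercase")]
enum Adaptor {
    #[default]
    HuggingFace,
    Ollama,
    OpenAi,
    Tgi,
}

impl Display for Adaptor {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::HuggingFace => write!(f, "huggingface"),
            Self::Ollama => write!(f, "ollama"),
            Self::OpenAi => write!(f, "openai"),
            Self::Tgi => write!(f, "tgi"),
        }
    }
}

fn main() {
    // The lowercase wire names match the old string constants ("tgi", "huggingface", ...).
    let adaptor: Adaptor = serde_json::from_str("\"openai\"").unwrap();
    assert_eq!(adaptor, Adaptor::OpenAi);
    assert_eq!(adaptor.to_string(), "openai");

    // #[default] keeps Hugging Face as the fallback, as DEFAULT_ADAPTOR did before.
    assert_eq!(Adaptor::default(), Adaptor::HuggingFace);

    // An unrecognized name is now rejected by serde at deserialization time.
    assert!(serde_json::from_str::<Adaptor>("\"unknown\"").is_err());
}

A practical consequence is that an invalid adaptor name fails while the request is being deserialized, which is why the `UnknownAdaptor` error arms disappear from `adapt_body`, `adapt_headers`, and `parse_generations`.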
6 changes: 3 additions & 3 deletions crates/llm-ls/src/main.rs
@@ -1,4 +1,4 @@
-use adaptors::{adapt_body, adapt_headers, parse_generations};
+use adaptors::{adapt_body, adapt_headers, parse_generations, Adaptor};
use clap::Parser;
use document::Document;
use error::{Error, Result};
@@ -285,7 +285,7 @@ pub struct CompletionParams {
    fim: FimParams,
    api_token: Option<String>,
    model: String,
-    adaptor: Option<String>,
+    adaptor: Option<Adaptor>,
    tokens_to_clear: Vec<String>,
    tokenizer_config: Option<TokenizerConfig>,
    context_window: usize,
@@ -601,7 +601,7 @@ impl Backend {
            "received completion request for {}",
            params.text_document_position.text_document.uri
        );
-        let is_using_inference_api = params.adaptor.as_ref().unwrap_or(&adaptors::DEFAULT_ADAPTOR.to_owned()).as_str() == adaptors::HUGGING_FACE;
+        let is_using_inference_api = matches!(params.adaptor.as_ref().unwrap_or(&Adaptor::default()), Adaptor::HuggingFace);
        if params.api_token.is_none() && is_using_inference_api {
            let now = Instant::now();
            let unauthenticated_warn_at = self.unauthenticated_warn_at.read().await;
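On the main.rs side, the adaptor name sent by the client is now parsed into the enum when `CompletionParams` is deserialized, and the Hugging Face check becomes a `matches!` on the variant instead of a string comparison. The sketch below is a minimal illustration, not repository code: it assumes `serde`/`serde_json` and uses a hypothetical `Params` struct that mirrors only the `adaptor` field of the real `CompletionParams`.

use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "lowercase")]
enum Adaptor {
    #[default]
    HuggingFace,
    Ollama,
    OpenAi,
    Tgi,
}

// Hypothetical stand-in for CompletionParams, reduced to the one field that changed.
#[derive(Debug, Deserialize)]
struct Params {
    adaptor: Option<Adaptor>,
}

fn main() {
    // The client still sends a plain string; serde turns it into the enum variant.
    let explicit: Params = serde_json::from_str(r#"{ "adaptor": "ollama" }"#).unwrap();
    // A missing "adaptor" field deserializes to None.
    let omitted: Params = serde_json::from_str("{}").unwrap();

    // Same shape as the new is_using_inference_api check: fall back to the default
    // variant, then test it with matches! instead of comparing strings.
    let is_using_inference_api = |p: &Params| {
        matches!(p.adaptor.as_ref().unwrap_or(&Adaptor::default()), Adaptor::HuggingFace)
    };

    assert!(!is_using_inference_api(&explicit));
    assert!(is_using_inference_api(&omitted));
}

Falling back with `unwrap_or(&Adaptor::default())` preserves the previous behaviour, where an omitted adaptor means the Hugging Face Inference API.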