Skip to content

Commit

Permalink
Add support for all Groq and Ollama models, add more tests
Browse files Browse the repository at this point in the history
  • Loading branch information
ad-si committed Apr 29, 2024
1 parent 136ed44 commit 5e273cd
Show file tree
Hide file tree
Showing 9 changed files with 537 additions and 86 deletions.
98 changes: 98 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,3 +21,5 @@ tokio = { version = "1.37.0", features = ["rt-multi-thread", "macros"] }
xdg = "2.5.2"
futures = "0.3.30"
bat = "0.24.0"
assert_cmd = "2.0.14"
predicates = "3.1.0"
114 changes: 114 additions & 0 deletions build.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
use std::env;
use std::fs;
use std::path::Path;

// Maps generated Rust constant names to full model identifier strings.
// `main` below emits one `pub const NAME: &str = "model-id";` line per
// entry into the generated `models.rs`.
const CONST_ASSIGNMENTS: [(&str, &str); 9] = [
  // GROQ
  ("GROQ_LLAMA", "llama3-8b-8192"),
  ("GROQ_LLAMA_70", "llama3-70b-8192"),
  ("GROQ_MIXTRAL", "mixtral-8x7b-32768"),
  ("GROQ_GEMMA", "gemma-7b-it"),
  // OPENAI
  ("OPENAI_GPT", "gpt-4"),
  ("OPENAI_GPT_TURBO", "gpt-4-turbo"),
  // ANTHROPIC
  ("CLAUDE_OPUS", "claude-3-opus-20240229"),
  ("CLAUDE_SONNET", "claude-3-sonnet-20240229"),
  ("CLAUDE_HAIKU", "claude-3-haiku-20240307"),
];

// CLI alias → constant name for Groq models. The second element is a key
// into `CONST_ASSIGNMENTS`, which `main` resolves to the full model id when
// generating the Groq lookup table. Several aliases may map to one model.
const GROQ_MODEL_MAPPING_SRC: [(&str, &str); 11] = [
  ("llama3", "GROQ_LLAMA"),
  ("llama", "GROQ_LLAMA"),
  ("ll", "GROQ_LLAMA"),
  ("llama3-70", "GROQ_LLAMA_70"),
  ("llama-70", "GROQ_LLAMA_70"),
  ("ll-70", "GROQ_LLAMA_70"),
  ("ll70", "GROQ_LLAMA_70"),
  ("mixtral", "GROQ_MIXTRAL"),
  ("mi", "GROQ_MIXTRAL"),
  ("gemma", "GROQ_GEMMA"),
  ("ge", "GROQ_GEMMA"),
];

// CLI alias → literal Ollama model name. Unlike the Groq mapping above,
// the second element is used verbatim (no `CONST_ASSIGNMENTS` lookup).
const OLLAMA_MODEL_MAPPING_SRC: [(&str, &str); 10] = [
  ("llama", "llama3"),
  ("ll", "llama3"),
  ("llama2", "llama2"),
  ("ll2", "llama2"),
  ("mi", "mixtral"),
  ("mis", "mistral"),
  ("ge", "gemma"),
  ("cg", "codegemma"),
  ("cr", "command-r"),
  ("crp", "command-r-plus"),
];

/// Renders a model mapping as aligned, human-readable rows of the form
/// `alias → full-model-name`, one per entry, each terminated by a newline.
///
/// When `use_lookup` is true, each mapping value is treated as a constant
/// name and resolved to its model id via `CONST_ASSIGNMENTS`; otherwise the
/// value is used verbatim as the model name.
///
/// # Panics
/// Panics at build time (with the offending name) if `use_lookup` is true
/// and a mapping references a constant missing from `CONST_ASSIGNMENTS` —
/// that is a programming error in this file and should fail loudly.
fn pretty_print_mapping(use_lookup: bool, mapping: &[(&str, &str)]) -> String {
  mapping
    .iter()
    .map(|(alias, model)| {
      let full_name = if use_lookup {
        CONST_ASSIGNMENTS
          .iter()
          .find(|(constant_name, _)| constant_name == model)
          // Was a bare `.unwrap()`: panic with context so a typo in the
          // mapping tables is immediately diagnosable from the build log.
          .unwrap_or_else(|| {
            panic!("Model mapping references unknown constant `{model}`")
          })
          .1
      } else {
        model
      };
      format!(" {: <9} → {full_name}\n", *alias)
    })
    .collect::<String>()
}

/// Build script entry point: generates `$OUT_DIR/models.rs` by substituting
/// the model constants and alias tables above into the
/// `src_templates/models.rs` template, then registers change triggers so
/// Cargo re-runs this script only when its actual inputs change.
fn main() {
  // Template is embedded at build-script compile time; the placeholder
  // markers below are replaced with generated code.
  let models_rs_content = include_str!("src_templates/models.rs");

  let out_dir = env::var("OUT_DIR")
    .expect("OUT_DIR must be set by Cargo when running build scripts");
  let dest_path = Path::new(&out_dir).join("models.rs");

  // Fill in each placeholder: constant definitions, the two alias lookup
  // tables, and their pretty-printed help-text representations.
  let code = models_rs_content
    .replace(
      "// {const_assignments}",
      &CONST_ASSIGNMENTS
        .iter()
        .map(|(constant, value)| {
          format!("pub const {constant}: &str = \"{value}\";\n")
        })
        .collect::<String>(),
    )
    .replace(
      "// {groq_model_hashmap}",
      &GROQ_MODEL_MAPPING_SRC
        .iter()
        .map(|(model, constant)| {
          // Groq aliases point at constant names; resolve to the model id.
          let full_name = CONST_ASSIGNMENTS
            .iter()
            .find(|(constant_name, _)| constant_name == constant)
            .unwrap_or_else(|| {
              panic!("Groq mapping references unknown constant `{constant}`")
            })
            .1;
          format!("(\"{model}\", \"{full_name}\"),\n")
        })
        .collect::<String>(),
    )
    .replace(
      "{groq_models_pretty}",
      &pretty_print_mapping(true, &GROQ_MODEL_MAPPING_SRC),
    )
    .replace(
      "// {ollama_model_hashmap}",
      &OLLAMA_MODEL_MAPPING_SRC
        .iter()
        .map(|(model, constant)| format!("(\"{model}\", \"{constant}\"),\n"))
        .collect::<String>(),
    )
    .replace(
      "{ollama_models_pretty}",
      &pretty_print_mapping(false, &OLLAMA_MODEL_MAPPING_SRC),
    );

  fs::write(&dest_path, code)
    .expect("Failed to write generated models.rs to OUT_DIR");

  // Emitting any `rerun-if-changed` disables Cargo's default
  // rerun-on-any-change behavior, so every real input must be listed —
  // including the template pulled in via `include_str!` above.
  println!("cargo:rerun-if-changed=build.rs");
  println!("cargo:rerun-if-changed=src_templates/models.rs");
}
16 changes: 11 additions & 5 deletions makefile
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,21 @@ update-readme: usage.txt
readme.md


.PHONY: test-rust
test-rust:
cargo test -- --show-output
.PHONY: test-units
test-units:
cargo test --lib --bins -- --show-output
@echo "✅ All unit tests passed!\n\n"


.PHONY: test-cli
test-cli:
cargo test --test integration_tests


.PHONY: test
test: test-rust update-readme
test: test-units update-readme


.PHONY: install
install:
install: update-readme
cargo install --path .
46 changes: 28 additions & 18 deletions readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -66,17 +66,25 @@ Usage: cai [PROMPT]...
cai <COMMAND>
Commands:
mixtral Groq's Mixtral [aliases: mi]
gpt-turbo OpenAI's GPT 4 Turbo [aliases: tu]
gpt OpenAI's GPT 4 [aliases: gp]
claude-opus Anthropic's Claude Opus [aliases: op]
claude-sonnet Anthropic's Claude Sonnet [aliases: so]
claude-haiku 🏆 Default | Anthropic's Claude Haiku [aliases: ha]
llamafile Llamafile server hosted at http://localhost:8080 [aliases: lf]
ollama Ollama server hosted at http://localhost:11434 [aliases: ol]
all Send prompt to each provider's default model simultaneously (Claude Haiku, Groq Mixtral, GPT 4 Turbo,
Llamafile, Ollama Llama2)
help Print this message or the help of the given subcommand(s)
groq [aliases: gr]
mi - Mixtral shortcut
ll - Llama 3 shortcut (🏆 Default)
openai OpenAI [aliases: op]
gp - GPT 4 shortcut
gt - GPT 4 Turbo shortcut
anthropic Anthropic [aliases: an]
cl - Claude Opus
so - Claude Sonnet
ha - Claude Haiku
llamafile Llamafile server hosted at http://localhost:8080 [aliases: lf]
ollama Ollama server hosted at http://localhost:11434 [aliases: ol]
all Send prompt to each provider's default model simultaneously
- Groq Llama3
         - Anthropic Claude Haiku
- OpenAI GPT 4 Turbo
- Ollama Phi3
- Llamafile
help Print this message or the help of the given subcommand(s)
Arguments:
[PROMPT]... The prompt to send to the AI model
Expand All @@ -87,18 +95,20 @@ Options:
Examples:
# Send a prompt to the default model
cai How heigh is the Eiffel Tower in meters
cai Which year did the Titanic sink
# Send a prompt to each provider's default model
cai all How heigh is the Eiffel Tower in meters
cai all Which year did the Titanic sink
# Send a prompt to Anthropic's Claude Opus (+ alias)
cai claude-opus How heigh is the Eiffel Tower in meters
cai op How heigh is the Eiffel Tower in meters
# Send a prompt to Anthropic's Claude Opus
cai anthropic claude-opus Which year did the Titanic sink
cai an claude-opus Which year did the Titanic sink
cai cl Which year did the Titanic sink
cai anthropic claude-3-opus-20240229 Which year did the Titanic sink
# Send a prompt to locally running Ollama server
cai ollama mistral How heigh is the Eiffel Tower in meters
cai ol mistral How heigh is the Eiffel Tower in meters
cai ollama llama3 Which year did the Titanic sink
cai ol ll Which year did the Titanic sink
# Add data via stdin
cat main.rs | cai Explain this code
Expand Down

0 comments on commit 5e273cd

Please sign in to comment.