feat: add Ollama support #1001

Open · wants to merge 1 commit into main
5 changes: 5 additions & 0 deletions locales/app.yml
@@ -793,3 +793,8 @@ _version: 2
es: "No se pueden actualizar las aplicaciones de Microsoft Store, se requiere intervención manual"
fr: "Impossible de mettre à jour les applications du Microsoft Store, une intervention manuelle est nécessaire"
zh_TW: "無法更新 Microsoft Store 應用,需手動幹預"
"Pulling model '{model_name}'":
en: "Pulling model '%{model_name}'"
es: "Extrayendo el modelo '%{model_name}'"
fr: "Récupération du modèle '%{model_name}'"
zh_TW: "正在拉取模型 '%{model_name}'"
1 change: 1 addition & 0 deletions src/config.rs
@@ -116,6 +116,7 @@ pub enum Step {
    Myrepos,
    Nix,
    Node,
    Ollama,
    Opam,
    Pacdef,
    Pacstall,
1 change: 1 addition & 0 deletions src/main.rs
@@ -435,6 +435,7 @@ fn run() -> Result<()> {
    runner.execute(Step::Zvm, "ZVM", || generic::run_zvm(&ctx))?;
    runner.execute(Step::Aqua, "aqua", || generic::run_aqua(&ctx))?;
    runner.execute(Step::Bun, "bun", || generic::run_bun(&ctx))?;
    runner.execute(Step::Ollama, "Ollama", || generic::run_ollama_pull(&ctx))?;

    if should_run_powershell {
        runner.execute(Step::Powershell, "Powershell Modules Update", || {
41 changes: 40 additions & 1 deletion src/steps/generic.rs
@@ -19,12 +19,12 @@ use crate::execution_context::ExecutionContext;
use crate::executor::ExecutorOutput;
use crate::terminal::{print_separator, shell};
use crate::utils::{self, check_is_python_2_or_shim, get_require_sudo_string, require, require_option, which, PathExt};
use crate::Step;
use crate::HOME_DIR;
use crate::{
    error::{SkipStep, StepFailed, TopgradeError},
    terminal::print_warning,
};
use crate::{print_info, Step};

#[cfg(target_os = "linux")]
pub fn is_wsl() -> Result<bool> {
@@ -1155,3 +1155,42 @@ pub fn run_bun(ctx: &ExecutionContext) -> Result<()> {

    ctx.run_type().execute(bun).arg("upgrade").status_checked()
}

/// Pull all the installed LLMs.
pub fn run_ollama_pull(ctx: &ExecutionContext) -> Result<()> {
    let ollama = require("ollama")?;

    print_separator("Ollama");

    // Example output (stdout)
    //
    // ```
    // NAME                ID              SIZE      MODIFIED
    // gemma2:2b           8ccf136fdd52    1.6 GB    6 minutes ago
    // moondream:latest    55fc3abd3867    1.7 GB    4 hours ago
    // ```
    //
    // We use `std::process::Command` here so that we can still collect the model
    // list even during a dry run.
    let ollama_list_output = Command::new(&ollama).arg("list").output_checked_utf8()?;
    let ollama_list_stdout = ollama_list_output.stdout;
    // Trim the trailing newline; otherwise `split('\n')` would yield an empty string as the last element.
    let ollama_list_stdout_trimmed = ollama_list_stdout.trim_end_matches(|char| char == '\n');
    // `skip(1)` skips the `NAME  ID  SIZE  MODIFIED` header line.
    let model_lines = ollama_list_stdout_trimmed.split('\n').skip(1);
    for model_line in model_lines {
        let mut columns = model_line.split_whitespace();
        let model_name = columns
            .next()
            .expect("The format of `ollama list` output has changed, file an issue to Topgrade!");
        assert!(model_name.contains(':'), "a tag should be included in the model name");

        print_info(t!("Pulling model '{model_name}'", model_name = model_name));
        ctx.run_type()
            .execute(&ollama)
            .args(["pull", model_name])
            .status_checked()?;
    }

    Ok(())
}
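
The per-model loop above boils down to: skip the header line of `ollama list`, take the first whitespace-delimited column as the model name, and run `ollama pull <name>` for each. A rough, self-contained sketch of that parsing step with a unit test against the sample output from the comment; `parse_model_names` is a hypothetical helper for illustration and is not part of this PR:

/// Hypothetical helper mirroring the loop in `run_ollama_pull`: take the
/// first whitespace-delimited column of every line after the header.
fn parse_model_names(ollama_list_stdout: &str) -> Vec<&str> {
    ollama_list_stdout
        .trim_end_matches('\n')
        .split('\n')
        .skip(1) // skip the `NAME  ID  SIZE  MODIFIED` header
        .filter_map(|line| line.split_whitespace().next())
        .collect()
}

#[cfg(test)]
mod tests {
    use super::parse_model_names;

    #[test]
    fn parses_sample_ollama_list_output() {
        let sample = "NAME                ID              SIZE      MODIFIED\n\
                      gemma2:2b           8ccf136fdd52    1.6 GB    6 minutes ago\n\
                      moondream:latest    55fc3abd3867    1.7 GB    4 hours ago\n";
        assert_eq!(parse_model_names(sample), vec!["gemma2:2b", "moondream:latest"]);
    }
}
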