Skip to content

Commit b96a67a

Browse files
committed
Ollama Linux support
1 parent 238042f commit b96a67a

File tree

2 files changed

+78
-19
lines changed

2 files changed

+78
-19
lines changed

src-tauri/src/lib.rs

Lines changed: 77 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,30 @@ use std::path::Path;
44
#[cfg(target_os = "windows")]
55
use std::os::windows::process::CommandExt;
66

7+
// Helper function to generate an extended PATH based on platform.
//
// GUI-launched apps often inherit a minimal PATH, so we append the common
// install locations where `ollama` (and, on macOS, `brew`) typically live.
// Returns the current PATH unchanged on platforms we don't special-case.
fn get_extended_path() -> String {
    let current_path = std::env::var("PATH").unwrap_or_default();

    if cfg!(target_os = "macos") {
        // Homebrew on Apple Silicon (/opt/homebrew) and Intel (/usr/local),
        // plus the standard system directories.
        format!("{}:/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin", current_path)
    } else if cfg!(target_os = "linux") {
        // Use $HOME rather than "/home/$USER": HOME is authoritative and also
        // correct for root (/root) and non-standard home directories. This
        // matches check_ollama_installation_paths, which probes
        // $HOME/.local/bin/ollama.
        format!(
            "{}:/usr/local/bin:/usr/bin:/bin:{}/.local/bin",
            current_path,
            std::env::var("HOME").unwrap_or_default()
        )
    } else {
        // Windows uses ';' separators and a different directory layout,
        // so leave the inherited PATH untouched there.
        current_path
    }
}
22+
723
#[tauri::command]
824
fn get_platform() -> String {
925
if cfg!(target_os = "macos") {
1026
"macos".to_string()
1127
} else if cfg!(target_os = "windows") {
1228
"windows".to_string()
29+
} else if cfg!(target_os = "linux") {
30+
"linux".to_string()
1331
} else {
1432
"unknown".to_string()
1533
}
@@ -38,6 +56,16 @@ fn check_ollama_installation_paths() -> bool {
3856
Path::new(program_files_path).exists() ||
3957
Path::new(program_files_x86_path).exists() ||
4058
Path::new(&appdata_path).exists()
59+
} else if cfg!(target_os = "linux") {
60+
// Check common Linux installation paths
61+
let usr_bin_path = "/usr/bin/ollama";
62+
let usr_local_bin_path = "/usr/local/bin/ollama";
63+
let home_local_bin = format!("{}/.local/bin/ollama",
64+
std::env::var("HOME").unwrap_or_default());
65+
66+
Path::new(usr_bin_path).exists() ||
67+
Path::new(usr_local_bin_path).exists() ||
68+
Path::new(&home_local_bin).exists()
4169
} else {
4270
false
4371
}
@@ -85,8 +113,8 @@ async fn install_ollama_macos() -> Result<String, String> {
85113
let current_path = std::env::var("PATH").unwrap_or_default();
86114
debug_info.push_str(&format!("Current PATH: {}\n", current_path));
87115

88-
// Extended PATH to include common Homebrew locations
89-
let extended_path = format!("{}:/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin", current_path);
116+
// Extended PATH to include platform-specific common locations
117+
let extended_path = get_extended_path();
90118
debug_info.push_str(&format!("Extended PATH: {}\n", extended_path));
91119

92120
// First, try to find brew using which command with extended PATH
@@ -178,16 +206,49 @@ async fn install_ollama_macos() -> Result<String, String> {
178206

179207
#[tauri::command]
180208
async fn install_ollama_windows() -> Result<String, String> {
181-
// On Windows, we'll direct the user to download manually since
182-
// automatic installation requires more complex handling
183-
Err("Please download the Ollama installer from ollama.ai/download and run it manually.".to_string())
209+
// On Windows, redirect user to the official download page
210+
let download_url = "https://ollama.com/download/windows";
211+
212+
// Open the download page in the default browser
213+
match std::process::Command::new("cmd")
214+
.args(["/C", "start", download_url])
215+
.output()
216+
{
217+
Ok(_) => Ok("Opening Ollama download page in your browser. Please download and install Ollama, then restart this app.".to_string()),
218+
Err(e) => Err(format!("Failed to open download page. Please visit {} manually to download Ollama. Error: {}", download_url, e))
219+
}
220+
}
221+
222+
#[tauri::command]
223+
async fn install_ollama_linux() -> Result<String, String> {
224+
// On Linux, use the official install script via curl
225+
let output = Command::new("sh")
226+
.args(["-c", "curl -fsSL https://ollama.com/install.sh | sh"])
227+
.output();
228+
229+
match output {
230+
Ok(output) => {
231+
if output.status.success() {
232+
let stdout = String::from_utf8_lossy(&output.stdout);
233+
Ok(format!("Ollama installed successfully!\n\nOutput:\n{}", stdout.trim()))
234+
} else {
235+
let stderr = String::from_utf8_lossy(&output.stderr);
236+
Err(format!("Failed to install Ollama. Error:\n{}", stderr))
237+
}
238+
}
239+
Err(e) => {
240+
Err(format!(
241+
"Failed to run installation script. Error: {}\n\nPlease try running this command manually in your terminal:\ncurl -fsSL https://ollama.com/install.sh | sh",
242+
e
243+
))
244+
}
245+
}
184246
}
185247

186248
#[tauri::command]
187249
async fn download_ollama_model(model_name: String) -> Result<String, String> {
188-
// Extended PATH to include common Homebrew locations
189-
let extended_path = format!("{}:/opt/homebrew/bin:/usr/local/bin",
190-
std::env::var("PATH").unwrap_or_default());
250+
// Extended PATH to include platform-specific common locations
251+
let extended_path = get_extended_path();
191252

192253
let output = Command::new("ollama")
193254
.args(["pull", &model_name])
@@ -210,9 +271,8 @@ async fn download_ollama_model(model_name: String) -> Result<String, String> {
210271

211272
#[tauri::command]
212273
async fn list_installed_models() -> Result<Vec<String>, String> {
213-
// Extended PATH to include common Homebrew locations
214-
let extended_path = format!("{}:/opt/homebrew/bin:/usr/local/bin",
215-
std::env::var("PATH").unwrap_or_default());
274+
// Extended PATH to include platform-specific common locations
275+
let extended_path = get_extended_path();
216276

217277
let output = Command::new("ollama")
218278
.args(["list"])
@@ -293,11 +353,10 @@ async fn start_ollama_service() -> Result<String, String> {
293353
}
294354
}
295355
} else {
296-
// macOS: Try multiple methods with proper PATH handling
356+
// macOS/Linux: Try multiple methods with proper PATH handling
297357

298-
// Extended PATH to include common Homebrew locations
299-
let extended_path = format!("{}:/opt/homebrew/bin:/usr/local/bin",
300-
std::env::var("PATH").unwrap_or_default());
358+
// Extended PATH to include platform-specific common locations
359+
let extended_path = get_extended_path();
301360

302361
// Method 1: Use nohup for proper daemonization
303362
let nohup_result = Command::new("nohup")
@@ -420,9 +479,8 @@ async fn unload_ollama_model() -> Result<String, String> {
420479

421480
#[tauri::command]
422481
async fn uninstall_ollama_model(model_name: String) -> Result<String, String> {
423-
// Extended PATH to include common Homebrew locations
424-
let extended_path = format!("{}:/opt/homebrew/bin:/usr/local/bin",
425-
std::env::var("PATH").unwrap_or_default());
482+
// Extended PATH to include platform-specific common locations
483+
let extended_path = get_extended_path();
426484

427485
let output = Command::new("ollama")
428486
.args(["rm", &model_name])
@@ -572,6 +630,7 @@ pub fn run() {
572630
check_ollama_service_status,
573631
install_ollama_macos,
574632
install_ollama_windows,
633+
install_ollama_linux,
575634
download_ollama_model,
576635
list_installed_models,
577636
start_ollama_service,

src-tauri/tauri.conf.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@
3434
},
3535
"bundle": {
3636
"active": true,
37-
"targets": ["dmg", "msi", "nsis"],
37+
"targets": ["dmg", "msi", "nsis", "appimage", "deb"],
3838
"icon": [
3939
"icons/32x32.png",
4040
"icons/128x128.png",

0 commit comments

Comments
 (0)