Merge pull request #391 from L-jasmine/feat/dynamic_link
Support dynamic link llama
MarcusDunn authored Jul 9, 2024
2 parents 38686de + 70f26c2 commit fc36685
Showing 3 changed files with 25 additions and 6 deletions.
1 change: 1 addition & 0 deletions llama-cpp-2/Cargo.toml
@@ -17,6 +17,7 @@ tracing = { workspace = true }
 [features]
 cuda = ["llama-cpp-sys-2/cuda"]
 metal = ["llama-cpp-sys-2/metal"]
+dynamic_link = ["llama-cpp-sys-2/dynamic_link"]
 vulkan = ["llama-cpp-sys-2/vulkan"]
 native = ["llama-cpp-sys-2/native"]
 sampler = []
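For a downstream crate, turning on dynamic linking is just a matter of enabling the new feature, which llama-cpp-2 forwards to llama-cpp-sys-2. A minimal Cargo.toml sketch; the version number is illustrative and not taken from this commit:

    [dependencies]
    # "dynamic_link" forwards to llama-cpp-sys-2/dynamic_link (see the feature list above)
    llama-cpp-2 = { version = "0.1", features = ["dynamic_link"] }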
1 change: 1 addition & 0 deletions llama-cpp-sys-2/Cargo.toml
@@ -62,5 +62,6 @@ cuda = []
 f16c = []
 fma = []
 metal = []
+dynamic_link = []
 vulkan = []
 native = []
29 changes: 23 additions & 6 deletions llama-cpp-sys-2/build.rs
@@ -86,11 +86,11 @@ compile_error!("feature \"vulkan\" cannot be enabled alongside other GPU based f
 
 static LLAMA_PATH: Lazy<PathBuf> = Lazy::new(|| PathBuf::from("./llama.cpp"));
 
-fn compile_bindings(out_path: &Path) {
+fn compile_bindings(out_path: &Path, llama_header_path: &Path) {
     println!("Generating bindings..");
     let bindings = bindgen::Builder::default()
-        .header(LLAMA_PATH.join("ggml.h").to_string_lossy())
-        .header(LLAMA_PATH.join("llama.h").to_string_lossy())
+        // .header(llama_header_path.join("ggml.h").to_string_lossy())
+        .header(llama_header_path.join("llama.h").to_string_lossy())
         .derive_partialeq(true)
         .allowlist_function("ggml_.*")
         .allowlist_type("ggml_.*")
@@ -670,18 +670,35 @@ fn compile_llama(mut cxx: Build, _out_path: impl AsRef<Path>) {
 }
 
 fn main() {
+    let out_path = PathBuf::from(env::var("OUT_DIR").expect("No out dir found"));
+
+    if cfg!(feature = "dynamic_link") {
+        println!("cargo:rustc-link-lib=llama");
+        println!("cargo:rustc-link-lib=ggml");
+
+        let llama_header_path = std::env::var("LLAMA_HEADE");
+        if let Ok(llama_header_path) = llama_header_path {
+            compile_bindings(&out_path, Path::new(&llama_header_path));
+        } else {
+            compile_bindings(&out_path, &LLAMA_PATH);
+        }
+
+        if let Ok(llama_lib_path) = std::env::var("LLAMA_LIB") {
+            println!("cargo:rustc-link-search={llama_lib_path}");
+        }
+        return;
+    }
+
     if std::fs::read_dir(LLAMA_PATH.as_path()).is_err() {
         panic!(
             "Could not find {}. Did you forget to initialize submodules?",
             LLAMA_PATH.display()
         );
     }
 
-    let out_path = PathBuf::from(env::var("OUT_DIR").expect("No out dir found"));
-
     println!("cargo:rerun-if-changed={}", LLAMA_PATH.display());
 
-    compile_bindings(&out_path);
+    compile_bindings(&out_path, &LLAMA_PATH);
 
     let mut cx = Build::new();
     let mut cxx = Build::new();
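With dynamic_link enabled, build.rs now skips compiling the bundled llama.cpp sources and instead emits rustc-link-lib directives for llama and ggml, generating bindings from LLAMA_HEADE (a directory containing llama.h, spelled as in the diff) when it is set and adding LLAMA_LIB to the linker search path; if LLAMA_HEADE is unset, bindings fall back to the bundled ./llama.cpp headers. One way to supply these variables per project is Cargo's [env] configuration table; a minimal sketch, assuming the paths point at an existing llama.cpp build:

    # .cargo/config.toml (the paths below are placeholders for wherever libllama was built)
    [env]
    LLAMA_HEADE = "/opt/llama.cpp/include"  # directory containing llama.h, passed to bindgen
    LLAMA_LIB = "/opt/llama.cpp/lib"        # added via cargo:rustc-link-search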
