diff --git a/Cargo.toml b/Cargo.toml
index d6cf18614f..bd6e1a856b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -46,7 +46,7 @@ criterion = { version = "0.5.1", default-features=false }
 cudarc = { version = "0.12.1", features = ["std", "cublas", "cublaslt", "curand", "driver", "nvrtc", "f16", "cuda-version-from-build-system", "dynamic-linking"], default-features=false }
 fancy-regex = "0.13.0"
 gemm = { version = "0.17.0", features = ["wasm-simd128-enable"] }
-hf-hub = "0.3.0"
+hf-hub = { version = "0.3.3", package = "candle-hf-hub" }
 half = { version = "2.3.1", features = ["num-traits", "use-intrinsics", "rand_distr"] }
 hound = "3.5.1"
 image = { version = "0.25.2", default-features = false, features = ["jpeg", "png"] }
diff --git a/candle-book/src/inference/hub.md b/candle-book/src/inference/hub.md
index e8d8b267db..fb6f9e51f6 100644
--- a/candle-book/src/inference/hub.md
+++ b/candle-book/src/inference/hub.md
@@ -11,8 +11,8 @@ Then let's start by downloading the [model file](https://huggingface.co/bert-bas
 
 ```rust
 # extern crate candle_core;
-# extern crate hf_hub;
-use hf_hub::api::sync::Api;
+# extern crate candle_hf_hub;
+use candle_hf_hub::api::sync::Api;
 use candle_core::Device;
 
 let api = Api::new().unwrap();
@@ -50,8 +50,8 @@ Now that we have our weights, we can use them in our bert architecture:
 ```rust
 # extern crate candle_core;
 # extern crate candle_nn;
-# extern crate hf_hub;
-# use hf_hub::api::sync::Api;
+# extern crate candle_hf_hub;
+# use candle_hf_hub::api::sync::Api;
 #
 # let api = Api::new().unwrap();
 # let repo = api.model("bert-base-uncased".to_string());
diff --git a/candle-examples/examples/llama/main.rs b/candle-examples/examples/llama/main.rs
index 7a555b00af..cc99b6c191 100644
--- a/candle-examples/examples/llama/main.rs
+++ b/candle-examples/examples/llama/main.rs
@@ -139,8 +139,8 @@ fn main() -> Result<()> {
             Which::V2 => "meta-llama/Llama-2-7b-hf".to_string(),
             Which::V3 => "meta-llama/Meta-Llama-3-8B".to_string(),
             Which::V3Instruct => "meta-llama/Meta-Llama-3-8B-Instruct".to_string(),
-            Which::V31 => "meta-llama/Meta-Llama-3.1-8B".to_string(),
-            Which::V31Instruct => "meta-llama/Meta-Llama-3.1-8B-Instruct".to_string(),
+            Which::V31 => "meta-llama/Llama-3.1-8B".to_string(),
+            Which::V31Instruct => "meta-llama/Llama-3.1-8B-Instruct".to_string(),
             Which::V32_1b => "meta-llama/Llama-3.2-1B".to_string(),
             Which::V32_1bInstruct => "meta-llama/Llama-3.2-1B-Instruct".to_string(),
             Which::V32_3b => "meta-llama/Llama-3.2-3B".to_string(),
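
Note for downstream users (not part of the patch): because the workspace dependency uses `package = "candle-hf-hub"` under the existing `hf-hub` key, crates inside the workspace keep importing the library as `hf_hub`, while code that depends on `candle-hf-hub` directly — as the book examples now do — imports it as `candle_hf_hub`. A minimal sketch of the latter, reusing the repo id from the updated llama example; the `tokenizer.json` file name is an illustrative assumption, not taken from the diff:

```rust
// Sketch assuming a direct dependency on `candle-hf-hub = "0.3.3"`,
// so the crate is imported under its real name.
use candle_hf_hub::api::sync::Api;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Same blocking API surface as the original `hf-hub` crate.
    let api = Api::new()?;
    // Repo id matches the rename in candle-examples/examples/llama/main.rs.
    let repo = api.model("meta-llama/Llama-3.1-8B".to_string());
    // Downloads the file into the local HF cache (or reuses a cached copy)
    // and returns its path. `tokenizer.json` is a hypothetical file name.
    let path = repo.get("tokenizer.json")?;
    println!("fetched {}", path.display());
    Ok(())
}
```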