From 430024a502ef8b472726f694283eefb22ea7e2d2 Mon Sep 17 00:00:00 2001 From: Flier Lu Date: Sun, 12 Apr 2026 10:56:52 +0800 Subject: [PATCH] Enhance Makefile with new targets for testing and model preparation; update lib.rs and mod.rs for improved functionality; add model files for testing. --- Makefile | 20 +++++++++++++++++++- ollama-rs/src/generation/parameters/mod.rs | 16 +++++++++++++++- ollama-rs/src/lib.rs | 2 +- ollama-rs/tests/model/Modelfile.mario | 9 +++++++++ ollama-rs/tests/model/Modelfile.test_model | 1 + ollama-rs/tests/push_model.rs | 9 +++++++-- 6 files changed, 52 insertions(+), 5 deletions(-) create mode 100644 ollama-rs/tests/model/Modelfile.mario create mode 100644 ollama-rs/tests/model/Modelfile.test_model diff --git a/Makefile b/Makefile index 7fe3f41..54198ad 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,21 @@ +.PHONY: fmt test prepare _ensure_ollama_create _ensure_ollama_model + fmt: cargo fmt - cargo clippy --fix --allow-dirty \ No newline at end of file + cargo clippy --fix --allow-dirty + +test: prepare + cargo test + +prepare: + @$(MAKE) _ensure_ollama_create NAME=mario MODELFILE=ollama-rs/tests/model/Modelfile.mario + @$(MAKE) _ensure_ollama_create NAME=test_model MODELFILE=ollama-rs/tests/model/Modelfile.test_model + @$(MAKE) _ensure_ollama_model MODEL=llama2:latest + @$(MAKE) _ensure_ollama_model MODEL=granite-code:3b + @$(MAKE) _ensure_ollama_model MODEL=llava:latest + +_ensure_ollama_create: + @ollama list 2>/dev/null | awk 'NR>1 {print $$1}' | grep -qxF '$(if $(strip $(MODEL)),$(MODEL),$(NAME):latest)' || ollama create '$(NAME)' -f '$(MODELFILE)' + +_ensure_ollama_model: + @ollama list 2>/dev/null | awk 'NR>1 {print $$1}' | grep -qxF '$(MODEL)' || ollama pull '$(MODEL)' diff --git a/ollama-rs/src/generation/parameters/mod.rs b/ollama-rs/src/generation/parameters/mod.rs index cba55b1..31d1e21 100644 --- a/ollama-rs/src/generation/parameters/mod.rs +++ b/ollama-rs/src/generation/parameters/mod.rs @@ -79,8 +79,16 @@ impl<'de> 
Deserialize<'de> for FormatType { /// Represents a serialized JSON schema. You can create this by converting /// a JsonSchema: /// ```rust +/// use schemars::{JsonSchema, schema_for}; +/// use ollama_rs::generation::parameters::JsonStructure; +/// +/// #[derive(JsonSchema)] +/// struct Output { +/// foo: i32 +/// } +/// /// let json_schema = schema_for!(Output); -/// let serialized: SerializedJsonSchema = json_schema.into(); +/// let serialized: JsonStructure = json_schema.into(); /// ``` #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct JsonStructure { @@ -105,6 +113,12 @@ impl JsonStructure { } } +impl From<Schema> for JsonStructure { + fn from(schema: Schema) -> Self { + Self { schema } + } +} + /// Used to control how long a model stays loaded in memory, by default models are unloaded after 5 minutes of inactivity #[derive(Debug, Clone, PartialEq)] pub enum KeepAlive { diff --git a/ollama-rs/src/lib.rs b/ollama-rs/src/lib.rs index 9a55373..efe9ba2 100644 --- a/ollama-rs/src/lib.rs +++ b/ollama-rs/src/lib.rs @@ -29,7 +29,7 @@ pub mod models; /// /// ``` /// use url::Url; -/// use ollama_rs::IntoUrl; +/// use ollama_rs::IntoUrlSealed; /// /// let url: Url = "http://example.com".into_url().unwrap(); /// ``` diff --git a/ollama-rs/tests/model/Modelfile.mario b/ollama-rs/tests/model/Modelfile.mario new file mode 100644 index 0000000..9a2f032 --- /dev/null +++ b/ollama-rs/tests/model/Modelfile.mario @@ -0,0 +1,9 @@ +FROM llama2 + +# set to 1 (higher is more creative, lower is more coherent) +PARAMETER temperature 1 + +# set the system prompt +SYSTEM """ +You are Mario from Super Mario Bros. Answer as Mario, the assistant, only. 
+""" \ No newline at end of file diff --git a/ollama-rs/tests/model/Modelfile.test_model b/ollama-rs/tests/model/Modelfile.test_model new file mode 100644 index 0000000..22a37f8 --- /dev/null +++ b/ollama-rs/tests/model/Modelfile.test_model @@ -0,0 +1 @@ +FROM llama2 \ No newline at end of file diff --git a/ollama-rs/tests/push_model.rs b/ollama-rs/tests/push_model.rs index ec784f8..f3f57d6 100644 --- a/ollama-rs/tests/push_model.rs +++ b/ollama-rs/tests/push_model.rs @@ -3,14 +3,19 @@ use tokio_stream::StreamExt; #[tokio::test] /// This test needs a local model named `test_model:latest` to work, and requires registering for ollama.ai and adding a public key first. +/// The model name should be in the form of `/:`. async fn test_push_model() { let ollama = Ollama::default(); - let mut res = ollama - .push_model_stream("test_model:latest".into(), false) + let model_name = format!("{}/test_model:latest", env!("USER")); + + ollama + .copy_model("test_model".into(), model_name.clone()) .await .unwrap(); + let mut res = ollama.push_model_stream(model_name, false).await.unwrap(); + while let Some(res) = res.next().await { match res { Ok(res) => println!("{res:?}"),