Change output of test_connection() to boolean #29
hauselin committed Dec 26, 2024
1 parent e45b5c5 commit 5651de7
Showing 32 changed files with 55 additions and 62 deletions.
26 changes: 13 additions & 13 deletions R/ollama.R
@@ -75,7 +75,7 @@ create_request <- function(endpoint, host = NULL) {
#' @references
#' [API documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion)
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' # text prompt
#' generate("llama3", "The sky is...", stream = FALSE, output = "df")
#' # stream and increase temperature
@@ -181,7 +181,7 @@ generate <- function(model, prompt, suffix = "", images = "", system = "", templ
#' @return A response in the format specified in the output parameter.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' # one message
#' messages <- list(
#' list(role = "user", content = "How are you doing?")
@@ -296,7 +296,7 @@ chat <- function(model, messages, tools = list(), stream = FALSE, keep_alive = "
#' @return A response in the format specified in the output parameter.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' create("mario", "FROM llama3\nSYSTEM You are mario from Super Mario Bros.")
#' generate("mario", "who are you?", output = "text") # model should say it's Mario
#' delete("mario") # delete the model created above
@@ -378,7 +378,7 @@ create <- function(name, modelfile = NULL, stream = FALSE, path = NULL, endpoint
#' @return A response in the format specified in the output parameter.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' list_models() # returns dataframe
#' list_models("df") # returns dataframe
#' list_models("resp") # httr2 response object
@@ -425,7 +425,7 @@ list_models <- function(output = c("df", "resp", "jsonlist", "raw", "text"), end
#' @return A response in the format specified in the output parameter.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' # show("llama3") # returns jsonlist
#' show("llama3", output = "resp") # returns response object
show <- function(name, verbose = FALSE, output = c("jsonlist", "resp", "raw"), endpoint = "/api/show", host = NULL) {
@@ -472,7 +472,7 @@ show <- function(name, verbose = FALSE, output = c("jsonlist", "resp", "raw"), e
#' @return A httr2 response object.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' copy("llama3", "llama3_copy")
#' delete("llama3_copy") # delete the model that was just copied
copy <- function(source, destination, endpoint = "/api/copy", host = NULL) {
@@ -566,7 +566,7 @@ delete <- function(name, endpoint = "/api/delete", host = NULL) {
#' @return A httr2 response object.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' pull("llama3")
#' pull("all-minilm", stream = FALSE)
pull <- function(name, stream = FALSE, insecure = FALSE, endpoint = "/api/pull", host = NULL) {
@@ -634,7 +634,7 @@ pull <- function(name, stream = FALSE, insecure = FALSE, endpoint = "/api/pull",
#' @return A httr2 response object.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' push("mattw/pygmalion:latest")
push <- function(name, insecure = FALSE, stream = FALSE, output = c("resp", "jsonlist", "raw", "text", "df"), endpoint = "/api/push", host = NULL) {

@@ -734,7 +734,7 @@ normalize <- function(x) {
#' @return A numeric matrix of the embedding. Each column is the embedding for one input.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' embed("nomic-embed-text:latest", "The quick brown fox jumps over the lazy dog.")
#' # pass multiple inputs
#' embed("nomic-embed-text:latest", c("Good bye", "Bye", "See you."))
@@ -806,7 +806,7 @@ embed <- function(model, input, truncate = TRUE, normalize = TRUE, keep_alive =
#' @return A numeric vector of the embedding.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' embeddings("nomic-embed-text:latest", "The quick brown fox jumps over the lazy dog.")
#' # pass model options to the model
#' embeddings("nomic-embed-text:latest", "Hello!", temperature = 0.1, num_predict = 3)
@@ -859,7 +859,7 @@ embeddings <- function(model, prompt, normalize = TRUE, keep_alive = "5m", endpo
#' @return A response in the format specified in the output parameter.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' ps("text")
ps <- function(output = c("df", "resp", "jsonlist", "raw", "text"), endpoint = "/api/ps", host = NULL) {
output <- output[1]
@@ -905,7 +905,7 @@ ps <- function(output = c("df", "resp", "jsonlist", "raw", "text"), endpoint = "
#' @return Does not return anything. It prints the conversation in the console.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' ohelp(first_prompt = "quit")
#' # regular usage: ohelp()
ohelp <- function(model = "codegemma:7b", ...) {
@@ -954,7 +954,7 @@ ohelp <- function(model = "codegemma:7b", ...) {
#' @return A logical value indicating if the model exists.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' model_avail("codegemma:7b")
#' model_avail("abc")
#' model_avail("llama3")
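Every @examplesIf gate in R/ollama.R changes the same way: roxygen2 only runs the examples that follow @examplesIf when the gating expression evaluates to TRUE, so a function that returns a plain logical can be used as the gate directly. A minimal sketch of the new pattern, using a hypothetical helper that is not part of this commit:

#' Hypothetical helper: ask a local model to say hi.
#'
#' @return A character vector.
#' @export
#'
#' @examplesIf test_connection()
#' # runs only when a local Ollama server responds
#' say_hi()
say_hi <- function() generate("llama3", "Say hi in three words.", output = "text")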
8 changes: 4 additions & 4 deletions R/utils.R
@@ -5,7 +5,7 @@
#'
#' @param url The URL of the Ollama server. Default is http://localhost:11434
#'
#' @return A httr2 response object.
#' @return Boolean TRUE if the server is running, otherwise FALSE.
#' @export
#'
#' @examples
@@ -19,12 +19,12 @@ test_connection <- function(url = "http://localhost:11434") {
{
resp <- httr2::req_perform(req)
message("Ollama local server running")
return(resp)
return(TRUE)
},
error = function(e) {
message("Ollama local server not running or wrong server.\nDownload and launch Ollama app to run the server. Visit https://ollama.com or https://github.com/ollama/ollama")
req$status_code <- 503
return(req)
return(FALSE)
}
)
}
@@ -81,7 +81,7 @@ stream_handler <- function(x, env, endpoint) {
#' @return A data frame, json list, raw or httr2 response object.
#' @export
#'
#' @examplesIf test_connection()$status_code == 200
#' @examplesIf test_connection()
#' resp <- list_models("resp")
#' resp_process(resp, "df") # parse response to dataframe/tibble
#' resp_process(resp, "jsonlist") # parse response to list
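With test_connection() now returning a logical, callers can branch on it directly instead of inspecting a status code. A short usage sketch, assuming only that a local Ollama server may or may not be reachable:

if (test_connection()) {
  # server reachable: list locally installed models as a data frame
  models <- list_models("df")
  print(models)
} else {
  # test_connection() has already printed instructions for starting the server
  message("Skipping Ollama calls.")
}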
2 changes: 1 addition & 1 deletion man/chat.Rd
2 changes: 1 addition & 1 deletion man/copy.Rd
2 changes: 1 addition & 1 deletion man/create.Rd
2 changes: 1 addition & 1 deletion man/embed.Rd
2 changes: 1 addition & 1 deletion man/embeddings.Rd
2 changes: 1 addition & 1 deletion man/generate.Rd
2 changes: 1 addition & 1 deletion man/list_models.Rd
2 changes: 1 addition & 1 deletion man/model_avail.Rd
2 changes: 1 addition & 1 deletion man/ohelp.Rd
2 changes: 1 addition & 1 deletion man/ps.Rd
2 changes: 1 addition & 1 deletion man/pull.Rd
2 changes: 1 addition & 1 deletion man/push.Rd
2 changes: 1 addition & 1 deletion man/resp_process.Rd
2 changes: 1 addition & 1 deletion man/show.Rd
2 changes: 1 addition & 1 deletion man/test_connection.Rd

Some generated files are not rendered by default.

10 changes: 5 additions & 5 deletions tests/testthat/test-chat.R
@@ -2,7 +2,7 @@ library(testthat)
library(ollamar)

test_that("chat function works with basic input", {
skip_if_not(test_connection()$status_code == 200, "Ollama server not available")
skip_if_not(test_connection(), "Ollama server not available")

messages <- list(
list(role = "user", content = "Tell me a 5-word story.")
@@ -56,7 +56,7 @@ test_that("chat function works with basic input", {
})

test_that("chat function handles streaming correctly", {
skip_if_not(test_connection()$status_code == 200, "Ollama server not available")
skip_if_not(test_connection(), "Ollama server not available")

messages <- list(
list(role = "user", content = "Count to 5")
@@ -70,7 +70,7 @@


test_that("chat function handles multiple messages", {
skip_if_not(test_connection()$status_code == 200, "Ollama server not available")
skip_if_not(test_connection(), "Ollama server not available")

messages <- list(
list(role = "user", content = "Hello!"),
@@ -86,7 +86,7 @@
})

test_that("chat function handles additional options", {
skip_if_not(test_connection()$status_code == 200, "Ollama server not available")
skip_if_not(test_connection(), "Ollama server not available")

messages <- list(
list(role = "user", content = "Tell me a very short joke")
@@ -102,7 +102,7 @@


test_that("chat function handles images in messages", {
skip_if_not(test_connection()$status_code == 200, "Ollama server not available")
skip_if_not(test_connection(), "Ollama server not available")
skip_if_not(model_avail("benzie/llava-phi-3"), "benzie/llava-phi-3 model not available")

images <- c(file.path(system.file("extdata", package = "ollamar"), "image1.png"),
2 changes: 1 addition & 1 deletion tests/testthat/test-copy.R
@@ -2,7 +2,7 @@ library(testthat)
library(ollamar)

test_that("copy function works with basic input", {
skip_if_not(test_connection()$status_code == 200, "Ollama server not available")
skip_if_not(test_connection(), "Ollama server not available")

copy("llama3", "llama3-BACKUP")
expect_true(model_avail("llama3-BACKUP"))
2 changes: 1 addition & 1 deletion tests/testthat/test-create.R
@@ -2,7 +2,7 @@ library(testthat)
library(ollamar)

test_that("create function works with basic input", {
skip_if_not(test_connection()$status_code == 200, "Ollama server not available")
skip_if_not(test_connection(), "Ollama server not available")

expect_error(create("mario"))
expect_error(create("mario", modelfile = "abc", path = "abc"))
2 changes: 1 addition & 1 deletion tests/testthat/test-delete.R
@@ -2,7 +2,7 @@ library(testthat)
library(ollamar)

test_that("delete function works", {
skip_if_not(test_connection()$status_code == 200, "Ollama server not available")
skip_if_not(test_connection(), "Ollama server not available")

# wrong model
expect_invisible(delete("sdafds"))
2 changes: 1 addition & 1 deletion tests/testthat/test-embed.R
@@ -4,7 +4,7 @@ library(ollamar)
# Note: for the following tests to work, make sure the "all-minilm" model exists locally

test_that("embed function works with basic input", {
skip_if_not(test_connection()$status_code == 200, "Ollama server not available")
skip_if_not(test_connection(), "Ollama server not available")

# one input
result <- embed("all-minilm", "hello")
2 changes: 1 addition & 1 deletion tests/testthat/test-embeddings.R
@@ -2,7 +2,7 @@ library(testthat)
library(ollamar)

test_that("embeddings function works with basic input", {
skip_if_not(test_connection()$status_code == 200, "Ollama server not available")
skip_if_not(test_connection(), "Ollama server not available")

result <- embeddings("all-minilm", "hello")
expect_type(result, "double")
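The same logical drops straight into testthat::skip_if_not(), which is what the updated tests above do. A self-contained sketch of the pattern (hypothetical test, not part of this commit):

library(testthat)
library(ollamar)

test_that("server-dependent behaviour is skipped without a server", {
  skip_if_not(test_connection(), "Ollama server not available")
  expect_true(model_avail("llama3"))  # assumes the llama3 model has been pulled
})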