diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index e36c38239..f92ccca7b 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,3 +1,3 @@ # These are supported funding model platforms -github: [sashabaranov, vvatanabe] +github: [gradientlabs-ai, vvatanabe] diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 536a2ee29..710c1f511 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -8,7 +8,7 @@ assignees: '' --- Your issue may already be reported! -Please search on the [issue tracker](https://github.com/sashabaranov/go-openai/issues) before creating one. +Please search on the [issue tracker](https://github.com/gradientlabs-ai/go-openai/issues) before creating one. **Describe the bug** A clear and concise description of what the bug is. If it's an API-related bug, please provide relevant endpoint(s). diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 2359e5c00..397cfff7d 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -8,7 +8,7 @@ assignees: '' --- Your issue may already be reported! -Please search on the [issue tracker](https://github.com/sashabaranov/go-openai/issues) before creating one. +Please search on the [issue tracker](https://github.com/gradientlabs-ai/go-openai/issues) before creating one. **Is your feature request related to a problem? Please describe.** A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 222c065ce..3ef9ba1d7 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,5 +1,5 @@ A similar PR may already be submitted! -Please search among the [Pull request](https://github.com/sashabaranov/go-openai/pulls) before creating one. +Please search among the [Pull request](https://github.com/gradientlabs-ai/go-openai/pulls) before creating one. If your changes introduce breaking changes, please prefix the title of your pull request with "[BREAKING_CHANGES]". This allows for clear identification of such changes in the 'What's Changed' section on the release page, making it developer-friendly. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4dd184042..392336b50 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,22 +1,22 @@ # Contributing Guidelines ## Overview -Thank you for your interest in contributing to the "Go OpenAI" project! By following this guideline, we hope to ensure that your contributions are made smoothly and efficiently. The Go OpenAI project is licensed under the [Apache 2.0 License](https://github.com/sashabaranov/go-openai/blob/master/LICENSE), and we welcome contributions through GitHub pull requests. +Thank you for your interest in contributing to the "Go OpenAI" project! By following this guideline, we hope to ensure that your contributions are made smoothly and efficiently. The Go OpenAI project is licensed under the [Apache 2.0 License](https://github.com/gradientlabs-ai/go-openai/blob/master/LICENSE), and we welcome contributions through GitHub pull requests. ## Reporting Bugs -If you discover a bug, first check the [GitHub Issues page](https://github.com/sashabaranov/go-openai/issues) to see if the issue has already been reported. 
If you're reporting a new issue, please use the "Bug report" template and provide detailed information about the problem, including steps to reproduce it. +If you discover a bug, first check the [GitHub Issues page](https://github.com/gradientlabs-ai/go-openai/issues) to see if the issue has already been reported. If you're reporting a new issue, please use the "Bug report" template and provide detailed information about the problem, including steps to reproduce it. ## Suggesting Features -If you want to suggest a new feature or improvement, first check the [GitHub Issues page](https://github.com/sashabaranov/go-openai/issues) to ensure a similar suggestion hasn't already been made. Use the "Feature request" template to provide a detailed description of your suggestion. +If you want to suggest a new feature or improvement, first check the [GitHub Issues page](https://github.com/gradientlabs-ai/go-openai/issues) to ensure a similar suggestion hasn't already been made. Use the "Feature request" template to provide a detailed description of your suggestion. ## Reporting Vulnerabilities -If you identify a security concern, please use the "Report a security vulnerability" template on the [GitHub Issues page](https://github.com/sashabaranov/go-openai/issues) to share the details. This report will only be viewable to repository maintainers. You will be credited if the advisory is published. +If you identify a security concern, please use the "Report a security vulnerability" template on the [GitHub Issues page](https://github.com/gradientlabs-ai/go-openai/issues) to share the details. This report will only be viewable to repository maintainers. You will be credited if the advisory is published. ## Questions for Users -If you have questions, please utilize [StackOverflow](https://stackoverflow.com/) or the [GitHub Discussions page](https://github.com/sashabaranov/go-openai/discussions). +If you have questions, please utilize [StackOverflow](https://stackoverflow.com/) or the [GitHub Discussions page](https://github.com/gradientlabs-ai/go-openai/discussions). ## Contributing Code -There might already be a similar pull requests submitted! Please search for [pull requests](https://github.com/sashabaranov/go-openai/pulls) before creating one. +There might already be a similar pull requests submitted! Please search for [pull requests](https://github.com/gradientlabs-ai/go-openai/pulls) before creating one. 
### Requirements for Merging a Pull Request diff --git a/README.md b/README.md index 7946f4d9b..16f9ba87c 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Go OpenAI -[![Go Reference](https://pkg.go.dev/badge/github.com/sashabaranov/go-openai.svg)](https://pkg.go.dev/github.com/sashabaranov/go-openai) -[![Go Report Card](https://goreportcard.com/badge/github.com/sashabaranov/go-openai)](https://goreportcard.com/report/github.com/sashabaranov/go-openai) -[![codecov](https://codecov.io/gh/sashabaranov/go-openai/branch/master/graph/badge.svg?token=bCbIfHLIsW)](https://codecov.io/gh/sashabaranov/go-openai) +[![Go Reference](https://pkg.go.dev/badge/github.com/gradientlabs-ai/go-openai.svg)](https://pkg.go.dev/github.com/gradientlabs-ai/go-openai) +[![Go Report Card](https://goreportcard.com/badge/github.com/gradientlabs-ai/go-openai)](https://goreportcard.com/report/github.com/gradientlabs-ai/go-openai) +[![codecov](https://codecov.io/gh/gradientlabs-ai/go-openai/branch/master/graph/badge.svg?token=bCbIfHLIsW)](https://codecov.io/gh/gradientlabs-ai/go-openai) This library provides unofficial Go clients for [OpenAI API](https://platform.openai.com/). We support: @@ -13,7 +13,7 @@ This library provides unofficial Go clients for [OpenAI API](https://platform.op ## Installation ``` -go get github.com/sashabaranov/go-openai +go get github.com/gradientlabs-ai/go-openai ``` Currently, go-openai requires Go version 1.18 or greater. @@ -28,7 +28,7 @@ package main import ( "context" "fmt" - openai "github.com/sashabaranov/go-openai" + openai "github.com/gradientlabs-ai/go-openai" ) func main() { @@ -80,7 +80,7 @@ import ( "errors" "fmt" "io" - openai "github.com/sashabaranov/go-openai" + openai "github.com/gradientlabs-ai/go-openai" ) func main() { @@ -133,7 +133,7 @@ package main import ( "context" "fmt" - openai "github.com/sashabaranov/go-openai" + openai "github.com/gradientlabs-ai/go-openai" ) func main() { @@ -166,7 +166,7 @@ import ( "context" "fmt" "io" - openai "github.com/sashabaranov/go-openai" + openai "github.com/gradientlabs-ai/go-openai" ) func main() { @@ -215,7 +215,7 @@ import ( "context" "fmt" - openai "github.com/sashabaranov/go-openai" + openai "github.com/gradientlabs-ai/go-openai" ) func main() { @@ -247,7 +247,7 @@ import ( "fmt" "os" - openai "github.com/sashabaranov/go-openai" + openai "github.com/gradientlabs-ai/go-openai" ) func main() { @@ -288,7 +288,7 @@ import ( "context" "encoding/base64" "fmt" - openai "github.com/sashabaranov/go-openai" + openai "github.com/gradientlabs-ai/go-openai" "image/png" "os" ) @@ -376,7 +376,7 @@ config.HTTPClient = &http.Client{ c := openai.NewClientWithConfig(config) ``` -See also: https://pkg.go.dev/github.com/sashabaranov/go-openai#ClientConfig +See also: https://pkg.go.dev/github.com/gradientlabs-ai/go-openai#ClientConfig
@@ -392,7 +392,7 @@ import ( "os" "strings" - "github.com/sashabaranov/go-openai" + "github.com/gradientlabs-ai/go-openai" ) func main() { @@ -446,7 +446,7 @@ import ( "context" "fmt" - openai "github.com/sashabaranov/go-openai" + openai "github.com/gradientlabs-ai/go-openai" ) func main() { @@ -492,7 +492,7 @@ package main import ( "context" "log" - openai "github.com/sashabaranov/go-openai" + openai "github.com/gradientlabs-ai/go-openai" ) @@ -549,7 +549,7 @@ import ( "context" "fmt" - openai "github.com/sashabaranov/go-openai" + openai "github.com/gradientlabs-ai/go-openai" ) func main() { @@ -680,7 +680,7 @@ package main import ( "context" "fmt" - "github.com/sashabaranov/go-openai" + "github.com/gradientlabs-ai/go-openai" ) func main() { @@ -764,7 +764,7 @@ Due to the factors mentioned above, different answers may be returned even for t By adopting these strategies, you can expect more consistent results. **Related Issues:** -[omitempty option of request struct will generate incorrect request when parameter is 0.](https://github.com/sashabaranov/go-openai/issues/9) +[omitempty option of request struct will generate incorrect request when parameter is 0.](https://github.com/gradientlabs-ai/go-openai/issues/9) ### Does Go OpenAI provide a method to count tokens? @@ -775,15 +775,15 @@ For counting tokens, you might find the following links helpful: - [How to count tokens with tiktoken](https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb) **Related Issues:** -[Is it possible to join the implementation of GPT3 Tokenizer](https://github.com/sashabaranov/go-openai/issues/62) +[Is it possible to join the implementation of GPT3 Tokenizer](https://github.com/gradientlabs-ai/go-openai/issues/62) ## Contributing -By following [Contributing Guidelines](https://github.com/sashabaranov/go-openai/blob/master/CONTRIBUTING.md), we hope to ensure that your contributions are made smoothly and efficiently. +By following [Contributing Guidelines](https://github.com/gradientlabs-ai/go-openai/blob/master/CONTRIBUTING.md), we hope to ensure that your contributions are made smoothly and efficiently. ## Thank you -We want to take a moment to express our deepest gratitude to the [contributors](https://github.com/sashabaranov/go-openai/graphs/contributors) and sponsors of this project: +We want to take a moment to express our deepest gratitude to the [contributors](https://github.com/gradientlabs-ai/go-openai/graphs/contributors) and sponsors of this project: - [Carson Kahn](https://carsonkahn.com) of [Spindle AI](https://spindleai.com) To all of you: thank you. You've helped us achieve more than we ever imagined possible. Can't wait to see where we go next, together! 
diff --git a/api_integration_test.go b/api_integration_test.go index 736040c50..f1fb26c76 100644 --- a/api_integration_test.go +++ b/api_integration_test.go @@ -9,9 +9,9 @@ import ( "os" "testing" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" - "github.com/sashabaranov/go-openai/jsonschema" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai/jsonschema" ) func TestAPI(t *testing.T) { diff --git a/assistant.go b/assistant.go index 9415325f8..09189f12f 100644 --- a/assistant.go +++ b/assistant.go @@ -11,7 +11,6 @@ import ( const ( assistantsSuffix = "/assistants" assistantsFilesSuffix = "/files" - openaiAssistantsV1 = "assistants=v1" ) type Assistant struct { @@ -116,7 +115,7 @@ type AssistantFilesList struct { // CreateAssistant creates a new assistant. func (c *Client) CreateAssistant(ctx context.Context, request AssistantRequest) (response Assistant, err error) { req, err := c.newRequest(ctx, http.MethodPost, c.fullURL(assistantsSuffix), withBody(request), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -132,7 +131,7 @@ func (c *Client) RetrieveAssistant( ) (response Assistant, err error) { urlSuffix := fmt.Sprintf("%s/%s", assistantsSuffix, assistantID) req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -149,7 +148,7 @@ func (c *Client) ModifyAssistant( ) (response Assistant, err error) { urlSuffix := fmt.Sprintf("%s/%s", assistantsSuffix, assistantID) req, err := c.newRequest(ctx, http.MethodPost, c.fullURL(urlSuffix), withBody(request), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -165,7 +164,7 @@ func (c *Client) DeleteAssistant( ) (response AssistantDeleteResponse, err error) { urlSuffix := fmt.Sprintf("%s/%s", assistantsSuffix, assistantID) req, err := c.newRequest(ctx, http.MethodDelete, c.fullURL(urlSuffix), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -203,7 +202,7 @@ func (c *Client) ListAssistants( urlSuffix := fmt.Sprintf("%s%s", assistantsSuffix, encodedValues) req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -219,9 +218,8 @@ func (c *Client) CreateAssistantFile( request AssistantFileRequest, ) (response AssistantFile, err error) { urlSuffix := fmt.Sprintf("%s/%s%s", assistantsSuffix, assistantID, assistantsFilesSuffix) - req, err := c.newRequest(ctx, http.MethodPost, c.fullURL(urlSuffix), - withBody(request), - withBetaAssistantV1()) + req, err := c.newRequest(ctx, http.MethodPost, c.fullURL(urlSuffix), withBody(request), + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -238,7 +236,7 @@ func (c *Client) RetrieveAssistantFile( ) (response AssistantFile, err error) { urlSuffix := fmt.Sprintf("%s/%s%s/%s", assistantsSuffix, assistantID, assistantsFilesSuffix, fileID) req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -255,7 +253,7 @@ func (c *Client) DeleteAssistantFile( ) (err error) { urlSuffix := fmt.Sprintf("%s/%s%s/%s", assistantsSuffix, 
assistantID, assistantsFilesSuffix, fileID) req, err := c.newRequest(ctx, http.MethodDelete, c.fullURL(urlSuffix), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -294,7 +292,7 @@ func (c *Client) ListAssistantFiles( urlSuffix := fmt.Sprintf("%s/%s%s%s", assistantsSuffix, assistantID, assistantsFilesSuffix, encodedValues) req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } diff --git a/assistant_test.go b/assistant_test.go index 40de0e50f..8a0766adb 100644 --- a/assistant_test.go +++ b/assistant_test.go @@ -2,14 +2,13 @@ package openai_test import ( "context" - - openai "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" - "encoding/json" "fmt" "net/http" "testing" + + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) // TestAssistant Tests the assistant endpoint of the API using the mocked server. diff --git a/audio.go b/audio.go index 4cbe4fe64..b981ae182 100644 --- a/audio.go +++ b/audio.go @@ -8,7 +8,7 @@ import ( "net/http" "os" - utils "github.com/sashabaranov/go-openai/internal" + utils "github.com/gradientlabs-ai/go-openai/internal" ) // Whisper Defines the models provided by OpenAI to use when processing audio with OpenAI. diff --git a/audio_api_test.go b/audio_api_test.go index a0efc7921..f27067405 100644 --- a/audio_api_test.go +++ b/audio_api_test.go @@ -12,9 +12,9 @@ import ( "strings" "testing" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) // TestAudio Tests the transcription and translation endpoints of the API using the mocked server. diff --git a/audio_test.go b/audio_test.go index 5346244c8..8f6a1552b 100644 --- a/audio_test.go +++ b/audio_test.go @@ -8,8 +8,8 @@ import ( "path/filepath" "testing" - "github.com/sashabaranov/go-openai/internal/test" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai/internal/test" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) func TestAudioWithFailingFormBuilder(t *testing.T) { diff --git a/chat.go b/chat.go index efb14fd4c..e5312dedb 100644 --- a/chat.go +++ b/chat.go @@ -10,6 +10,7 @@ import ( // Chat message role defined by the OpenAI API. const ( ChatMessageRoleSystem = "system" + ChatMessageRoleDeveloper = "developer" ChatMessageRoleUser = "user" ChatMessageRoleAssistant = "assistant" ChatMessageRoleFunction = "function" @@ -66,17 +67,27 @@ type ChatMessageImageURL struct { Detail ImageURLDetail `json:"detail,omitempty"` } +// API docs: https://platform.openai.com/docs/guides/pdf-files?api-mode=chat&lang=python +type ChatMessageFileData struct { + FileData string `json:"file_data,omitempty"` + // Filename seems to be a required field, even when passing in the file data as bytes. + // But it doesn't get used or validated in anyway. 
+ Filename string `json:"filename,omitempty"` +} + type ChatMessagePartType string const ( ChatMessagePartTypeText ChatMessagePartType = "text" ChatMessagePartTypeImageURL ChatMessagePartType = "image_url" + ChatMessagePartTypeFile ChatMessagePartType = "file" ) type ChatMessagePart struct { Type ChatMessagePartType `json:"type,omitempty"` Text string `json:"text,omitempty"` ImageURL *ChatMessageImageURL `json:"image_url,omitempty"` + File *ChatMessageFileData `json:"file,omitempty"` } type ChatCompletionMessage struct { @@ -184,18 +195,21 @@ type ChatCompletionResponseFormat struct { // ChatCompletionRequest represents a request structure for chat completion API. type ChatCompletionRequest struct { - Model string `json:"model"` - Messages []ChatCompletionMessage `json:"messages"` - MaxTokens int `json:"max_tokens,omitempty"` - Temperature float32 `json:"temperature,omitempty"` - TopP float32 `json:"top_p,omitempty"` - N int `json:"n,omitempty"` - Stream bool `json:"stream,omitempty"` - Stop []string `json:"stop,omitempty"` - PresencePenalty float32 `json:"presence_penalty,omitempty"` - ResponseFormat *ChatCompletionResponseFormat `json:"response_format,omitempty"` - Seed *int `json:"seed,omitempty"` - FrequencyPenalty float32 `json:"frequency_penalty,omitempty"` + Model string `json:"model"` + Messages []ChatCompletionMessage `json:"messages"` + // MaxTokens should be used for Azure provider + MaxTokens int `json:"max_tokens,omitempty"` + // MaxCompletionTokens should be used for OpenAI provider + MaxCompletionTokens int `json:"max_completion_tokens,omitempty"` + Temperature float32 `json:"temperature,omitempty"` + TopP float32 `json:"top_p,omitempty"` + N int `json:"n,omitempty"` + Stream bool `json:"stream,omitempty"` + Stop []string `json:"stop,omitempty"` + PresencePenalty float32 `json:"presence_penalty,omitempty"` + ResponseFormat *ChatCompletionResponseFormat `json:"response_format,omitempty"` + Seed *int `json:"seed,omitempty"` + FrequencyPenalty float32 `json:"frequency_penalty,omitempty"` // LogitBias is must be a token id string (specified by their token ID in the tokenizer), not a word string. // incorrect: `"logit_bias":{"You": 6}`, correct: `"logit_bias":{"1639": 6}` // refs: https://platform.openai.com/docs/api-reference/chat/create#chat/create-logit_bias @@ -215,7 +229,10 @@ type ChatCompletionRequest struct { FunctionCall any `json:"function_call,omitempty"` Tools []Tool `json:"tools,omitempty"` // This can be either a string or an ToolChoice object. - ToolChoice any `json:"tool_choice,omitempty"` + ToolChoice any `json:"tool_choice,omitempty"` + ReasoningEffort string `json:"reasoning_effort,omitempty"` + // Specifies the latency tier to use for processing the request. 
+ ServiceTier ServiceTier `json:"service_tier,omitempty"` } type ToolType string @@ -274,6 +291,15 @@ type LogProbs struct { Content []LogProb `json:"content"` } +type ServiceTier string + +const ( + ServiceTierAuto ServiceTier = "auto" + ServiceTierDefault ServiceTier = "default" + ServiceTierFlex ServiceTier = "flex" + ServiceTierPriority ServiceTier = "priority" +) + type FinishReason string const ( @@ -315,6 +341,7 @@ type ChatCompletionResponse struct { Choices []ChatCompletionChoice `json:"choices"` Usage Usage `json:"usage"` SystemFingerprint string `json:"system_fingerprint"` + ServiceTier ServiceTier `json:"service_tier,omitempty"` httpHeader } diff --git a/chat_stream_test.go b/chat_stream_test.go index bd571cb48..21ec5c758 100644 --- a/chat_stream_test.go +++ b/chat_stream_test.go @@ -10,8 +10,8 @@ import ( "strconv" "testing" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) func TestChatCompletionsStreamWrongModel(t *testing.T) { diff --git a/chat_test.go b/chat_test.go index 520bf5ca4..f9a4df360 100644 --- a/chat_test.go +++ b/chat_test.go @@ -12,9 +12,9 @@ import ( "testing" "time" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" - "github.com/sashabaranov/go-openai/jsonschema" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai/jsonschema" ) const ( diff --git a/client.go b/client.go index 9a1c8958d..66578f804 100644 --- a/client.go +++ b/client.go @@ -9,7 +9,7 @@ import ( "net/http" "strings" - utils "github.com/sashabaranov/go-openai/internal" + utils "github.com/gradientlabs-ai/go-openai/internal" ) // Client is OpenAI GPT-3 API client. @@ -89,9 +89,9 @@ func withContentType(contentType string) requestOption { } } -func withBetaAssistantV1() requestOption { +func withBetaAssistantVersion(version string) requestOption { return func(args *requestOptions) { - args.header.Set("OpenAI-Beta", "assistants=v1") + args.header.Set("OpenAI-Beta", fmt.Sprintf("assistants=%s", version)) } } diff --git a/client_test.go b/client_test.go index a08d10f21..6d9cca0d1 100644 --- a/client_test.go +++ b/client_test.go @@ -10,8 +10,8 @@ import ( "reflect" "testing" - "github.com/sashabaranov/go-openai/internal/test" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai/internal/test" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) var errTestRequestBuilderFailed = errors.New("test request builder failed") diff --git a/completion.go b/completion.go index 00f43ff1c..f13969585 100644 --- a/completion.go +++ b/completion.go @@ -12,16 +12,37 @@ var ( ErrCompletionRequestPromptTypeNotSupported = errors.New("the type of CompletionRequest.Prompt only supports string and []string") //nolint:lll ) -// GPT3 Defines the models provided by OpenAI to use when generating +// Defines the models provided by OpenAI to use when generating // completions from OpenAI. -// GPT3 Models are designed for text-based tasks. For code-specific -// tasks, please refer to the Codex series of models. 
const ( + GPT4Dot1 = "gpt-4.1" + GPT4Dot120250414 = "gpt-4.1-2025-04-14" + GPT4Dot1Mini = "gpt-4.1-mini" + GPT4Dot1Mini20250414 = "gpt-4.1-mini-2025-04-14" + GPT4Dot1Nano = "gpt-4.1-nano" + GPT4Dot1Nano20250414 = "gpt-4.1-nano-2025-04-14" + GPTO3Latest = "o3" + GPTO320250416 = "o3-2025-04-16" + GPTO4MiniLatest = "o4-mini" + GPTO4Mini20250416 = "o4-mini-2025-04-16" + GPTO3MiniLatest = "o3-mini" + GPTO3Mini20250131 = "o3-mini-2025-01-31" + GPTO120241217 = "o1-2024-12-17" + GPTO1Preview = "o1-preview" + GPTO1Preview20240912 = "o1-preview-2024-09-12" + GPTO1Mini = "o1-mini" + GPTO1Mini20240912 = "o1-mini-2024-09-12" GPT432K0613 = "gpt-4-32k-0613" GPT432K0314 = "gpt-4-32k-0314" GPT432K = "gpt-4-32k" GPT40613 = "gpt-4-0613" GPT40314 = "gpt-4-0314" + GPT4o = "gpt-4o" + GPT4o20240513 = "gpt-4o-2024-05-13" + GPT4o20240806 = "gpt-4o-2024-08-06" + GPT4oLatest = "chatgpt-4o-latest" + GPT4oMini = "gpt-4o-mini" + GPT4oMini20240718 = "gpt-4o-mini-2024-07-18" GPT4Turbo = "gpt-4-turbo" GPT4Turbo20240409 = "gpt-4-turbo-2024-04-09" GPT4Turbo0125 = "gpt-4-0125-preview" @@ -37,30 +58,33 @@ const ( GPT3Dot5Turbo16K0613 = "gpt-3.5-turbo-16k-0613" GPT3Dot5Turbo = "gpt-3.5-turbo" GPT3Dot5TurboInstruct = "gpt-3.5-turbo-instruct" - // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead. + // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead. GPT3TextDavinci003 = "text-davinci-003" - // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead. + // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead. GPT3TextDavinci002 = "text-davinci-002" - // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead. + // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead. GPT3TextCurie001 = "text-curie-001" - // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead. + // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead. GPT3TextBabbage001 = "text-babbage-001" - // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead. + // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead. GPT3TextAda001 = "text-ada-001" - // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead. + // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead. GPT3TextDavinci001 = "text-davinci-001" - // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead. + // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead. GPT3DavinciInstructBeta = "davinci-instruct-beta" - GPT3Davinci = "davinci" - GPT3Davinci002 = "davinci-002" - // Deprecated: Will be shut down on January 04, 2024. Use gpt-3.5-turbo-instruct instead. + // Deprecated: Model is shutdown. Use davinci-002 instead. + GPT3Davinci = "davinci" + GPT3Davinci002 = "davinci-002" + // Deprecated: Model is shutdown. Use gpt-3.5-turbo-instruct instead. GPT3CurieInstructBeta = "curie-instruct-beta" GPT3Curie = "curie" GPT3Curie002 = "curie-002" - GPT3Ada = "ada" - GPT3Ada002 = "ada-002" - GPT3Babbage = "babbage" - GPT3Babbage002 = "babbage-002" + // Deprecated: Model is shutdown. Use babbage-002 instead. + GPT3Ada = "ada" + GPT3Ada002 = "ada-002" + // Deprecated: Model is shutdown. Use babbage-002 instead. + GPT3Babbage = "babbage" + GPT3Babbage002 = "babbage-002" ) // Codex Defines the models provided by OpenAI. 
@@ -74,6 +98,23 @@ const ( var disabledModelsForEndpoints = map[string]map[string]bool{ "/completions": { + GPT4Dot1: true, + GPT4Dot120250414: true, + GPT4Dot1Mini: true, + GPT4Dot1Mini20250414: true, + GPT4Dot1Nano: true, + GPT4Dot1Nano20250414: true, + GPTO3Latest: true, + GPTO320250416: true, + GPTO4MiniLatest: true, + GPTO4Mini20250416: true, + GPTO3MiniLatest: true, + GPTO3Mini20250131: true, + GPTO120241217: true, + GPTO1Preview: true, + GPTO1Preview20240912: true, + GPTO1Mini: true, + GPTO1Mini20240912: true, GPT3Dot5Turbo: true, GPT3Dot5Turbo0301: true, GPT3Dot5Turbo0613: true, @@ -82,6 +123,12 @@ var disabledModelsForEndpoints = map[string]map[string]bool{ GPT3Dot5Turbo16K: true, GPT3Dot5Turbo16K0613: true, GPT4: true, + GPT4o: true, + GPT4o20240513: true, + GPT4o20240806: true, + GPT4oLatest: true, + GPT4oMini: true, + GPT4oMini20240718: true, GPT4TurboPreview: true, GPT4VisionPreview: true, GPT4Turbo1106: true, @@ -125,25 +172,30 @@ func checkPromptType(prompt any) bool { // CompletionRequest represents a request structure for completion API. type CompletionRequest struct { - Model string `json:"model"` - Prompt any `json:"prompt,omitempty"` - Suffix string `json:"suffix,omitempty"` - MaxTokens int `json:"max_tokens,omitempty"` - Temperature float32 `json:"temperature,omitempty"` - TopP float32 `json:"top_p,omitempty"` - N int `json:"n,omitempty"` - Stream bool `json:"stream,omitempty"` - LogProbs int `json:"logprobs,omitempty"` - Echo bool `json:"echo,omitempty"` - Stop []string `json:"stop,omitempty"` - PresencePenalty float32 `json:"presence_penalty,omitempty"` - FrequencyPenalty float32 `json:"frequency_penalty,omitempty"` - BestOf int `json:"best_of,omitempty"` + Model string `json:"model"` + Prompt any `json:"prompt,omitempty"` + BestOf int `json:"best_of,omitempty"` + Echo bool `json:"echo,omitempty"` + FrequencyPenalty float32 `json:"frequency_penalty,omitempty"` // LogitBias is must be a token id string (specified by their token ID in the tokenizer), not a word string. // incorrect: `"logit_bias":{"You": 6}`, correct: `"logit_bias":{"1639": 6}` // refs: https://platform.openai.com/docs/api-reference/completions/create#completions/create-logit_bias LogitBias map[string]int `json:"logit_bias,omitempty"` - User string `json:"user,omitempty"` + LogProbs int `json:"logprobs,omitempty"` + // MaxCompletionTokens should be used for OpenAI provider + MaxCompletionTokens int `json:"max_completion_tokens,omitempty"` + // MaxTokens should be used for Azure provider + MaxTokens int `json:"max_tokens,omitempty"` + N int `json:"n,omitempty"` + PresencePenalty float32 `json:"presence_penalty,omitempty"` + Seed *int `json:"seed,omitempty"` + Stop []string `json:"stop,omitempty"` + Stream bool `json:"stream,omitempty"` + Suffix string `json:"suffix,omitempty"` + Temperature float32 `json:"temperature,omitempty"` + TopP float32 `json:"top_p,omitempty"` + User string `json:"user,omitempty"` + ReasoningEffort string `json:"reasoning_effort,omitempty"` } // CompletionChoice represents one of possible completions. 
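As a quick orientation for reviewers, the sketch below exercises the new chat-completion surface added in `chat.go` and `completion.go` above: the `developer` role, the `file` content part, `MaxCompletionTokens`, `ServiceTier`, and one of the new model constants. It is a minimal sketch, not part of this diff; it assumes the library's existing `MultiContent` field on `ChatCompletionMessage`, and the PDF bytes, filename, and model choice are placeholders.

```go
package main

import (
	"context"
	"encoding/base64"
	"fmt"
	"os"

	openai "github.com/gradientlabs-ai/go-openai"
)

func main() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	// Hypothetical PDF payload; the API expects file_data as a base64 data URL.
	pdfBytes := []byte("%PDF-1.4 ...") // placeholder, not a real document
	fileData := "data:application/pdf;base64," + base64.StdEncoding.EncodeToString(pdfBytes)

	resp, err := client.CreateChatCompletion(context.Background(), openai.ChatCompletionRequest{
		Model: openai.GPT4Dot1, // one of the model constants added in completion.go
		Messages: []openai.ChatCompletionMessage{
			{
				// New role added in chat.go; newer models use "developer" where
				// older models used "system".
				Role:    openai.ChatMessageRoleDeveloper,
				Content: "Answer concisely.",
			},
			{
				Role: openai.ChatMessageRoleUser,
				MultiContent: []openai.ChatMessagePart{
					{Type: openai.ChatMessagePartTypeText, Text: "Summarise the attached PDF."},
					{
						Type: openai.ChatMessagePartTypeFile,
						File: &openai.ChatMessageFileData{
							FileData: fileData,
							Filename: "report.pdf", // required by the API even for inline data
						},
					},
				},
			},
		},
		MaxCompletionTokens: 512,                    // OpenAI provider; MaxTokens remains for Azure
		ServiceTier:         openai.ServiceTierAuto, // latency tier for processing the request
		// ReasoningEffort: "medium", // only honored by reasoning models such as o3 / o4-mini
	})
	if err != nil {
		fmt.Printf("ChatCompletion error: %v\n", err)
		return
	}
	fmt.Println(resp.Choices[0].Message.Content, "| service tier:", resp.ServiceTier)
}
```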
diff --git a/completion_test.go b/completion_test.go index 89950bf94..2767702f3 100644 --- a/completion_test.go +++ b/completion_test.go @@ -12,8 +12,8 @@ import ( "testing" "time" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) func TestCompletionsWrongModel(t *testing.T) { diff --git a/config.go b/config.go index c58b71ec6..599fa89c0 100644 --- a/config.go +++ b/config.go @@ -23,6 +23,8 @@ const ( const AzureAPIKeyHeader = "api-key" +const defaultAssistantVersion = "v1" // This will be deprecated by the end of 2024. + // ClientConfig is a configuration of a client. type ClientConfig struct { authToken string @@ -30,7 +32,8 @@ type ClientConfig struct { BaseURL string OrgID string APIType APIType - APIVersion string // required when APIType is APITypeAzure or APITypeAzureAD + APIVersion string // required when APIType is APITypeAzure or APITypeAzureAD + AssistantVersion string AzureModelMapperFunc func(model string) string // replace model to azure deployment name func HTTPClient *http.Client @@ -39,10 +42,11 @@ type ClientConfig struct { func DefaultConfig(authToken string) ClientConfig { return ClientConfig{ - authToken: authToken, - BaseURL: openaiAPIURLv1, - APIType: APITypeOpenAI, - OrgID: "", + authToken: authToken, + BaseURL: openaiAPIURLv1, + APIType: APITypeOpenAI, + AssistantVersion: defaultAssistantVersion, + OrgID: "", HTTPClient: &http.Client{}, diff --git a/config_test.go b/config_test.go index 3e528c3e9..0dc14acae 100644 --- a/config_test.go +++ b/config_test.go @@ -3,7 +3,7 @@ package openai_test import ( "testing" - "github.com/sashabaranov/go-openai" + "github.com/gradientlabs-ai/go-openai" ) func TestGetAzureDeploymentByModel(t *testing.T) { diff --git a/edits_test.go b/edits_test.go index d2a6db40d..1a9ba1f48 100644 --- a/edits_test.go +++ b/edits_test.go @@ -9,8 +9,8 @@ import ( "testing" "time" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) // TestEdits Tests the edits endpoint of the API using the mocked server. diff --git a/embeddings_test.go b/embeddings_test.go index 438978169..967e39437 100644 --- a/embeddings_test.go +++ b/embeddings_test.go @@ -11,8 +11,8 @@ import ( "reflect" "testing" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) func TestEmbedding(t *testing.T) { diff --git a/engines_test.go b/engines_test.go index d26aa5541..294a9a91c 100644 --- a/engines_test.go +++ b/engines_test.go @@ -7,8 +7,8 @@ import ( "net/http" "testing" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) // TestGetEngine Tests the retrieve engine endpoint of the API using the mocked server. 
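The `AssistantVersion` field added to `ClientConfig` (defaulting to `"v1"`) is what `withBetaAssistantVersion` now writes into the `OpenAI-Beta` header. Below is a minimal sketch of opting in to the v2 Assistants API; the assistant name and model are placeholders.

```go
package main

import (
	"context"
	"fmt"
	"os"

	openai "github.com/gradientlabs-ai/go-openai"
)

func main() {
	config := openai.DefaultConfig(os.Getenv("OPENAI_API_KEY"))
	// DefaultConfig sets AssistantVersion to "v1"; overriding it changes the
	// "OpenAI-Beta: assistants=<version>" header sent on every assistant,
	// thread, run and message request.
	config.AssistantVersion = "v2"
	client := openai.NewClientWithConfig(config)

	name := "example-assistant" // hypothetical assistant name
	assistant, err := client.CreateAssistant(context.Background(), openai.AssistantRequest{
		Model: openai.GPT4o,
		Name:  &name,
	})
	if err != nil {
		fmt.Printf("CreateAssistant error: %v\n", err)
		return
	}
	fmt.Println("created assistant:", assistant.ID)
}
```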
diff --git a/error.go b/error.go index 37959a272..b5aea407c 100644 --- a/error.go +++ b/error.go @@ -51,7 +51,7 @@ func (e *APIError) UnmarshalJSON(data []byte) (err error) { err = json.Unmarshal(rawMap["message"], &e.Message) if err != nil { // If the parameter field of a function call is invalid as a JSON schema - // refs: https://github.com/sashabaranov/go-openai/issues/381 + // refs: https://github.com/gradientlabs-ai/go-openai/issues/381 var messages []string err = json.Unmarshal(rawMap["message"], &messages) if err != nil { @@ -61,7 +61,7 @@ func (e *APIError) UnmarshalJSON(data []byte) (err error) { } // optional fields for azure openai - // refs: https://github.com/sashabaranov/go-openai/issues/343 + // refs: https://github.com/gradientlabs-ai/go-openai/issues/343 if _, ok := rawMap["type"]; ok { err = json.Unmarshal(rawMap["type"], &e.Type) if err != nil { diff --git a/error_test.go b/error_test.go index 48cbe4f29..39e641183 100644 --- a/error_test.go +++ b/error_test.go @@ -6,7 +6,7 @@ import ( "reflect" "testing" - "github.com/sashabaranov/go-openai" + "github.com/gradientlabs-ai/go-openai" ) func TestAPIErrorUnmarshalJSON(t *testing.T) { diff --git a/example_test.go b/example_test.go index de67c57cd..ff4451fbf 100644 --- a/example_test.go +++ b/example_test.go @@ -11,7 +11,7 @@ import ( "net/url" "os" - "github.com/sashabaranov/go-openai" + "github.com/gradientlabs-ai/go-openai" ) func Example() { diff --git a/examples/chatbot/main.go b/examples/chatbot/main.go index ad41e957d..63c45001c 100644 --- a/examples/chatbot/main.go +++ b/examples/chatbot/main.go @@ -6,7 +6,7 @@ import ( "fmt" "os" - "github.com/sashabaranov/go-openai" + "github.com/gradientlabs-ai/go-openai" ) func main() { diff --git a/examples/completion-with-tool/main.go b/examples/completion-with-tool/main.go index 26126e41b..e3fc41a3c 100644 --- a/examples/completion-with-tool/main.go +++ b/examples/completion-with-tool/main.go @@ -5,8 +5,8 @@ import ( "fmt" "os" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/jsonschema" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/jsonschema" ) func main() { diff --git a/examples/completion/main.go b/examples/completion/main.go index 22af1fd82..a1b1d4d0c 100644 --- a/examples/completion/main.go +++ b/examples/completion/main.go @@ -5,7 +5,7 @@ import ( "fmt" "os" - "github.com/sashabaranov/go-openai" + "github.com/gradientlabs-ai/go-openai" ) func main() { diff --git a/examples/images/main.go b/examples/images/main.go index 5ee649d22..58f576299 100644 --- a/examples/images/main.go +++ b/examples/images/main.go @@ -5,7 +5,7 @@ import ( "fmt" "os" - "github.com/sashabaranov/go-openai" + "github.com/gradientlabs-ai/go-openai" ) func main() { diff --git a/examples/voice-to-text/main.go b/examples/voice-to-text/main.go index 713e748e1..1d907fe71 100644 --- a/examples/voice-to-text/main.go +++ b/examples/voice-to-text/main.go @@ -6,7 +6,7 @@ import ( "fmt" "os" - "github.com/sashabaranov/go-openai" + "github.com/gradientlabs-ai/go-openai" ) func main() { diff --git a/files_api_test.go b/files_api_test.go index c92162a84..bf45178b2 100644 --- a/files_api_test.go +++ b/files_api_test.go @@ -12,8 +12,8 @@ import ( "testing" "time" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) func TestFileBytesUpload(t *testing.T) { diff --git a/files_test.go b/files_test.go index 
3c1b99fb4..84c4b6185 100644 --- a/files_test.go +++ b/files_test.go @@ -7,8 +7,8 @@ import ( "os" "testing" - utils "github.com/sashabaranov/go-openai/internal" - "github.com/sashabaranov/go-openai/internal/test/checks" + utils "github.com/gradientlabs-ai/go-openai/internal" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) func TestFileBytesUploadWithFailingFormBuilder(t *testing.T) { diff --git a/fine_tunes_test.go b/fine_tunes_test.go index 2ab6817f7..447817f59 100644 --- a/fine_tunes_test.go +++ b/fine_tunes_test.go @@ -7,8 +7,8 @@ import ( "net/http" "testing" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) const testFineTuneID = "fine-tune-id" diff --git a/fine_tuning_job_test.go b/fine_tuning_job_test.go index d2fbcd4c7..d60f1a9ca 100644 --- a/fine_tuning_job_test.go +++ b/fine_tuning_job_test.go @@ -7,8 +7,8 @@ import ( "net/http" "testing" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) const testFineTuninigJobID = "fine-tuning-job-id" diff --git a/go.mod b/go.mod index 42cc7b391..8cf500380 100644 --- a/go.mod +++ b/go.mod @@ -1,3 +1,3 @@ -module github.com/sashabaranov/go-openai +module github.com/gradientlabs-ai/go-openai go 1.18 diff --git a/image_api_test.go b/image_api_test.go index 2eb46f2b4..738f96609 100644 --- a/image_api_test.go +++ b/image_api_test.go @@ -10,8 +10,8 @@ import ( "testing" "time" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) func TestImages(t *testing.T) { diff --git a/image_test.go b/image_test.go index 9332dd5cd..d6ba20580 100644 --- a/image_test.go +++ b/image_test.go @@ -1,8 +1,8 @@ package openai //nolint:testpackage // testing private field import ( - utils "github.com/sashabaranov/go-openai/internal" - "github.com/sashabaranov/go-openai/internal/test/checks" + utils "github.com/gradientlabs-ai/go-openai/internal" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" "context" "fmt" diff --git a/internal/error_accumulator_test.go b/internal/error_accumulator_test.go index d48f28177..ced38912c 100644 --- a/internal/error_accumulator_test.go +++ b/internal/error_accumulator_test.go @@ -5,8 +5,8 @@ import ( "errors" "testing" - utils "github.com/sashabaranov/go-openai/internal" - "github.com/sashabaranov/go-openai/internal/test" + utils "github.com/gradientlabs-ai/go-openai/internal" + "github.com/gradientlabs-ai/go-openai/internal/test" ) func TestErrorAccumulatorBytes(t *testing.T) { diff --git a/internal/form_builder_test.go b/internal/form_builder_test.go index d3faf9982..a6ce093f8 100644 --- a/internal/form_builder_test.go +++ b/internal/form_builder_test.go @@ -1,8 +1,8 @@ package openai //nolint:testpackage // testing private field import ( - "github.com/sashabaranov/go-openai/internal/test" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai/internal/test" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" "bytes" "errors" diff --git a/internal/test/helpers.go b/internal/test/helpers.go index 0e63ae82f..fc697e530 100644 --- a/internal/test/helpers.go +++ b/internal/test/helpers.go @@ -1,7 +1,7 @@ package test 
import ( - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" "net/http" "os" diff --git a/jsonschema/json_test.go b/jsonschema/json_test.go index 744706082..7d6f21203 100644 --- a/jsonschema/json_test.go +++ b/jsonschema/json_test.go @@ -5,7 +5,7 @@ import ( "reflect" "testing" - "github.com/sashabaranov/go-openai/jsonschema" + "github.com/gradientlabs-ai/go-openai/jsonschema" ) func TestDefinition_MarshalJSON(t *testing.T) { diff --git a/messages.go b/messages.go index 6fd0adbc9..0a5bbf179 100644 --- a/messages.go +++ b/messages.go @@ -76,7 +76,8 @@ type MessageFilesList struct { // CreateMessage creates a new message. func (c *Client) CreateMessage(ctx context.Context, threadID string, request MessageRequest) (msg Message, err error) { urlSuffix := fmt.Sprintf("/threads/%s/%s", threadID, messagesSuffix) - req, err := c.newRequest(ctx, http.MethodPost, c.fullURL(urlSuffix), withBody(request), withBetaAssistantV1()) + req, err := c.newRequest(ctx, http.MethodPost, c.fullURL(urlSuffix), withBody(request), + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -111,7 +112,7 @@ func (c *Client) ListMessage(ctx context.Context, threadID string, } urlSuffix := fmt.Sprintf("/threads/%s/%s%s", threadID, messagesSuffix, encodedValues) - req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), withBetaAssistantV1()) + req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -126,7 +127,7 @@ func (c *Client) RetrieveMessage( threadID, messageID string, ) (msg Message, err error) { urlSuffix := fmt.Sprintf("/threads/%s/%s/%s", threadID, messagesSuffix, messageID) - req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), withBetaAssistantV1()) + req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -143,7 +144,7 @@ func (c *Client) ModifyMessage( ) (msg Message, err error) { urlSuffix := fmt.Sprintf("/threads/%s/%s/%s", threadID, messagesSuffix, messageID) req, err := c.newRequest(ctx, http.MethodPost, c.fullURL(urlSuffix), - withBody(map[string]any{"metadata": metadata}), withBetaAssistantV1()) + withBody(map[string]any{"metadata": metadata}), withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -158,7 +159,7 @@ func (c *Client) RetrieveMessageFile( threadID, messageID, fileID string, ) (file MessageFile, err error) { urlSuffix := fmt.Sprintf("/threads/%s/%s/%s/files/%s", threadID, messagesSuffix, messageID, fileID) - req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), withBetaAssistantV1()) + req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -173,7 +174,7 @@ func (c *Client) ListMessageFiles( threadID, messageID string, ) (files MessageFilesList, err error) { urlSuffix := fmt.Sprintf("/threads/%s/%s/%s/files", threadID, messagesSuffix, messageID) - req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), withBetaAssistantV1()) + req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } diff --git a/messages_test.go b/messages_test.go index a18be20bd..7ebdced03 100644 --- a/messages_test.go +++ b/messages_test.go @@ -7,8 +7,8 @@ import ( "net/http" "testing" - 
"github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) var emptyStr = "" diff --git a/models_test.go b/models_test.go index 24a28ed23..83b8e1b67 100644 --- a/models_test.go +++ b/models_test.go @@ -9,8 +9,8 @@ import ( "testing" "time" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) const testFineTuneModelID = "fine-tune-model-id" diff --git a/moderation_test.go b/moderation_test.go index 61171c384..c203a012b 100644 --- a/moderation_test.go +++ b/moderation_test.go @@ -11,8 +11,8 @@ import ( "testing" "time" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) // TestModeration Tests the moderations endpoint of the API using the mocked server. diff --git a/openai_test.go b/openai_test.go index 729d8880c..a5def2be0 100644 --- a/openai_test.go +++ b/openai_test.go @@ -1,8 +1,8 @@ package openai_test import ( - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test" ) func setupOpenAITestServer() (client *openai.Client, server *test.ServerTest, teardown func()) { diff --git a/run.go b/run.go index 7c14779c5..1932d10ad 100644 --- a/run.go +++ b/run.go @@ -88,6 +88,7 @@ type RunRequest struct { AdditionalInstructions string `json:"additional_instructions,omitempty"` Tools []Tool `json:"tools,omitempty"` Metadata map[string]any `json:"metadata,omitempty"` + ToolChoice any `json:"tool_choice,omitempty"` // Sampling temperature between 0 and 2. Higher values like 0.8 are more random. // lower values are more focused and deterministic. 
@@ -226,8 +227,7 @@ func (c *Client) CreateRun( http.MethodPost, c.fullURL(urlSuffix), withBody(request), - withBetaAssistantV1(), - ) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -247,8 +247,7 @@ func (c *Client) RetrieveRun( ctx, http.MethodGet, c.fullURL(urlSuffix), - withBetaAssistantV1(), - ) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -270,8 +269,7 @@ func (c *Client) ModifyRun( http.MethodPost, c.fullURL(urlSuffix), withBody(request), - withBetaAssistantV1(), - ) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -310,8 +308,7 @@ func (c *Client) ListRuns( ctx, http.MethodGet, c.fullURL(urlSuffix), - withBetaAssistantV1(), - ) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -332,8 +329,7 @@ func (c *Client) SubmitToolOutputs( http.MethodPost, c.fullURL(urlSuffix), withBody(request), - withBetaAssistantV1(), - ) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -352,8 +348,7 @@ func (c *Client) CancelRun( ctx, http.MethodPost, c.fullURL(urlSuffix), - withBetaAssistantV1(), - ) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -372,8 +367,7 @@ func (c *Client) CreateThreadAndRun( http.MethodPost, c.fullURL(urlSuffix), withBody(request), - withBetaAssistantV1(), - ) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -394,8 +388,7 @@ func (c *Client) RetrieveRunStep( ctx, http.MethodGet, c.fullURL(urlSuffix), - withBetaAssistantV1(), - ) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -435,8 +428,7 @@ func (c *Client) ListRunSteps( ctx, http.MethodGet, c.fullURL(urlSuffix), - withBetaAssistantV1(), - ) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } diff --git a/run_test.go b/run_test.go index cdf99db05..04996eff9 100644 --- a/run_test.go +++ b/run_test.go @@ -2,14 +2,13 @@ package openai_test import ( "context" - - openai "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" - "encoding/json" "fmt" "net/http" "testing" + + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) // TestAssistant Tests the assistant endpoint of the API using the mocked server. 
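`RunRequest` now carries a `ToolChoice` field typed as `any`, mirroring the chat API. A hedged sketch of forcing a specific function on a run follows; the thread ID, assistant ID, and function name are placeholders, and a plain string such as "auto" or "none" should also marshal correctly.

```go
package main

import (
	"context"
	"fmt"
	"os"

	openai "github.com/gradientlabs-ai/go-openai"
)

func main() {
	client := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	// threadID, AssistantID and the function name below are placeholders.
	run, err := client.CreateRun(context.Background(), "thread_abc123", openai.RunRequest{
		AssistantID: "asst_abc123",
		// ToolChoice accepts either a string ("auto", "none") or a ToolChoice
		// struct that forces the model to call a particular function.
		ToolChoice: openai.ToolChoice{
			Type:     openai.ToolTypeFunction,
			Function: openai.ToolFunction{Name: "get_weather"},
		},
	})
	if err != nil {
		fmt.Printf("CreateRun error: %v\n", err)
		return
	}
	fmt.Println("run status:", run.Status)
}
```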
diff --git a/speech_test.go b/speech_test.go index d9ba58b13..2c83bf01b 100644 --- a/speech_test.go +++ b/speech_test.go @@ -11,9 +11,9 @@ import ( "path/filepath" "testing" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) func TestSpeechIntegration(t *testing.T) { diff --git a/stream_reader.go b/stream_reader.go index 4210a1948..cb7310b7f 100644 --- a/stream_reader.go +++ b/stream_reader.go @@ -7,7 +7,7 @@ import ( "io" "net/http" - utils "github.com/sashabaranov/go-openai/internal" + utils "github.com/gradientlabs-ai/go-openai/internal" ) var ( diff --git a/stream_reader_test.go b/stream_reader_test.go index cd6e46eff..2c0125740 100644 --- a/stream_reader_test.go +++ b/stream_reader_test.go @@ -6,9 +6,9 @@ import ( "errors" "testing" - utils "github.com/sashabaranov/go-openai/internal" - "github.com/sashabaranov/go-openai/internal/test" - "github.com/sashabaranov/go-openai/internal/test/checks" + utils "github.com/gradientlabs-ai/go-openai/internal" + "github.com/gradientlabs-ai/go-openai/internal/test" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) var errTestUnmarshalerFailed = errors.New("test unmarshaler failed") diff --git a/stream_test.go b/stream_test.go index 2822a3535..e7eaa7652 100644 --- a/stream_test.go +++ b/stream_test.go @@ -10,8 +10,8 @@ import ( "testing" "time" - "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) func TestCompletionsStreamWrongModel(t *testing.T) { diff --git a/thread.go b/thread.go index 291f3dcab..900e3f2ea 100644 --- a/thread.go +++ b/thread.go @@ -51,7 +51,7 @@ type ThreadDeleteResponse struct { // CreateThread creates a new thread. 
func (c *Client) CreateThread(ctx context.Context, request ThreadRequest) (response Thread, err error) { req, err := c.newRequest(ctx, http.MethodPost, c.fullURL(threadsSuffix), withBody(request), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -64,7 +64,7 @@ func (c *Client) CreateThread(ctx context.Context, request ThreadRequest) (respo func (c *Client) RetrieveThread(ctx context.Context, threadID string) (response Thread, err error) { urlSuffix := threadsSuffix + "/" + threadID req, err := c.newRequest(ctx, http.MethodGet, c.fullURL(urlSuffix), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -81,7 +81,7 @@ func (c *Client) ModifyThread( ) (response Thread, err error) { urlSuffix := threadsSuffix + "/" + threadID req, err := c.newRequest(ctx, http.MethodPost, c.fullURL(urlSuffix), withBody(request), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } @@ -97,7 +97,7 @@ func (c *Client) DeleteThread( ) (response ThreadDeleteResponse, err error) { urlSuffix := threadsSuffix + "/" + threadID req, err := c.newRequest(ctx, http.MethodDelete, c.fullURL(urlSuffix), - withBetaAssistantV1()) + withBetaAssistantVersion(c.config.AssistantVersion)) if err != nil { return } diff --git a/thread_test.go b/thread_test.go index 1ac0f3c0e..e7b223390 100644 --- a/thread_test.go +++ b/thread_test.go @@ -7,8 +7,8 @@ import ( "net/http" "testing" - openai "github.com/sashabaranov/go-openai" - "github.com/sashabaranov/go-openai/internal/test/checks" + "github.com/gradientlabs-ai/go-openai" + "github.com/gradientlabs-ai/go-openai/internal/test/checks" ) // TestThread Tests the thread endpoint of the API using the mocked server.