diff --git a/.gitignore b/.gitignore index 788230a8..d288e7ce 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,8 @@ obj/ *.suo /*.sln *.sln +/*.slnx +*.slnx *.user *.unityproj *.ipch diff --git a/OpenAI/.editorconfig b/OpenAI/.editorconfig index be3a59c5..e3567cdb 100644 --- a/OpenAI/.editorconfig +++ b/OpenAI/.editorconfig @@ -22,7 +22,7 @@ csharp_new_line_before_finally = true csharp_new_line_before_open_brace = all # Modifier preferences -dotnet_style_require_accessibility_modifiers = for_non_interface_members:error +dotnet_style_require_accessibility_modifiers = error # Code-block preferences csharp_prefer_braces = true:error @@ -33,8 +33,8 @@ dotnet_style_predefined_type_for_locals_parameters_members = true # Code Style csharp_style_var_when_type_is_apparent = true - dotnet_sort_system_directives_first = false +dotnet_analyzer_diagnostic.category-Style.severity = none #### Resharper/Rider Rules #### # https://www.jetbrains.com/help/resharper/EditorConfig_Properties.html diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Audio/AudioEndpoint.cs b/OpenAI/Packages/com.openai.unity/Runtime/Audio/AudioEndpoint.cs index 2612a510..9d0fb966 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Audio/AudioEndpoint.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Audio/AudioEndpoint.cs @@ -34,22 +34,21 @@ public async Task> CreateSpeechAsync(SpeechRequest requ [Obsolete("use GetSpeechAsync with Func overload")] public async Task> CreateSpeechStreamAsync(SpeechRequest request, Action partialClipCallback, CancellationToken cancellationToken = default) { - using var result = await GetSpeechAsync(request, speechClip => + using var result = await GetSpeechAsync(request, async speechClip => { partialClipCallback.Invoke(speechClip.AudioClip); + await Task.CompletedTask; }, cancellationToken); return Tuple.Create(result.CachePath, result.AudioClip); } [Obsolete("use GetSpeechAsync with Func overload")] public async Task GetSpeechAsync(SpeechRequest request, Action partialClipCallback, CancellationToken cancellationToken = default) - { - return await GetSpeechAsync(request, partialClipCallback: clip => + => await GetSpeechAsync(request, partialClipCallback: async clip => { partialClipCallback?.Invoke(clip); - return Task.CompletedTask; + await Task.CompletedTask; }, cancellationToken); - } /// /// Generates audio from the input text. @@ -61,6 +60,11 @@ public async Task GetSpeechAsync(SpeechRequest request, Action GetSpeechAsync(SpeechRequest request, Func partialClipCallback = null, CancellationToken cancellationToken = default) { + if (request == null) + { + throw new ArgumentNullException(nameof(request)); + } + if (partialClipCallback != null && request.ResponseFormat != SpeechResponseFormat.PCM) { Debug.LogWarning("Speech streaming only supported with PCM response format. 
Overriding to PCM..."); @@ -77,7 +81,7 @@ public async Task GetSpeechAsync(SpeechRequest request, Func GetSpeechAsync(SpeechRequest request, Func GetSpeechAsync(SpeechRequest request, Func 0) { var partialClip = new SpeechClip($"{clipName}_{++part}", null, partialResponse.Data); - - try - { - await partialClipCallback(partialClip).ConfigureAwait(false); - } - finally - { - partialClip.Dispose(); - } + await partialClipCallback(partialClip).ConfigureAwait(true); } }, 8192, new RestParameters(client.DefaultRequestHeaders, debug: EnableDebug), cancellationToken); pcmResponse.Validate(EnableDebug); @@ -119,17 +115,17 @@ public async Task GetSpeechAsync(SpeechRequest request, Func Dispose(); + [Preserve] + ~SpeechClip() => Dispose(false); [Preserve] public string Name { get; } @@ -112,13 +116,23 @@ public float Length [Preserve] public static implicit operator string(SpeechClip clip) => clip?.CachePath; + [Preserve] + private void Dispose(bool disposing) + { + if (disposing) + { + audioSamples?.Dispose(); + audioSamples = null; + audioData?.Dispose(); + audioData = null; + } + } + [Preserve] public void Dispose() { - audioSamples?.Dispose(); - audioSamples = null; - audioData?.Dispose(); - audioData = null; + Dispose(true); + GC.SuppressFinalize(this); } } } diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Chat/AudioOutput.cs b/OpenAI/Packages/com.openai.unity/Runtime/Chat/AudioOutput.cs index 66fa0a91..ee388deb 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Chat/AudioOutput.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Chat/AudioOutput.cs @@ -137,7 +137,9 @@ private void Dispose(bool disposing) if (disposing) { audioSamples?.Dispose(); - AudioData.Dispose(); + audioSamples = null; + audioData?.Dispose(); + audioData = null; } } diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Extensions/TextureExtensions.cs b/OpenAI/Packages/com.openai.unity/Runtime/Extensions/TextureExtensions.cs index 6ccbf2b8..ccc73589 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Extensions/TextureExtensions.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Extensions/TextureExtensions.cs @@ -1,34 +1,94 @@ // Licensed under the MIT License. See LICENSE in the project root for license information. 
-using System; -using System.IO; using System.Threading; using System.Threading.Tasks; +using Unity.Collections; using UnityEngine; +using Utilities.Extensions; +using OpenAI.Images; +using Utilities.Async; +using System; using Utilities.WebRequestRest; +#if !PLATFORM_WEBGL +using System.IO; +#endif + namespace OpenAI.Extensions { - internal static class TextureExtensions + public static class TextureExtensions { - public static async Task<(Texture2D, string)> ConvertFromBase64Async(string b64, bool debug, CancellationToken cancellationToken) + internal static async Task<(Texture2D, Uri)> ConvertFromBase64Async(string b64, bool debug, CancellationToken cancellationToken) { - var imageData = Convert.FromBase64String(b64); + using var imageData = NativeArrayExtensions.FromBase64String(b64, Allocator.Persistent); #if PLATFORM_WEBGL var texture = new Texture2D(2, 2); +#if UNITY_6000_0_OR_NEWER texture.LoadImage(imageData); - return await Task.FromResult((texture, string.Empty)); #else - if (!Rest.TryGetDownloadCacheItem(b64, out var localFilePath)) + texture.LoadImage(imageData.ToArray()); +#endif // UNITY_6000_0_OR_NEWER + return await Task.FromResult((texture, null as Uri)); +#else + if (!Rest.TryGetDownloadCacheItem(b64, out Uri localUri)) { - await File.WriteAllBytesAsync(localFilePath, imageData, cancellationToken).ConfigureAwait(true); - localFilePath = $"file://{localFilePath}"; + await using var fs = new FileStream(localUri.LocalPath, FileMode.Create, FileAccess.Write); + await fs.WriteAsync(imageData, cancellationToken: cancellationToken); } - var texture = await Rest.DownloadTextureAsync(localFilePath, parameters: new RestParameters(debug: debug), cancellationToken: cancellationToken); - Rest.TryGetDownloadCacheItem(b64, out var cachedPath); - return (texture, cachedPath); -#endif + var texture = await Rest.DownloadTextureAsync(localUri.LocalPath, parameters: new RestParameters(debug: debug), cancellationToken: cancellationToken); + Rest.TryGetDownloadCacheItem(b64, out Uri cachedUri); + return (texture, cachedUri); +#endif // !PLATFORM_WEBGL + } + + /// + /// Loads a Texture2D from an ImageResult, handling base64, cached path, or URL. + /// + /// The to load the texture for. + /// Optional, debug flag. + /// Optional, . + /// + /// A tuple containing the converted and the cached file path as a . + /// + public static async Task<(Texture2D, Uri)> LoadTextureAsync(this ImageResult imageResult, bool debug = false, CancellationToken cancellationToken = default) + { + await Awaiters.UnityMainThread; + + if (imageResult.Texture.IsNull()) + { + if (!string.IsNullOrWhiteSpace(imageResult.B64_Json)) + { + var (texture, cachedUri) = await ConvertFromBase64Async(imageResult.B64_Json, debug, cancellationToken); + imageResult.Texture = texture; + imageResult.CachedPathUri = cachedUri; + } + else + { + Texture2D texture; + Uri cachedPath; + + if (imageResult.CachedPathUri != null) + { + texture = await Rest.DownloadTextureAsync(imageResult.CachedPathUri, parameters: new RestParameters(debug: debug), cancellationToken: cancellationToken); + cachedPath = imageResult.CachedPathUri; + } + else if (imageResult.Uri != null) + { + texture = await Rest.DownloadTextureAsync(imageResult.Uri, parameters: new RestParameters(debug: debug), cancellationToken: cancellationToken); + cachedPath = Rest.TryGetDownloadCacheItem(imageResult.Uri, out var path) ? 
path : null; + } + else + { + throw new InvalidOperationException("ImageResult does not contain valid image data."); + } + + imageResult.Texture = texture; + imageResult.CachedPathUri = cachedPath; + } + } + + return (imageResult.Texture, imageResult.CachedPathUri); } } } diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Images/ImageResult.cs b/OpenAI/Packages/com.openai.unity/Runtime/Images/ImageResult.cs index 255df3cd..af72c5ee 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Images/ImageResult.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Images/ImageResult.cs @@ -18,6 +18,12 @@ internal ImageResult( [JsonProperty("revised_prompt")] string revisedPrompt) { Url = url; + + if (!string.IsNullOrWhiteSpace(url)) + { + Uri = new Uri(url); + } + B64_Json = b64_json; RevisedPrompt = revisedPrompt; } @@ -26,6 +32,10 @@ internal ImageResult( [JsonProperty("url", DefaultValueHandling = DefaultValueHandling.Ignore)] public string Url { get; private set; } + [Preserve] + [JsonIgnore] + public Uri Uri { get; } + [Preserve] [JsonProperty("b64_json", DefaultValueHandling = DefaultValueHandling.Ignore)] public string B64_Json { get; private set; } @@ -56,7 +66,12 @@ internal ImageResult( [Preserve] [JsonIgnore] - public string CachedPath { get; internal set; } + [Obsolete("use CachedPathUri")] + public string CachedPath => CachedPathUri?.ToString(); + + [Preserve] + [JsonIgnore] + public Uri CachedPathUri { get; internal set; } [Preserve] [JsonIgnore] @@ -75,9 +90,9 @@ internal ImageResult( [Preserve] public override string ToString() { - if (!string.IsNullOrWhiteSpace(CachedPath)) + if (CachedPathUri != null) { - return CachedPath; + return CachedPathUri.ToString(); } if (!string.IsNullOrWhiteSpace(B64_Json)) diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Images/ImagesEndpoint.cs b/OpenAI/Packages/com.openai.unity/Runtime/Images/ImagesEndpoint.cs index 156b459a..1ffa6645 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Images/ImagesEndpoint.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Images/ImagesEndpoint.cs @@ -9,7 +9,6 @@ using System.Threading; using System.Threading.Tasks; using UnityEngine; -using Utilities.Async; using Utilities.WebRequestRest; namespace OpenAI.Images @@ -180,33 +179,15 @@ private async Task> DeserializeResponseAsync(Response } await Rest.ValidateCacheDirectoryAsync(); - var downloads = imagesResponse.Results.Select(DownloadAsync).ToList(); - async Task DownloadAsync(ImageResult result) - { - await Awaiters.UnityMainThread; - - if (string.IsNullOrWhiteSpace(result.Url)) - { - var (texture, cachePath) = await TextureExtensions.ConvertFromBase64Async(result.B64_Json, EnableDebug, cancellationToken); - result.Texture = texture; - result.CachedPath = cachePath; - } - else - { - result.Texture = await Rest.DownloadTextureAsync(result.Url, parameters: new RestParameters(debug: EnableDebug), cancellationToken: cancellationToken); - - if (Rest.TryGetDownloadCacheItem(result.Url, out var cachedPath)) - { - result.CachedPath = cachedPath; - } - } - } + Task<(Texture2D, Uri)> DownloadAsync(ImageResult result) + => result.LoadTextureAsync(debug: EnableDebug, cancellationToken); - await Task.WhenAll(downloads).ConfigureAwait(true); + await Task.WhenAll(imagesResponse.Results.Select(DownloadAsync).ToList()).ConfigureAwait(true); - foreach (var result in imagesResponse.Results) + for (var i = 0; i < imagesResponse.Results.Count; i++) { + var result = imagesResponse.Results[i]; result.CreatedAt = 
DateTimeOffset.FromUnixTimeSeconds(imagesResponse.CreatedAtUnixSeconds).UtcDateTime; result.Background = imagesResponse.Background; result.OutputFormat = imagesResponse.OutputFormat; diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Models/Model.cs b/OpenAI/Packages/com.openai.unity/Runtime/Models/Model.cs index 10fef867..d15bd052 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Models/Model.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Models/Model.cs @@ -104,6 +104,18 @@ internal Model( #region Reasoning Models + /// + /// GPT-5.2 pro is available in the Responses API only to enable support for multi-turn model interactions before responding to API requests, + /// and other advanced API features in the future. Since GPT-5.2 pro is designed to tackle tough problems, + /// some requests may take several minutes to finish. To avoid timeouts, try using background mode. + /// GPT-5.2 pro supports reasoning.effort: medium, high, xhigh. + /// + /// + /// - Context Window: 400,000 context window
+ /// - Max Output Tokens: 128,000 max output tokens + ///
+ public static Model GPT5_2_Pro { get; } = new("gpt-5.2-pro", "openai"); + /// /// The o1 series of models are trained with reinforcement learning to perform complex reasoning. /// o1 models think before they answer, producing a long internal chain of thought before responding to the user. @@ -218,6 +230,15 @@ internal Model( #region Chat Models + /// + /// GPT-5.2 is our flagship model for coding and agentic tasks across industries. + /// + /// + /// - Context Window: 400,000 context window
+ /// - Max Output Tokens: 128,000 max output tokens + ///
+ public static Model GPT5_2 { get; } = new("gpt-5.2", "openai"); + /// /// GPT-5 is our flagship model for coding, reasoning, and agentic tasks across domains. /// @@ -527,6 +548,35 @@ internal Model( #region Specialized Models + /// + /// GPT-5.1-Codex-Max is purpose-built for agentic coding. + /// It's only available in the Responses API. + /// + /// + /// - Context Window: 400,000 tokens
+ /// - Max Output Tokens: 128,000 tokens + ///
+ public static Model GPT5_1_CodexMax { get; } = new("gpt-5.1-codex-max", "openai"); + + /// + /// GPT-5.1-Codex is a version of GPT-5 optimized for agentic coding tasks in Codex or similar environments. + /// It's available in the Responses API. + /// + /// - Context Window: 400,000 tokens
+ /// - Max Output Tokens: 128,000 tokens + ///
+ public static Model GPT5_1_Codex { get; } = new("gpt-5.1-codex", "openai"); + + /// + /// GPT-5.1 Codex mini is a smaller, more cost-effective, less-capable version of GPT-5.1-Codex. + /// + /// + /// - Context Window: 400,000 tokens
+ /// - Max Output Tokens: 128,000 tokens + ///
+ public static Model GPT5_1_CodexMini { get; } = new("gpt-5.1-codex-mini", "openai"); + /// /// GPT-5-Codex is a version of GPT-5 optimized for agentic coding tasks in Codex or similar environments. /// It's available in the Responses API only and the underlying model snapshot will be regularly updated. diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Responses/CreateResponseRequest.cs b/OpenAI/Packages/com.openai.unity/Runtime/Responses/CreateResponseRequest.cs index 5fafe24f..bde937ac 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Responses/CreateResponseRequest.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Responses/CreateResponseRequest.cs @@ -164,7 +164,7 @@ public CreateResponseRequest( { Input = input?.ToArray() ?? throw new ArgumentNullException(nameof(input)); Model = string.IsNullOrWhiteSpace(model?.Id) && prompt == null - ? Models.Model.GPT4oRealtime + ? Models.Model.GPT5_Mini : model; Background = background; Include = include?.ToList(); diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPApprovalRequest.cs b/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPApprovalRequest.cs index f6a83d3c..e1cb11c3 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPApprovalRequest.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPApprovalRequest.cs @@ -34,6 +34,7 @@ public MCPApprovalRequest(string name, string arguments = null, string serverLab Name = name; Arguments = arguments; ServerLabel = serverLabel; + Type = ResponseItemType.McpApprovalRequest; } /// diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPApprovalResponse.cs b/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPApprovalResponse.cs index daa03e8b..60411862 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPApprovalResponse.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPApprovalResponse.cs @@ -28,6 +28,14 @@ internal MCPApprovalResponse( Reason = reason; } + [Preserve] + public MCPApprovalResponse(string approvalRequestId, bool approve) + { + ApprovalRequestId = approvalRequestId; + Approve = approve; + Type = ResponseItemType.McpApprovalResponse; + } + /// [Preserve] [JsonProperty("id", DefaultValueHandling = DefaultValueHandling.Ignore)] diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPToolCall.cs b/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPToolCall.cs index 67543296..4604bbd2 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPToolCall.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Responses/MCPToolCall.cs @@ -127,6 +127,6 @@ internal string Delta /// [Preserve] [JsonProperty("error", DefaultValueHandling = DefaultValueHandling.Ignore)] - public string Error { get; } + public JObject Error { get; } } } diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Responses/Message.cs b/OpenAI/Packages/com.openai.unity/Runtime/Responses/Message.cs index 19870a4d..1c5e8b3b 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Responses/Message.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Responses/Message.cs @@ -41,7 +41,7 @@ internal Message( [Preserve] public Message(Role role, string text) - : this(role, new TextContent(text)) + : this(role, new TextContent(text, role == Role.Assistant ? 
ResponseContentType.OutputText : ResponseContentType.InputText)) { } diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Responses/ResponsesEndpoint.cs b/OpenAI/Packages/com.openai.unity/Runtime/Responses/ResponsesEndpoint.cs index 4fdc38e8..800019bc 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Responses/ResponsesEndpoint.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Responses/ResponsesEndpoint.cs @@ -409,7 +409,8 @@ private async Task StreamResponseAsync(string endpoint, string payload } case "error": { - serverSentEvent = sseResponse.Deserialize(client); + var error = @object["error"]?.ToObject(); + serverSentEvent = error ?? sseResponse.Deserialize(client); break; } // Event status messages with no data payloads: diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Responses/TextContent.cs b/OpenAI/Packages/com.openai.unity/Runtime/Responses/TextContent.cs index 4d83acd2..66de38a3 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Responses/TextContent.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Responses/TextContent.cs @@ -29,9 +29,9 @@ internal TextContent( } [Preserve] - public TextContent(string text) + public TextContent(string text, ResponseContentType type = ResponseContentType.InputText) { - Type = ResponseContentType.InputText; + Type = type; Text = text; } diff --git a/OpenAI/Packages/com.openai.unity/Runtime/Threads/ThreadsEndpoint.cs b/OpenAI/Packages/com.openai.unity/Runtime/Threads/ThreadsEndpoint.cs index cc5d17c0..7ce5cf57 100644 --- a/OpenAI/Packages/com.openai.unity/Runtime/Threads/ThreadsEndpoint.cs +++ b/OpenAI/Packages/com.openai.unity/Runtime/Threads/ThreadsEndpoint.cs @@ -507,6 +507,7 @@ private async Task StreamRunAsync(string endpoint, string payload, { IServerSentEvent serverSentEvent = null; var @event = ssEvent.Value.Value(); + var @object = ssEvent.Data ?? ssEvent.Value; // ReSharper disable AccessToModifiedClosure try @@ -580,7 +581,8 @@ private async Task StreamRunAsync(string endpoint, string payload, serverSentEvent = message; break; case "error": - serverSentEvent = sseResponse.Deserialize(client); + var error = @object["error"]?.ToObject(); + serverSentEvent = error ?? sseResponse.Deserialize(client); break; default: // if not properly handled raise it up to caller to deal with it themselves. 
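As an illustrative aside, a minimal C# sketch of how the Responses items touched above might be constructed after this change; the namespaces, the `approvalRequestId` value, and the surrounding class are assumptions for the example, not something this changeset defines.

```csharp
// Illustrative sketch only. Namespaces and the source of approvalRequestId are assumptions;
// the constructors exercised here are the ones changed in Message.cs, TextContent.cs,
// and MCPApprovalResponse.cs.
using OpenAI;
using OpenAI.Responses;
using UnityEngine;

public static class ResponsesItemSketch
{
    public static void Build(string approvalRequestId)
    {
        // Message(Role, string) now selects the content type from the role:
        // user text -> ResponseContentType.InputText, assistant text -> ResponseContentType.OutputText.
        var userMessage = new Message(Role.User, "What is the capital of France?");
        var assistantMessage = new Message(Role.Assistant, "The capital of France is Paris.");

        // The new public constructor also sets Type = ResponseItemType.McpApprovalResponse.
        var approval = new MCPApprovalResponse(approvalRequestId, approve: true);

        Debug.Log($"{userMessage} | {assistantMessage} | {approval}");
    }
}
```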
diff --git a/OpenAI/Packages/com.openai.unity/Samples~/Assistant/AssistantBehaviour.cs b/OpenAI/Packages/com.openai.unity/Samples~/Assistant/AssistantBehaviour.cs index 405868a9..35499fbe 100644 --- a/OpenAI/Packages/com.openai.unity/Samples~/Assistant/AssistantBehaviour.cs +++ b/OpenAI/Packages/com.openai.unity/Samples~/Assistant/AssistantBehaviour.cs @@ -342,9 +342,8 @@ private async Task GenerateSpeechAsync(string text, CancellationToken cancellati var stopwatch = Stopwatch.StartNew(); using var speechClip = await openAI.AudioEndpoint.GetSpeechAsync( request, - partialClip => streamAudioSource.SampleCallbackAsync(partialClip.AudioSamples), - cancellationToken) - .ConfigureAwait(true); + async partialClip => await streamAudioSource.SampleCallbackAsync(partialClip.AudioSamples), + cancellationToken).ConfigureAwait(true); var playbackTime = speechClip.Length - (float)stopwatch.Elapsed.TotalSeconds + 0.1f; if (playbackTime > 0) diff --git a/OpenAI/Packages/com.openai.unity/Samples~/Chat/ChatBehaviour.cs b/OpenAI/Packages/com.openai.unity/Samples~/Chat/ChatBehaviour.cs index e9cd8f4e..9b6bb3cd 100644 --- a/OpenAI/Packages/com.openai.unity/Samples~/Chat/ChatBehaviour.cs +++ b/OpenAI/Packages/com.openai.unity/Samples~/Chat/ChatBehaviour.cs @@ -11,7 +11,6 @@ using System.Threading; using System.Threading.Tasks; using TMPro; -using Unity.Collections; using UnityEngine; using UnityEngine.EventSystems; using UnityEngine.UI; @@ -249,7 +248,7 @@ private async Task GenerateSpeechAsync(string text, CancellationToken cancellati var stopwatch = Stopwatch.StartNew(); using var speechClip = await openAI.AudioEndpoint.GetSpeechAsync( request, - partialClip => streamAudioSource.SampleCallbackAsync(partialClip.AudioSamples), + async partialClip => await streamAudioSource.SampleCallbackAsync(partialClip.AudioSamples), cancellationToken) .ConfigureAwait(true); var playbackTime = speechClip.Length - (float)stopwatch.Elapsed.TotalSeconds + 0.1f; diff --git a/OpenAI/Packages/com.openai.unity/Samples~/Responses/ResponsesBehaviour.cs b/OpenAI/Packages/com.openai.unity/Samples~/Responses/ResponsesBehaviour.cs index f5243b1d..b2164642 100644 --- a/OpenAI/Packages/com.openai.unity/Samples~/Responses/ResponsesBehaviour.cs +++ b/OpenAI/Packages/com.openai.unity/Samples~/Responses/ResponsesBehaviour.cs @@ -193,7 +193,7 @@ private async Task GenerateSpeechAsync(string text, CancellationToken cancellati var stopwatch = Stopwatch.StartNew(); using var speechClip = await openAI.AudioEndpoint.GetSpeechAsync( request, - partialClip => streamAudioSource.SampleCallbackAsync(partialClip.AudioSamples), + async partialClip => await streamAudioSource.SampleCallbackAsync(partialClip.AudioSamples), cancellationToken) .ConfigureAwait(true); var playbackTime = speechClip.Length - (float)stopwatch.Elapsed.TotalSeconds + 0.1f; diff --git a/OpenAI/Packages/com.openai.unity/Tests/TestFixture_05_Images.cs b/OpenAI/Packages/com.openai.unity/Tests/TestFixture_05_Images.cs index 3c1914d1..7967d63f 100644 --- a/OpenAI/Packages/com.openai.unity/Tests/TestFixture_05_Images.cs +++ b/OpenAI/Packages/com.openai.unity/Tests/TestFixture_05_Images.cs @@ -44,12 +44,10 @@ public async Task Test_01_01_GenerateImages() var result = imageResults[i]; Assert.IsNotNull(result); Assert.IsNotNull(result.Texture); + Assert.IsNotNull(result.CachedPathUri); Assert.IsFalse(string.IsNullOrWhiteSpace(result.B64_Json)); - var imageBytes = Convert.FromBase64String(result.B64_Json); - Assert.IsNotNull(imageBytes); var path = Path.Combine(testDirectory, 
$"{nameof(Test_01_01_GenerateImages)}-{i}-{DateTime.UtcNow:yyyyMMddHHmmss}.jpeg"); - await using var fileStream = new FileStream(path, FileMode.Create, FileAccess.Write); - await fileStream.WriteAsync(imageBytes, 0, imageBytes.Length); + File.Copy(result.CachedPathUri.LocalPath, path, true); } } catch (Exception e) @@ -82,12 +80,9 @@ public async Task Test_02_01_CreateImageEdit_Path() var result = imageResults[i]; Assert.IsNotNull(result); Assert.IsNotNull(result.Texture); - Assert.IsFalse(string.IsNullOrWhiteSpace(result.B64_Json)); - var imageBytes = Convert.FromBase64String(result.B64_Json); - Assert.IsNotNull(imageBytes); + Assert.IsNotNull(result.CachedPathUri); var path = Path.Combine(testDirectory, $"{nameof(Test_02_01_CreateImageEdit_Path)}-{i}-{DateTime.UtcNow:yyyyMMddHHmmss}.png"); - await using var fileStream = new FileStream(path, FileMode.Create, FileAccess.Write); - await fileStream.WriteAsync(imageBytes, 0, imageBytes.Length); + File.Copy(result.CachedPathUri.LocalPath, path, true); } } catch (Exception e) @@ -122,12 +117,10 @@ public async Task Test_02_02_CreateImageEdit_Texture() var result = imageResults[i]; Assert.IsNotNull(result); Assert.IsNotNull(result.Texture); + Assert.IsNotNull(result.CachedPathUri); Assert.IsFalse(string.IsNullOrWhiteSpace(result.B64_Json)); - var imageBytes = Convert.FromBase64String(result.B64_Json); - Assert.IsNotNull(imageBytes); var path = Path.Combine(testDirectory, $"{nameof(Test_02_02_CreateImageEdit_Texture)}-{i}-{DateTime.UtcNow:yyyyMMddHHmmss}.png"); - await using var fileStream = new FileStream(path, FileMode.Create, FileAccess.Write); - await fileStream.WriteAsync(imageBytes, 0, imageBytes.Length); + File.Copy(result.CachedPathUri.LocalPath, path, true); } } catch (Exception e) @@ -159,12 +152,10 @@ public async Task Test_02_03_CreateImageEdit_MaskAsTransparency() var result = imageResults[i]; Assert.IsNotNull(result); Assert.IsNotNull(result.Texture); + Assert.IsNotNull(result.CachedPathUri); Assert.IsFalse(string.IsNullOrWhiteSpace(result.B64_Json)); - var imageBytes = Convert.FromBase64String(result.B64_Json); - Assert.IsNotNull(imageBytes); var path = Path.Combine(testDirectory, $"{nameof(Test_02_03_CreateImageEdit_MaskAsTransparency)}-{i}-{DateTime.UtcNow:yyyyMMddHHmmss}.png"); - await using var fileStream = new FileStream(path, FileMode.Create, FileAccess.Write); - await fileStream.WriteAsync(imageBytes, 0, imageBytes.Length); + File.Copy(result.CachedPathUri.LocalPath, path, true); } } catch (Exception e) @@ -202,12 +193,10 @@ public async Task Test_02_04_CreateImageEdit_MultipleFiles() var result = imageResults[i]; Assert.IsNotNull(result); Assert.IsNotNull(result.Texture); + Assert.IsNotNull(result.CachedPathUri); Assert.IsFalse(string.IsNullOrWhiteSpace(result.B64_Json)); - var imageBytes = Convert.FromBase64String(result.B64_Json); - Assert.IsNotNull(imageBytes); var path = Path.Combine(testDirectory, $"{nameof(Test_02_04_CreateImageEdit_MultipleFiles)}-{i}-{DateTime.UtcNow:yyyyMMddHHmmss}.png"); - await using var fileStream = new FileStream(path, FileMode.Create, FileAccess.Write); - await fileStream.WriteAsync(imageBytes, 0, imageBytes.Length); + File.Copy(result.CachedPathUri.LocalPath, path, true); } } catch (Exception e) @@ -232,8 +221,9 @@ public async Task Test_03_01_CreateImageVariation_Path() foreach (var result in imageResults) { - Assert.IsNotNull(result.Texture); Debug.Log(result.ToString()); + Assert.IsNotNull(result.Texture); + Assert.IsNull(result.CachedPathUri); } } catch (Exception e) @@ -261,6 +251,7 @@ 
public async Task Test_03_02_CreateImageVariation_Texture() { Debug.Log(result.ToString()); Assert.IsNotNull(result.Texture); + Assert.IsNull(result.CachedPathUri); } } catch (Exception e) @@ -289,12 +280,10 @@ public async Task Test_03_03_CreateImageVariation_Texture_B64_Json() var result = imageResults[i]; Assert.IsNotNull(result); Assert.IsNotNull(result.Texture); + Assert.IsNotNull(result.CachedPathUri); Assert.IsFalse(string.IsNullOrWhiteSpace(result.B64_Json)); - var imageBytes = Convert.FromBase64String(result.B64_Json); - Assert.IsNotNull(imageBytes); var path = Path.Combine(testDirectory, $"{nameof(Test_03_03_CreateImageVariation_Texture_B64_Json)}-{i}-{DateTime.UtcNow:yyyyMMddHHmmss}.png"); - await using var fileStream = new FileStream(path, FileMode.Create, FileAccess.Write); - await fileStream.WriteAsync(imageBytes, 0, imageBytes.Length); + File.Copy(result.CachedPathUri.LocalPath, path, true); } } catch (Exception e) diff --git a/OpenAI/Packages/com.openai.unity/Tests/TestFixture_07_Audio.cs b/OpenAI/Packages/com.openai.unity/Tests/TestFixture_07_Audio.cs index 35194593..522284db 100644 --- a/OpenAI/Packages/com.openai.unity/Tests/TestFixture_07_Audio.cs +++ b/OpenAI/Packages/com.openai.unity/Tests/TestFixture_07_Audio.cs @@ -182,10 +182,10 @@ public async Task Test_03_02_01_Speech_Streaming() input: "Hello world!", responseFormat: SpeechResponseFormat.PCM); var clipQueue = new ConcurrentQueue(); - using var speechClip = await OpenAIClient.AudioEndpoint.GetSpeechAsync(request, partialClip => + using var speechClip = await OpenAIClient.AudioEndpoint.GetSpeechAsync(request, async partialClip => { clipQueue.Enqueue(partialClip); - return Task.CompletedTask; + await Task.CompletedTask; }); Debug.Log(speechClip.CachePath); Assert.IsNotEmpty(speechClip.AudioSamples); @@ -205,10 +205,10 @@ public async Task Test_03_02_02_SpeechWithInstructions_Streaming() responseFormat: SpeechResponseFormat.PCM, instructions: instructions); var clipQueue = new ConcurrentQueue(); - using var speechClip = await OpenAIClient.AudioEndpoint.GetSpeechAsync(request, partialClip => + using var speechClip = await OpenAIClient.AudioEndpoint.GetSpeechAsync(request, async partialClip => { clipQueue.Enqueue(partialClip); - return Task.CompletedTask; + await Task.CompletedTask; }); Debug.Log(speechClip.CachePath); Assert.IsNotEmpty(speechClip.AudioSamples); diff --git a/OpenAI/Packages/com.openai.unity/package.json b/OpenAI/Packages/com.openai.unity/package.json index 28de899e..813b228b 100644 --- a/OpenAI/Packages/com.openai.unity/package.json +++ b/OpenAI/Packages/com.openai.unity/package.json @@ -3,7 +3,7 @@ "displayName": "OpenAI", "description": "A OpenAI package for the Unity to use though their RESTful API.\n\nIndependently developed, this is not an official library and I am not affiliated with OpenAI.\n\nAn OpenAI API account is required.", "keywords": [], - "version": "8.8.7", + "version": "8.8.8", "unity": "2021.3", "documentationUrl": "https://github.com/RageAgainstThePixel/com.openai.unity#documentation", "changelogUrl": "https://github.com/RageAgainstThePixel/com.openai.unity/releases", @@ -17,8 +17,9 @@ "url": "https://github.com/StephenHodgson" }, "dependencies": { + "com.utilities.audio": "3.0.2", "com.utilities.encoder.wav": "3.0.2", - "com.utilities.rest": "5.0.4", + "com.utilities.rest": "5.1.1", "com.utilities.websockets": "2.0.0" }, "samples": [ diff --git a/OpenAI/Packages/manifest.json b/OpenAI/Packages/manifest.json index 2797ec87..b319e521 100644 --- a/OpenAI/Packages/manifest.json +++ 
b/OpenAI/Packages/manifest.json @@ -3,6 +3,7 @@ "com.unity.ide.rider": "3.0.38", "com.unity.ide.visualstudio": "2.0.25", "com.unity.inputsystem": "1.14.2", + "com.unity.mobile.android-logcat": "1.4.6", "com.unity.textmeshpro": "3.0.9", "com.unity.ugui": "1.0.0", "com.utilities.buildpipeline": "1.8.1" diff --git a/OpenAI/ProjectSettings/ProjectSettings.asset b/OpenAI/ProjectSettings/ProjectSettings.asset index adc2a0e4..d4e6dd49 100644 --- a/OpenAI/ProjectSettings/ProjectSettings.asset +++ b/OpenAI/ProjectSettings/ProjectSettings.asset @@ -998,7 +998,7 @@ PlayerSettings: m_VersionCode: 1 m_VersionName: 8.7.4 apiCompatibilityLevel: 3 - activeInputHandler: 2 + activeInputHandler: 1 windowsGamepadBackendHint: 0 cloudProjectId: framebufferDepthMemorylessMode: 0
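As an illustrative aside, a minimal C# sketch of consuming the new `ImageResult.LoadTextureAsync` extension and `CachedPathUri` property from caller code; how the `ImageResult` is obtained and the helper class shown are assumptions for the example, not prescribed by this changeset.

```csharp
// Illustrative sketch only. Assumes an ImageResult obtained elsewhere (e.g. from the Images
// endpoint); LoadTextureAsync, Texture, and CachedPathUri are the members introduced or
// changed by this changeset.
using System.Threading;
using System.Threading.Tasks;
using OpenAI.Extensions;
using OpenAI.Images;
using UnityEngine;

public static class ImageResultSketch
{
    public static async Task<Texture2D> ResolveTextureAsync(ImageResult result, CancellationToken cancellationToken = default)
    {
        // Handles b64_json payloads, previously cached files, and remote URLs, populating
        // result.Texture and result.CachedPathUri as it goes.
        var (texture, cachedUri) = await result.LoadTextureAsync(debug: false, cancellationToken: cancellationToken);
        Debug.Log($"Loaded {texture.width}x{texture.height} texture (cached at {cachedUri})");
        return texture;
    }
}
```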