From f59ce9247ac0333588f4a90b791b8b2b38577ab0 Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Mon, 6 Mar 2023 10:58:49 +0800 Subject: [PATCH 01/14] Target .Net 6 and .Net Standard 2.0 for the SDK project --- OpenAI.SDK/OpenAI.GPT3.csproj | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/OpenAI.SDK/OpenAI.GPT3.csproj b/OpenAI.SDK/OpenAI.GPT3.csproj index e8e47acc..cc239bf2 100644 --- a/OpenAI.SDK/OpenAI.GPT3.csproj +++ b/OpenAI.SDK/OpenAI.GPT3.csproj @@ -1,9 +1,10 @@  - net6.0 + net6.0;netstandard2.0 enable enable + Latest Betalgo Up Ltd. https://openai.com/ @@ -46,5 +47,8 @@ - + + + + From bf80437949693ce6fcadf9f544279998f6fadb68 Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Mon, 6 Mar 2023 11:01:21 +0800 Subject: [PATCH 02/14] Fix XmlDoc param name --- OpenAI.SDK/Interfaces/IChatCompletionService.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/OpenAI.SDK/Interfaces/IChatCompletionService.cs b/OpenAI.SDK/Interfaces/IChatCompletionService.cs index 46f8a446..0c6b5f56 100644 --- a/OpenAI.SDK/Interfaces/IChatCompletionService.cs +++ b/OpenAI.SDK/Interfaces/IChatCompletionService.cs @@ -30,7 +30,7 @@ public interface IChatCompletionService /// /// Creates a new completion for the provided prompt and parameters /// - /// + /// /// The ID of the model to use for this request /// Propagates notification that operations should be canceled. /// From b90f2b939bb1f4022deca26bae5fc33ef036d37a Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Mon, 6 Mar 2023 11:23:12 +0800 Subject: [PATCH 03/14] Refactor default implementations in interfaces into static extension classes to provide compatibility with .Net Standard 2.0 --- .../Interfaces/IChatCompletionService.cs | 8 +++- OpenAI.SDK/Interfaces/ICompletionService.cs | 8 +++- OpenAI.SDK/Interfaces/IEditService.cs | 8 +++- OpenAI.SDK/Interfaces/IFileService.cs | 40 ++++++++++--------- OpenAI.SDK/Interfaces/IImageService.cs | 26 +++++++----- OpenAI.SDK/Interfaces/IModerationService.cs | 8 +++- 6 files changed, 61 insertions(+), 37 deletions(-) diff --git a/OpenAI.SDK/Interfaces/IChatCompletionService.cs b/OpenAI.SDK/Interfaces/IChatCompletionService.cs index 0c6b5f56..f39553e1 100644 --- a/OpenAI.SDK/Interfaces/IChatCompletionService.cs +++ b/OpenAI.SDK/Interfaces/IChatCompletionService.cs @@ -26,16 +26,20 @@ public interface IChatCompletionService /// Propagates notification that operations should be canceled. /// IAsyncEnumerable CreateCompletionAsStream(ChatCompletionCreateRequest chatCompletionCreate, string? modelId = null, CancellationToken cancellationToken = default); +} +public static class IChatCompletionServiceExtension +{ /// /// Creates a new completion for the provided prompt and parameters /// + /// /// /// The ID of the model to use for this request /// Propagates notification that operations should be canceled. 
/// - Task Create(ChatCompletionCreateRequest chatCompletionCreate, Models.Model modelId, CancellationToken cancellationToken = default) + public static Task Create(this IChatCompletionService service, ChatCompletionCreateRequest chatCompletionCreate, Models.Model modelId, CancellationToken cancellationToken = default) { - return CreateCompletion(chatCompletionCreate, modelId.EnumToString(), cancellationToken); + return service.CreateCompletion(chatCompletionCreate, modelId.EnumToString(), cancellationToken); } } \ No newline at end of file diff --git a/OpenAI.SDK/Interfaces/ICompletionService.cs b/OpenAI.SDK/Interfaces/ICompletionService.cs index 3954b27f..9251c272 100644 --- a/OpenAI.SDK/Interfaces/ICompletionService.cs +++ b/OpenAI.SDK/Interfaces/ICompletionService.cs @@ -27,16 +27,20 @@ public interface ICompletionService /// Propagates notification that operations should be canceled. /// IAsyncEnumerable CreateCompletionAsStream(CompletionCreateRequest createCompletionModel, string? modelId = null, CancellationToken cancellationToken = default); +} +public static class ICompletionServiceExtension +{ /// /// Creates a new completion for the provided prompt and parameters /// + /// /// /// The ID of the model to use for this request /// Propagates notification that operations should be canceled. /// - Task Create(CompletionCreateRequest createCompletionModel, Models.Model modelId, CancellationToken cancellationToken = default) + public static Task Create(this ICompletionService service, CompletionCreateRequest createCompletionModel, Models.Model modelId, CancellationToken cancellationToken = default) { - return CreateCompletion(createCompletionModel, modelId.EnumToString(), cancellationToken); + return service.CreateCompletion(createCompletionModel, modelId.EnumToString(), cancellationToken); } } \ No newline at end of file diff --git a/OpenAI.SDK/Interfaces/IEditService.cs b/OpenAI.SDK/Interfaces/IEditService.cs index 19422df7..53cad721 100644 --- a/OpenAI.SDK/Interfaces/IEditService.cs +++ b/OpenAI.SDK/Interfaces/IEditService.cs @@ -17,16 +17,20 @@ public interface IEditService /// Propagates notification that operations should be canceled. /// Task CreateEdit(EditCreateRequest editCreate, string? modelId = null, CancellationToken cancellationToken = default); +} +public static class IEditServiceExtension +{ /// /// Creates a new edit for the provided input, instruction, and parameters /// + /// /// /// The ID of the model to use for this request /// Propagates notification that operations should be canceled. 
/// - Task Edit(EditCreateRequest editCreate, Models.Model modelId, CancellationToken cancellationToken = default) + public static Task Edit(this IEditService service, EditCreateRequest editCreate, Models.Model modelId, CancellationToken cancellationToken = default) { - return CreateEdit(editCreate, modelId.EnumToString(), cancellationToken); + return service.CreateEdit(editCreate, modelId.EnumToString(), cancellationToken); } } \ No newline at end of file diff --git a/OpenAI.SDK/Interfaces/IFileService.cs b/OpenAI.SDK/Interfaces/IFileService.cs index c8482b39..2e2a5dd0 100644 --- a/OpenAI.SDK/Interfaces/IFileService.cs +++ b/OpenAI.SDK/Interfaces/IFileService.cs @@ -35,21 +35,6 @@ public interface IFileService /// Task UploadFile(string purpose, byte[] file, string fileName, CancellationToken cancellationToken = default); - Task FileUpload(string purpose, Stream file, string fileName, CancellationToken cancellationToken = default) - { - return UploadFile(purpose, file.ToByteArray(), fileName, cancellationToken); - } - - Task FileUpload(UploadFilePurposes.UploadFilePurpose purpose, Stream file, string fileName, CancellationToken cancellationToken = default) - { - return UploadFile(purpose.EnumToString(), file.ToByteArray(), fileName, cancellationToken); - } - - Task FileUpload(UploadFilePurposes.UploadFilePurpose purpose, byte[] file, string fileName, CancellationToken cancellationToken = default) - { - return UploadFile(purpose.EnumToString(), file, fileName, cancellationToken); - } - /// /// Delete a file. /// @@ -72,16 +57,35 @@ Task FileUpload(UploadFilePurposes.UploadFilePurpose purpose /// The ID of the file to use for this request /// Propagates notification that operations should be canceled. /// - Task> RetrieveFileContent(string fileId, CancellationToken cancellationToken = default) + Task> RetrieveFileContent(string fileId, CancellationToken cancellationToken = default); +} + +public static class IFileServiceExtension +{ + public static Task FileUpload(this IFileService service, string purpose, Stream file, string fileName, CancellationToken cancellationToken = default) + { + return service.UploadFile(purpose, file.ToByteArray(), fileName, cancellationToken); + } + + public static Task FileUpload(this IFileService service, UploadFilePurposes.UploadFilePurpose purpose, Stream file, string fileName, CancellationToken cancellationToken = default) + { + return service.UploadFile(purpose.EnumToString(), file.ToByteArray(), fileName, cancellationToken); + } + + public static Task FileUpload(this IFileService service, UploadFilePurposes.UploadFilePurpose purpose, byte[] file, string fileName, CancellationToken cancellationToken = default) { - return RetrieveFileContent(fileId, cancellationToken); + return service.UploadFile(purpose.EnumToString(), file, fileName, cancellationToken); } /// /// Returns the contents of the specified file /// + /// /// The ID of the file to use for this request /// Propagates notification that operations should be canceled. 
/// - Task> RetrieveFileContent(string fileId, CancellationToken cancellationToken = default); + public static Task> RetrieveFileContent(this IFileService service, string fileId, CancellationToken cancellationToken = default) + { + return service.RetrieveFileContent(fileId, cancellationToken); + } } \ No newline at end of file diff --git a/OpenAI.SDK/Interfaces/IImageService.cs b/OpenAI.SDK/Interfaces/IImageService.cs index 602e53a1..5a1b9312 100644 --- a/OpenAI.SDK/Interfaces/IImageService.cs +++ b/OpenAI.SDK/Interfaces/IImageService.cs @@ -17,17 +17,6 @@ public interface IImageService /// Task CreateImage(ImageCreateRequest imageCreate, CancellationToken cancellationToken = default); - /// - /// Creates an image given a prompt. - /// - /// - /// Propagates notification that operations should be canceled. - /// - Task CreateImage(string prompt, CancellationToken cancellationToken = default) - { - return CreateImage(new ImageCreateRequest(prompt), cancellationToken); - } - /// /// Creates an edited or extended image given an original image and a prompt. /// @@ -43,4 +32,19 @@ Task CreateImage(string prompt, CancellationToken cancellat /// Propagates notification that operations should be canceled. /// Task CreateImageVariation(ImageVariationCreateRequest imageEditCreateRequest, CancellationToken cancellationToken = default); +} + +public static class IImageServiceExtension +{ + /// + /// Creates an image given a prompt. + /// + /// + /// + /// Propagates notification that operations should be canceled. + /// + public static Task CreateImage(this IImageService service, string prompt, CancellationToken cancellationToken = default) + { + return service.CreateImage(new ImageCreateRequest(prompt), cancellationToken); + } } \ No newline at end of file diff --git a/OpenAI.SDK/Interfaces/IModerationService.cs b/OpenAI.SDK/Interfaces/IModerationService.cs index 59b7cba9..fc927455 100644 --- a/OpenAI.SDK/Interfaces/IModerationService.cs +++ b/OpenAI.SDK/Interfaces/IModerationService.cs @@ -16,10 +16,14 @@ public interface IModerationService /// Propagates notification that operations should be canceled. /// Task CreateModeration(CreateModerationRequest createModerationRequest, CancellationToken cancellationToken = default); +} +public static class IModerationServiceExtension +{ /// /// Classifies if text violates OpenAI's Content Policy /// + /// /// The input text to classify /// /// Two content moderations models are available: text-moderation-stable and text-moderation-latest. @@ -29,9 +33,9 @@ public interface IModerationService /// /// Propagates notification that operations should be canceled. /// - Task CreateModeration(string input, string? model = null, CancellationToken cancellationToken = default) + public static Task CreateModeration(this IModerationService service, string input, string? 
model = null, CancellationToken cancellationToken = default) { - return CreateModeration(new CreateModerationRequest + return service.CreateModeration(new CreateModerationRequest { Input = input, Model = model From d2ead714f572a1ab331a0b89c04dba72dfafdccd Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Mon, 6 Mar 2023 11:29:16 +0800 Subject: [PATCH 04/14] Replace range indexer with Substring as range indexer does not exist in .Net Standard 2.0 --- OpenAI.SDK/Extensions/StringExtensions.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/OpenAI.SDK/Extensions/StringExtensions.cs b/OpenAI.SDK/Extensions/StringExtensions.cs index 1e770266..289382f1 100644 --- a/OpenAI.SDK/Extensions/StringExtensions.cs +++ b/OpenAI.SDK/Extensions/StringExtensions.cs @@ -6,7 +6,7 @@ public static class StringExtensions { /// - /// Remove the search string from the begging of string if exist + /// Remove the search string from the beginning of string if it exists /// /// /// /// public static string RemoveIfStartWith(this string text, string search) { var pos = text.IndexOf(search, StringComparison.Ordinal); - return pos != 0 ? text : text[search.Length..]; + return pos != 0 ? text : text.Substring(search.Length); } } \ No newline at end of file From 7b75500bcb40285d03912a0e01fc46e813e8e897 Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Mon, 6 Mar 2023 11:38:21 +0800 Subject: [PATCH 05/14] Use a char instead of a string for maximum compatibility with string.Split() --- OpenAI.SDK/Tokenizer/GPT3/GPT3Settings.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/OpenAI.SDK/Tokenizer/GPT3/GPT3Settings.cs b/OpenAI.SDK/Tokenizer/GPT3/GPT3Settings.cs index 40d41623..0318a9ed 100644 --- a/OpenAI.SDK/Tokenizer/GPT3/GPT3Settings.cs +++ b/OpenAI.SDK/Tokenizer/GPT3/GPT3Settings.cs @@ -17,7 +17,7 @@ internal static class TokenizerGpt3Settings private static Dictionary, int> BuildBpeRanks() { - var lines = EmbeddedResource.Read("vocab.bpe").Split("\n"); + var lines = EmbeddedResource.Read("vocab.bpe").Split('\n'); var bpeMerges = new ArraySegment(lines, 1, lines.Length - 1) .Where(x => x.Trim().Length > 0) .Select(x => From bb37f98dfb7e97067c36e2a54d206bc0e67d410a Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Mon, 6 Mar 2023 12:09:28 +0800 Subject: [PATCH 06/14] Add alternate code path for .Net Standard 2.0 so that we can keep the same method signature.
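Both this patch and the extension-class refactor in PATCH 03 serve the same goal: call sites keep compiling unchanged on both target frameworks. For PATCH 03 the underlying reason is that C# 8 default interface implementations are not available when targeting .NET Standard 2.0 (which caps the language at C# 7.3), so each default member is moved into a static extension class. A minimal illustrative sketch of that pattern; IWidgetService and WidgetKind are made-up names, not SDK types:

    using System.Threading;
    using System.Threading.Tasks;

    public interface IWidgetService
    {
        // The member every implementation must provide.
        Task<string> CreateWidget(string name, CancellationToken cancellationToken = default);

        // Not usable down-level (C# 8 default interface implementation):
        // Task<string> Create(WidgetKind kind) => CreateWidget(kind.ToString());
    }

    public static class WidgetServiceExtensions
    {
        // Same call-site shape, service.Create(...), but compiled as a plain
        // static method, which C# 7.3 / .NET Standard 2.0 supports.
        public static Task<string> Create(this IWidgetService service, WidgetKind kind, CancellationToken cancellationToken = default)
        {
            return service.CreateWidget(kind.ToString(), cancellationToken);
        }
    }

    public enum WidgetKind
    {
        Basic,
        Fancy
    }

The trade-off is that an extension method binds statically, so an implementing class can no longer override the convenience overload the way it could re-implement a default interface member; for thin forwarding helpers like these that is normally acceptable.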
--- OpenAI.SDK/Extensions/HttpclientExtensions.cs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/OpenAI.SDK/Extensions/HttpclientExtensions.cs b/OpenAI.SDK/Extensions/HttpclientExtensions.cs index fea9cdda..991c27ec 100644 --- a/OpenAI.SDK/Extensions/HttpclientExtensions.cs +++ b/OpenAI.SDK/Extensions/HttpclientExtensions.cs @@ -29,7 +29,13 @@ public static HttpResponseMessage PostAsStreamAsync(this HttpClient client, stri request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("text/event-stream")); request.Content = content; +#if NET6_0 return client.Send(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken); +#else + var responseTask = client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken); + var response = responseTask.GetAwaiter().GetResult(); + return response; +#endif } public static async Task PostFileAndReadAsAsync(this HttpClient client, string uri, HttpContent content, CancellationToken cancellationToken = default) From 02adb40320dd0a1c42d7ace63ff36c8e5e208ee2 Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Mon, 6 Mar 2023 13:28:06 +0800 Subject: [PATCH 07/14] Provide .Net Standard 2 HttpContent and HttpClient extension methods to maintain backwards compatibility. --- OpenAI.SDK/Extensions/HttpclientExtensions.cs | 41 ++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/OpenAI.SDK/Extensions/HttpclientExtensions.cs b/OpenAI.SDK/Extensions/HttpclientExtensions.cs index 991c27ec..c50f7840 100644 --- a/OpenAI.SDK/Extensions/HttpclientExtensions.cs +++ b/OpenAI.SDK/Extensions/HttpclientExtensions.cs @@ -55,4 +55,43 @@ public static async Task DeleteAndReadAsAsync(this HttpCli var response = await client.DeleteAsync(uri, cancellationToken); return await response.Content.ReadFromJsonAsync(cancellationToken: cancellationToken) ?? 
throw new InvalidOperationException(); } -} \ No newline at end of file + +#if NETSTANDARD2_0 + public static async Task ReadAsStringAsync(this HttpContent content, CancellationToken cancellationToken) + { + var stream = await content.ReadAsStreamAsync().WithCancellation(cancellationToken); + using var sr = new StreamReader(stream); + return await sr.ReadToEndAsync().WithCancellation(cancellationToken); + } + + public static async Task ReadAsStreamAsync(this HttpContent content, CancellationToken cancellationToken) + { + return await content.ReadAsStreamAsync().WithCancellation(cancellationToken); + } + + public static async Task ReadAsByteArrayAsync(this HttpContent content, CancellationToken cancellationToken) + { + return await content.ReadAsByteArrayAsync().WithCancellation(cancellationToken); + } + + public static async Task GetStreamAsync(this HttpClient client, string requestUri, CancellationToken cancellationToken) + { + var response = await client.GetAsync(requestUri, cancellationToken); + return await response.Content.ReadAsStreamAsync(cancellationToken); + } + + public static async Task WithCancellation(this Task task, CancellationToken cancellationToken) + { + var tcs = new TaskCompletionSource(); + using (cancellationToken.Register(s => ((TaskCompletionSource)s).TrySetResult(true), tcs)) + { + if (task != await Task.WhenAny(task, tcs.Task)) + { + throw new OperationCanceledException(cancellationToken); + } + } + + return await task; + } +#endif +} From dc31b3f3ac999c00dda2eeaee0447d7a89dd9d60 Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Mon, 6 Mar 2023 15:10:33 +0800 Subject: [PATCH 08/14] Replace 'await using' with a try/finally block because 'await using' is not defined for .Net Standard 2.0. --- OpenAI.SDK/Managers/OpenAIChatCompletions.cs | 82 ++++++++++++-------- OpenAI.SDK/Managers/OpenAICompletions.cs | 82 ++++++++++++-------- 2 files changed, 100 insertions(+), 64 deletions(-) diff --git a/OpenAI.SDK/Managers/OpenAIChatCompletions.cs b/OpenAI.SDK/Managers/OpenAIChatCompletions.cs index 746e1506..356ebd0f 100644 --- a/OpenAI.SDK/Managers/OpenAIChatCompletions.cs +++ b/OpenAI.SDK/Managers/OpenAIChatCompletions.cs @@ -27,46 +27,64 @@ public async IAsyncEnumerable CreateCompletionAsSt chatCompletionCreateRequest.ProcessModelId(modelId, _defaultModelId); using var response = _httpClient.PostAsStreamAsync(_endpointProvider.ChatCompletionCreate(), chatCompletionCreateRequest, cancellationToken); - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken); - using var reader = new StreamReader(stream); - // Continuously read the stream until the end of it - while (!reader.EndOfStream) + Stream? stream = null; + try { - cancellationToken.ThrowIfCancellationRequested(); - - var line = await reader.ReadLineAsync(); - // Skip empty lines - if (string.IsNullOrEmpty(line)) + stream = await response.Content.ReadAsStreamAsync(cancellationToken); + using var reader = new StreamReader(stream); + // Continuously read the stream until the end of it + while (!reader.EndOfStream) { - continue; - } + cancellationToken.ThrowIfCancellationRequested(); - line = line.RemoveIfStartWith("data: "); + var line = await reader.ReadLineAsync(); + // Skip empty lines + if (string.IsNullOrEmpty(line)) + { + continue; + } - // Exit the loop if the stream is done - if (line.StartsWith("[DONE]")) - { - break; - } + line = line.RemoveIfStartWith("data: "); - ChatCompletionCreateResponse? 
block; - try - { - // When the response is good, each line is a serializable CompletionCreateRequest - block = JsonSerializer.Deserialize(line); - } - catch (Exception) - { - // When the API returns an error, it does not come back as a block, it returns a single character of text ("{"). - // In this instance, read through the rest of the response, which should be a complete object to parse. - line += await reader.ReadToEndAsync(); - block = JsonSerializer.Deserialize(line); - } + // Exit the loop if the stream is done + if (line.StartsWith("[DONE]")) + { + break; + } + + ChatCompletionCreateResponse? block; + try + { + // When the response is good, each line is a serializable CompletionCreateRequest + block = JsonSerializer.Deserialize(line); + } + catch (Exception) + { + // When the API returns an error, it does not come back as a block, it returns a single character of text ("{"). + // In this instance, read through the rest of the response, which should be a complete object to parse. + line += await reader.ReadToEndAsync(); + block = JsonSerializer.Deserialize(line); + } - if (null != block) + if (null != block) + { + yield return block; + } + } + } + finally + { + if (stream is not null) { - yield return block; + if (stream is IAsyncDisposable asyncDisposable) + { + await asyncDisposable.DisposeAsync(); + } + else + { + stream.Dispose(); + } } } } diff --git a/OpenAI.SDK/Managers/OpenAICompletions.cs b/OpenAI.SDK/Managers/OpenAICompletions.cs index dc8e373e..a37ca36c 100644 --- a/OpenAI.SDK/Managers/OpenAICompletions.cs +++ b/OpenAI.SDK/Managers/OpenAICompletions.cs @@ -26,46 +26,64 @@ public async IAsyncEnumerable CreateCompletionAsStream createCompletionRequest.ProcessModelId(modelId, _defaultModelId); using var response = _httpClient.PostAsStreamAsync(_endpointProvider.CompletionCreate(), createCompletionRequest, cancellationToken); - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken); - using var reader = new StreamReader(stream); - // Continuously read the stream until the end of it - while (!reader.EndOfStream) + Stream? stream = null; + try { - cancellationToken.ThrowIfCancellationRequested(); - - var line = await reader.ReadLineAsync(); - // Skip empty lines - if (string.IsNullOrEmpty(line)) + stream = await response.Content.ReadAsStreamAsync(cancellationToken); + using var reader = new StreamReader(stream); + // Continuously read the stream until the end of it + while (!reader.EndOfStream) { - continue; - } + cancellationToken.ThrowIfCancellationRequested(); - line = line.RemoveIfStartWith("data: "); + var line = await reader.ReadLineAsync(); + // Skip empty lines + if (string.IsNullOrEmpty(line)) + { + continue; + } - // Exit the loop if the stream is done - if (line.StartsWith("[DONE]")) - { - break; - } + line = line.RemoveIfStartWith("data: "); - CompletionCreateResponse? block; - try - { - // When the response is good, each line is a serializable CompletionCreateRequest - block = JsonSerializer.Deserialize(line); - } - catch (Exception) - { - // When the API returns an error, it does not come back as a block, it returns a single character of text ("{"). - // In this instance, read through the rest of the response, which should be a complete object to parse. - line += await reader.ReadToEndAsync(); - block = JsonSerializer.Deserialize(line); - } + // Exit the loop if the stream is done + if (line.StartsWith("[DONE]")) + { + break; + } + + CompletionCreateResponse? 
block; + try + { + // When the response is good, each line is a serializable CompletionCreateRequest + block = JsonSerializer.Deserialize(line); + } + catch (Exception) + { + // When the API returns an error, it does not come back as a block, it returns a single character of text ("{"). + // In this instance, read through the rest of the response, which should be a complete object to parse. + line += await reader.ReadToEndAsync(); + block = JsonSerializer.Deserialize(line); + } - if (null != block) + if (null != block) + { + yield return block; + } + } + } + finally + { + if (stream is not null) { - yield return block; + if (stream is IAsyncDisposable asyncDisposable) + { + await asyncDisposable.DisposeAsync(); + } + else + { + stream.Dispose(); + } } } } From 39222f3cbf2eb817d73e3efe93e438ce9ba35471 Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Mon, 6 Mar 2023 16:32:19 +0800 Subject: [PATCH 09/14] Future .Net compatibility --- OpenAI.SDK/Extensions/HttpclientExtensions.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/OpenAI.SDK/Extensions/HttpclientExtensions.cs b/OpenAI.SDK/Extensions/HttpclientExtensions.cs index c50f7840..d435420f 100644 --- a/OpenAI.SDK/Extensions/HttpclientExtensions.cs +++ b/OpenAI.SDK/Extensions/HttpclientExtensions.cs @@ -29,7 +29,7 @@ public static HttpResponseMessage PostAsStreamAsync(this HttpClient client, stri request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("text/event-stream")); request.Content = content; -#if NET6_0 +#if NET6_0_OR_GREATER return client.Send(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken); #else var responseTask = client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken); From cb6dd0641bb7195b01091073ec35a83b449535d5 Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Tue, 7 Mar 2023 10:38:49 +0800 Subject: [PATCH 10/14] Revert CreateCompletionAsStream to previous implementation so that we don't break .Net 6 async stream behaviour. --- OpenAI.SDK/Managers/OpenAIChatCompletions.cs | 82 ++++++++------------ OpenAI.SDK/Managers/OpenAICompletions.cs | 82 ++++++++------------ 2 files changed, 64 insertions(+), 100 deletions(-) diff --git a/OpenAI.SDK/Managers/OpenAIChatCompletions.cs b/OpenAI.SDK/Managers/OpenAIChatCompletions.cs index 356ebd0f..746e1506 100644 --- a/OpenAI.SDK/Managers/OpenAIChatCompletions.cs +++ b/OpenAI.SDK/Managers/OpenAIChatCompletions.cs @@ -27,64 +27,46 @@ public async IAsyncEnumerable CreateCompletionAsSt chatCompletionCreateRequest.ProcessModelId(modelId, _defaultModelId); using var response = _httpClient.PostAsStreamAsync(_endpointProvider.ChatCompletionCreate(), chatCompletionCreateRequest, cancellationToken); - Stream? 
stream = null; - try + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken); + using var reader = new StreamReader(stream); + // Continuously read the stream until the end of it + while (!reader.EndOfStream) { - stream = await response.Content.ReadAsStreamAsync(cancellationToken); - using var reader = new StreamReader(stream); - // Continuously read the stream until the end of it - while (!reader.EndOfStream) - { - cancellationToken.ThrowIfCancellationRequested(); + cancellationToken.ThrowIfCancellationRequested(); - var line = await reader.ReadLineAsync(); - // Skip empty lines - if (string.IsNullOrEmpty(line)) - { - continue; - } + var line = await reader.ReadLineAsync(); + // Skip empty lines + if (string.IsNullOrEmpty(line)) + { + continue; + } - line = line.RemoveIfStartWith("data: "); + line = line.RemoveIfStartWith("data: "); - // Exit the loop if the stream is done - if (line.StartsWith("[DONE]")) - { - break; - } + // Exit the loop if the stream is done + if (line.StartsWith("[DONE]")) + { + break; + } - ChatCompletionCreateResponse? block; - try - { - // When the response is good, each line is a serializable CompletionCreateRequest - block = JsonSerializer.Deserialize(line); - } - catch (Exception) - { - // When the API returns an error, it does not come back as a block, it returns a single character of text ("{"). - // In this instance, read through the rest of the response, which should be a complete object to parse. - line += await reader.ReadToEndAsync(); - block = JsonSerializer.Deserialize(line); - } + ChatCompletionCreateResponse? block; + try + { + // When the response is good, each line is a serializable CompletionCreateRequest + block = JsonSerializer.Deserialize(line); + } + catch (Exception) + { + // When the API returns an error, it does not come back as a block, it returns a single character of text ("{"). + // In this instance, read through the rest of the response, which should be a complete object to parse. + line += await reader.ReadToEndAsync(); + block = JsonSerializer.Deserialize(line); + } - if (null != block) - { - yield return block; - } - } - } - finally - { - if (stream is not null) + if (null != block) { - if (stream is IAsyncDisposable asyncDisposable) - { - await asyncDisposable.DisposeAsync(); - } - else - { - stream.Dispose(); - } + yield return block; } } } diff --git a/OpenAI.SDK/Managers/OpenAICompletions.cs b/OpenAI.SDK/Managers/OpenAICompletions.cs index a37ca36c..dc8e373e 100644 --- a/OpenAI.SDK/Managers/OpenAICompletions.cs +++ b/OpenAI.SDK/Managers/OpenAICompletions.cs @@ -26,64 +26,46 @@ public async IAsyncEnumerable CreateCompletionAsStream createCompletionRequest.ProcessModelId(modelId, _defaultModelId); using var response = _httpClient.PostAsStreamAsync(_endpointProvider.CompletionCreate(), createCompletionRequest, cancellationToken); - Stream? 
stream = null; - try + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken); + using var reader = new StreamReader(stream); + // Continuously read the stream until the end of it + while (!reader.EndOfStream) { - stream = await response.Content.ReadAsStreamAsync(cancellationToken); - using var reader = new StreamReader(stream); - // Continuously read the stream until the end of it - while (!reader.EndOfStream) - { - cancellationToken.ThrowIfCancellationRequested(); + cancellationToken.ThrowIfCancellationRequested(); - var line = await reader.ReadLineAsync(); - // Skip empty lines - if (string.IsNullOrEmpty(line)) - { - continue; - } + var line = await reader.ReadLineAsync(); + // Skip empty lines + if (string.IsNullOrEmpty(line)) + { + continue; + } - line = line.RemoveIfStartWith("data: "); + line = line.RemoveIfStartWith("data: "); - // Exit the loop if the stream is done - if (line.StartsWith("[DONE]")) - { - break; - } + // Exit the loop if the stream is done + if (line.StartsWith("[DONE]")) + { + break; + } - CompletionCreateResponse? block; - try - { - // When the response is good, each line is a serializable CompletionCreateRequest - block = JsonSerializer.Deserialize(line); - } - catch (Exception) - { - // When the API returns an error, it does not come back as a block, it returns a single character of text ("{"). - // In this instance, read through the rest of the response, which should be a complete object to parse. - line += await reader.ReadToEndAsync(); - block = JsonSerializer.Deserialize(line); - } + CompletionCreateResponse? block; + try + { + // When the response is good, each line is a serializable CompletionCreateRequest + block = JsonSerializer.Deserialize(line); + } + catch (Exception) + { + // When the API returns an error, it does not come back as a block, it returns a single character of text ("{"). + // In this instance, read through the rest of the response, which should be a complete object to parse. + line += await reader.ReadToEndAsync(); + block = JsonSerializer.Deserialize(line); + } - if (null != block) - { - yield return block; - } - } - } - finally - { - if (stream is not null) + if (null != block) { - if (stream is IAsyncDisposable asyncDisposable) - { - await asyncDisposable.DisposeAsync(); - } - else - { - stream.Dispose(); - } + yield return block; } } } From ed19f8a0a7c9de6c420423f1124a5d0405748280 Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Tue, 7 Mar 2023 13:03:19 +0800 Subject: [PATCH 11/14] Generate an IAsyncDisposable stream for .Net Standard 2 to maintain forwards compatibility with the .Net 6 codebase. 
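The reason a wrapper is needed at all: the completion managers use `await using` on the response stream, and `await using` binds to IAsyncDisposable. On .NET 6 Stream implements that interface directly; on .NET Standard 2.0 it does not (the interface itself would come from the Microsoft.Bcl.AsyncInterfaces package, which is an assumption here, since the package list is not visible in these diffs). A rough sketch of the idea for the netstandard2.0 build; the class added below does the same thing but forwards every Stream member:

    using System;
    using System.IO;
    using System.Threading.Tasks;

    // Decorator: forward Stream members to an inner stream and add DisposeAsync
    // so that `await using` has something to bind to on .NET Standard 2.0.
    public class AsyncDisposableStreamSketch : Stream, IAsyncDisposable
    {
        private readonly Stream _inner;

        public AsyncDisposableStreamSketch(Stream inner)
        {
            _inner = inner;
        }

        public override bool CanRead => _inner.CanRead;
        public override bool CanSeek => _inner.CanSeek;
        public override bool CanWrite => _inner.CanWrite;
        public override long Length => _inner.Length;
        public override long Position { get => _inner.Position; set => _inner.Position = value; }

        public override void Flush() => _inner.Flush();
        public override int Read(byte[] buffer, int offset, int count) => _inner.Read(buffer, offset, count);
        public override long Seek(long offset, SeekOrigin origin) => _inner.Seek(offset, origin);
        public override void SetLength(long value) => _inner.SetLength(value);
        public override void Write(byte[] buffer, int offset, int count) => _inner.Write(buffer, offset, count);

        // On .NET 6+ Stream already exposes DisposeAsync, so, like the class in
        // this patch, a real version belongs behind #if NETSTANDARD2_0.
        public ValueTask DisposeAsync()
        {
            _inner.Dispose();
            return default;
        }
    }

    public static class AwaitUsingDemo
    {
        public static async Task CopyAsync(Stream source, Stream destination)
        {
            // Unchanged call-site shape: the variable is still a Stream, and
            // `await using` disposes it through IAsyncDisposable.
            await using var wrapped = new AsyncDisposableStreamSketch(source);
            await wrapped.CopyToAsync(destination);
        }
    }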
--- .../Extensions/AsyncDisposableStream.cs | 182 ++++++++++++++++++ OpenAI.SDK/Extensions/HttpclientExtensions.cs | 5 +- 2 files changed, 185 insertions(+), 2 deletions(-) create mode 100644 OpenAI.SDK/Extensions/AsyncDisposableStream.cs diff --git a/OpenAI.SDK/Extensions/AsyncDisposableStream.cs b/OpenAI.SDK/Extensions/AsyncDisposableStream.cs new file mode 100644 index 00000000..85a62af7 --- /dev/null +++ b/OpenAI.SDK/Extensions/AsyncDisposableStream.cs @@ -0,0 +1,182 @@ +#if NETSTANDARD2_0 +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace OpenAI.GPT3.Extensions +{ + public class AsyncDisposableStream : Stream, IAsyncDisposable + { + private readonly Stream _innerStream; + + public AsyncDisposableStream(Stream stream) + { + _innerStream = stream; + } + + public new Task CopyToAsync(Stream destination) + { + return _innerStream.CopyToAsync(destination); + } + + public new Task CopyToAsync(Stream destination, int bufferSize) + { + return _innerStream.CopyToAsync(destination, bufferSize); + } + + public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken) + { + return _innerStream.CopyToAsync(destination, bufferSize, cancellationToken); + } + + public new void CopyTo(Stream destination) + { + _innerStream.CopyTo(destination); + } + + public new void CopyTo(Stream destination, int bufferSize) + { + _innerStream.CopyTo(destination, bufferSize); + } + + public override void Close() + { + _innerStream.Close(); + } + + public new void Dispose() + { + _innerStream.Dispose(); + } + + public override void Flush() + { + _innerStream.Flush(); + } + + public new Task FlushAsync() + { + return _innerStream.FlushAsync(); + } + + public override Task FlushAsync(CancellationToken cancellationToken) + { + return _innerStream.FlushAsync(cancellationToken); + } + + public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state) + { + return _innerStream.BeginRead(buffer, offset, count, callback, state); + } + + public override int EndRead(IAsyncResult asyncResult) + { + return _innerStream.EndRead(asyncResult); + } + + public new Task ReadAsync(byte[] buffer, int offset, int count) + { + return _innerStream.ReadAsync(buffer, offset, count); + } + + public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + return _innerStream.ReadAsync(buffer, offset, count, cancellationToken); + } + + public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state) + { + return _innerStream.BeginWrite(buffer, offset, count, callback, state); + } + + public override void EndWrite(IAsyncResult asyncResult) + { + _innerStream.EndWrite(asyncResult); + } + + public new Task WriteAsync(byte[] buffer, int offset, int count) + { + return _innerStream.WriteAsync(buffer, offset, count); + } + + public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + return _innerStream.WriteAsync(buffer, offset, count, cancellationToken); + } + + public override long Seek(long offset, SeekOrigin origin) + { + return _innerStream.Seek(offset, origin); + } + + public override void SetLength(long value) + { + _innerStream.SetLength(value); + } + + public override int Read(byte[] buffer, int offset, int count) + { + return _innerStream.Read(buffer, offset, count); + } + + public override int ReadByte() + { + return _innerStream.ReadByte(); + } + + 
public override void Write(byte[] buffer, int offset, int count) + { + _innerStream.Write(buffer, offset, count); + } + + public override void WriteByte(byte value) + { + _innerStream.WriteByte(value); + } + + public override bool CanRead => _innerStream.CanRead; + + public override bool CanSeek => _innerStream.CanSeek; + + public override bool CanTimeout => _innerStream.CanTimeout; + + public override bool CanWrite => _innerStream.CanWrite; + + public override long Length => _innerStream.Length; + + public override long Position + { + get => _innerStream.Position; + set => _innerStream.Position = value; + } + + public override int ReadTimeout + { + get => _innerStream.ReadTimeout; + set => _innerStream.ReadTimeout = value; + } + + public override int WriteTimeout + { + get => _innerStream.WriteTimeout; + set => _innerStream.WriteTimeout = value; + } + + public async ValueTask DisposeAsync() + { + if (_innerStream != null) + { + if (_innerStream is IAsyncDisposable asyncDisposable) + { + await asyncDisposable.DisposeAsync().ConfigureAwait(false); + } + else + { + await Task.Run(() => _innerStream.Dispose()).ConfigureAwait(false); + } + } + } + } +} +#endif \ No newline at end of file diff --git a/OpenAI.SDK/Extensions/HttpclientExtensions.cs b/OpenAI.SDK/Extensions/HttpclientExtensions.cs index d435420f..a9451413 100644 --- a/OpenAI.SDK/Extensions/HttpclientExtensions.cs +++ b/OpenAI.SDK/Extensions/HttpclientExtensions.cs @@ -64,9 +64,10 @@ public static async Task ReadAsStringAsync(this HttpContent content, Can return await sr.ReadToEndAsync().WithCancellation(cancellationToken); } - public static async Task ReadAsStreamAsync(this HttpContent content, CancellationToken cancellationToken) + public static async Task ReadAsStreamAsync(this HttpContent content, CancellationToken cancellationToken) { - return await content.ReadAsStreamAsync().WithCancellation(cancellationToken); + var stream = await content.ReadAsStreamAsync().WithCancellation(cancellationToken); + return new AsyncDisposableStream(stream); } public static async Task ReadAsByteArrayAsync(this HttpContent content, CancellationToken cancellationToken) From faca949af942584e58c56400c553d64d74013aee Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Wed, 8 Mar 2023 10:31:57 +0800 Subject: [PATCH 12/14] Compatible changes between the 2 different frameworks. Specify needed package references for .Net Standard 2. Exclude LaserCatEyes for .Net Standard 2 as it only supports .Net 6 or later. 
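One detail worth keeping in mind for the FileExtensions fallbacks added below: Stream.ReadAsync may legitimately return fewer bytes than requested, so the fully defensive shape of a ReadAllBytesAsync polyfill loops until the buffer is filled. A sketch of that variant (illustrative only, not the code in this patch):

    using System.IO;
    using System.Threading.Tasks;

    public static class FileReadSketch
    {
        public static async Task<byte[]> ReadAllBytesAsync(string path)
        {
            using (var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, useAsync: true))
            {
                var buffer = new byte[stream.Length];
                var total = 0;
                // Keep reading until the whole file is in the buffer; a single
                // ReadAsync call is allowed to stop short of that.
                while (total < buffer.Length)
                {
                    var read = await stream.ReadAsync(buffer, total, buffer.Length - total);
                    if (read == 0)
                    {
                        throw new EndOfStreamException("Unexpected end of file: " + path);
                    }
                    total += read;
                }
                return buffer;
            }
        }
    }

For the small sample files the playground ships with, a single read will normally fill the buffer anyway, so both shapes behave the same in practice.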
--- OpenAI.Playground/FileExtensions.cs | 36 +++++++++++++++++++ OpenAI.Playground/OpenAI.Playground.csproj | 14 ++++++-- OpenAI.Playground/Program.cs | 6 +++- .../TestHelpers/AudioTestHelper.cs | 4 +-- .../TestHelpers/FileTestHelper.cs | 2 +- .../TestHelpers/FineTuningTestHelper.cs | 2 +- .../TestHelpers/ImageTestHelper.cs | 6 ++-- .../TestHelpers/TokenizerTestHelper.cs | 2 +- 8 files changed, 61 insertions(+), 11 deletions(-) create mode 100644 OpenAI.Playground/FileExtensions.cs diff --git a/OpenAI.Playground/FileExtensions.cs b/OpenAI.Playground/FileExtensions.cs new file mode 100644 index 00000000..666c9d20 --- /dev/null +++ b/OpenAI.Playground/FileExtensions.cs @@ -0,0 +1,36 @@ +namespace OpenAI.Playground; + +using System.IO; +using System.Threading.Tasks; + +public static class FileExtensions +{ + public static async Task ReadAllBytesAsync(string path) + { +#if NET6_0_OR_GREATER + return await File.ReadAllBytesAsync(path); +#else + byte[] buffer; + using (var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true)) + { + buffer = new byte[stream.Length]; + await stream.ReadAsync(buffer, 0, (int)stream.Length); + } + return buffer; +#endif + } + + public static async Task ReadAllTextAsync(string path) + { +#if NET6_0_OR_GREATER + return await File.ReadAllTextAsync(path); +#else + string text; + using (var reader = new StreamReader(path)) + { + text = await reader.ReadToEndAsync(); + } + return text; +#endif + } +} diff --git a/OpenAI.Playground/OpenAI.Playground.csproj b/OpenAI.Playground/OpenAI.Playground.csproj index e54ec75a..6bd03eb1 100644 --- a/OpenAI.Playground/OpenAI.Playground.csproj +++ b/OpenAI.Playground/OpenAI.Playground.csproj @@ -2,7 +2,8 @@ Exe - net6.0 + net6.0;netstandard2.0 + PackageReference enable enable c7a3a51e-4319-4038-b522-9ed24fd5ef66 @@ -19,8 +20,11 @@ - + + + + @@ -30,6 +34,12 @@ + + + + + + diff --git a/OpenAI.Playground/Program.cs b/OpenAI.Playground/Program.cs index 75fcf63f..3737e74b 100644 --- a/OpenAI.Playground/Program.cs +++ b/OpenAI.Playground/Program.cs @@ -1,4 +1,6 @@ -using LaserCatEyes.HttpClientListener; +#if NET6_0_OR_GREATER +using LaserCatEyes.HttpClientListener; +#endif using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using OpenAI.GPT3.Extensions; @@ -13,10 +15,12 @@ var serviceCollection = new ServiceCollection(); serviceCollection.AddScoped(_ => configuration); +#if NET6_0_OR_GREATER // Laser cat eyes is a tool that shows your requests and responses between OpenAI server and your client. // Get your app key from https://lasercateyes.com for FREE and put it under ApiSettings.json or secrets.json. // It is in Beta version, if you don't want to use it just comment out below line. serviceCollection.AddLaserCatEyesHttpClientListener(); +#endif serviceCollection.AddOpenAIService(); //// DeploymentId and ResourceName are only for Azure OpenAI. If you want to use Azure OpenAI services you have to set Provider type To Azure. 
diff --git a/OpenAI.Playground/TestHelpers/AudioTestHelper.cs b/OpenAI.Playground/TestHelpers/AudioTestHelper.cs index f00df448..6558ca6f 100644 --- a/OpenAI.Playground/TestHelpers/AudioTestHelper.cs +++ b/OpenAI.Playground/TestHelpers/AudioTestHelper.cs @@ -15,7 +15,7 @@ public static async Task RunSimpleAudioCreateTranscriptionTest(IOpenAIService sd ConsoleExtensions.WriteLine("Audio Create Transcription Test:", ConsoleColor.DarkCyan); const string fileName = "micro-machines.mp3"; - var sampleFile = await File.ReadAllBytesAsync($"SampleData/{fileName}"); + var sampleFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{fileName}"); ConsoleExtensions.WriteLine($"Uploading file {fileName}", ConsoleColor.DarkCyan); var audioResult = await sdk.Audio.CreateTranscription(new AudioCreateTranscriptionRequest @@ -57,7 +57,7 @@ public static async Task RunSimpleAudioCreateTranslationTest(IOpenAIService sdk) ConsoleExtensions.WriteLine("Audio Create Translation Test:", ConsoleColor.DarkCyan); const string fileName = "multilingual.mp3"; - var sampleFile = await File.ReadAllBytesAsync($"SampleData/{fileName}"); + var sampleFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{fileName}"); ConsoleExtensions.WriteLine($"Uploading file {fileName}", ConsoleColor.DarkCyan); var audioResult = await sdk.Audio.CreateTranslation(new AudioCreateTranscriptionRequest diff --git a/OpenAI.Playground/TestHelpers/FileTestHelper.cs b/OpenAI.Playground/TestHelpers/FileTestHelper.cs index 53c1f5b9..a3651b8c 100644 --- a/OpenAI.Playground/TestHelpers/FileTestHelper.cs +++ b/OpenAI.Playground/TestHelpers/FileTestHelper.cs @@ -14,7 +14,7 @@ public static async Task RunSimpleFileTest(IOpenAIService sdk) { const string fileName = "SentimentAnalysisSample.jsonl"; - var sampleFile = await File.ReadAllBytesAsync($"SampleData/{fileName}"); + var sampleFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{fileName}"); var sampleFileAsString = Encoding.UTF8.GetString(sampleFile); ConsoleExtensions.WriteLine($"Uploading file {fileName}", ConsoleColor.DarkCyan); diff --git a/OpenAI.Playground/TestHelpers/FineTuningTestHelper.cs b/OpenAI.Playground/TestHelpers/FineTuningTestHelper.cs index 78ea339d..0a5a4854 100644 --- a/OpenAI.Playground/TestHelpers/FineTuningTestHelper.cs +++ b/OpenAI.Playground/TestHelpers/FineTuningTestHelper.cs @@ -14,7 +14,7 @@ public static async Task RunCaseStudyIsTheModelMakingUntrueStatements(IOpenAISer try { const string fileName = "FineTuningSample1.jsonl"; - var sampleFile = await File.ReadAllBytesAsync($"SampleData/{fileName}"); + var sampleFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{fileName}"); ConsoleExtensions.WriteLine($"Uploading file {fileName}", ConsoleColor.DarkCyan); var uploadFilesResponse = await sdk.Files.FileUpload(UploadFilePurposes.UploadFilePurpose.FineTune, sampleFile, fileName); diff --git a/OpenAI.Playground/TestHelpers/ImageTestHelper.cs b/OpenAI.Playground/TestHelpers/ImageTestHelper.cs index 4191a73c..8ebe9db3 100644 --- a/OpenAI.Playground/TestHelpers/ImageTestHelper.cs +++ b/OpenAI.Playground/TestHelpers/ImageTestHelper.cs @@ -52,8 +52,8 @@ public static async Task RunSimpleCreateImageEditTest(IOpenAIService sdk) // Images should be in png format with ARGB. 
I got help from this website to generate sample mask // https://www.online-image-editor.com/ - var maskFile = await File.ReadAllBytesAsync($"SampleData/{maskFileName}"); - var originalFile = await File.ReadAllBytesAsync($"SampleData/{originalFileName}"); + var maskFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{maskFileName}"); + var originalFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{originalFileName}"); try { @@ -98,7 +98,7 @@ public static async Task RunSimpleCreateImageVariationTest(IOpenAIService sdk) ConsoleExtensions.WriteLine("Image Variation Create Testing is starting:", ConsoleColor.Cyan); const string originalFileName = "image_edit_original.png"; - var originalFile = await File.ReadAllBytesAsync($"SampleData/{originalFileName}"); + var originalFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{originalFileName}"); try { diff --git a/OpenAI.Playground/TestHelpers/TokenizerTestHelper.cs b/OpenAI.Playground/TestHelpers/TokenizerTestHelper.cs index fc62886e..f088a56c 100644 --- a/OpenAI.Playground/TestHelpers/TokenizerTestHelper.cs +++ b/OpenAI.Playground/TestHelpers/TokenizerTestHelper.cs @@ -13,7 +13,7 @@ public static async Task RunTokenizerTest() ConsoleExtensions.WriteLine("Tokenizer Test:", ConsoleColor.DarkCyan); const string fileName = "TokenizerSample.txt"; - var input = await File.ReadAllTextAsync($"SampleData/{fileName}"); + var input = await FileExtensions.ReadAllTextAsync($"SampleData/{fileName}"); var encodedList = TokenizerGpt3.Encode(input); if (encodedList.Count == 64) { From b3b2b340a913d563df260e7fe2444a9fb8853161 Mon Sep 17 00:00:00 2001 From: Patrick D'Cruze Date: Wed, 8 Mar 2023 13:35:07 +0800 Subject: [PATCH 13/14] Add binding redirects so that the Playground program works in .Net Standard 2. Downgrade package references and add new package references so it will compile and run in both environments. 
--- OpenAI.Playground/App.config | 23 ++++++++++++++++++++++ OpenAI.Playground/OpenAI.Playground.csproj | 6 ++++-- OpenAI.SDK/OpenAI.GPT3.csproj | 2 +- 3 files changed, 28 insertions(+), 3 deletions(-) create mode 100644 OpenAI.Playground/App.config diff --git a/OpenAI.Playground/App.config b/OpenAI.Playground/App.config new file mode 100644 index 00000000..6c06e18f --- /dev/null +++ b/OpenAI.Playground/App.config @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/OpenAI.Playground/OpenAI.Playground.csproj b/OpenAI.Playground/OpenAI.Playground.csproj index 6bd03eb1..e2f19244 100644 --- a/OpenAI.Playground/OpenAI.Playground.csproj +++ b/OpenAI.Playground/OpenAI.Playground.csproj @@ -3,6 +3,7 @@ Exe net6.0;netstandard2.0 + true PackageReference enable enable @@ -32,12 +33,13 @@ + - - + + diff --git a/OpenAI.SDK/OpenAI.GPT3.csproj b/OpenAI.SDK/OpenAI.GPT3.csproj index cc239bf2..54199453 100644 --- a/OpenAI.SDK/OpenAI.GPT3.csproj +++ b/OpenAI.SDK/OpenAI.GPT3.csproj @@ -48,7 +48,7 @@ - + From 9eb3d5b8448732758f7d5b0d05dd1c521de6aef8 Mon Sep 17 00:00:00 2001 From: Tolga Kayhan Date: Tue, 14 Mar 2023 23:25:06 +0000 Subject: [PATCH 14/14] Bug Fix on playground, version bump for v6.8.0 --- .../TestHelpers/TokenizerTestHelper.cs | 2 +- OpenAI.SDK/ObjectModels/Models.cs | 58 ++++++++++++++++++- OpenAI.SDK/OpenAI.GPT3.csproj | 4 +- Readme.md | 16 ++--- 4 files changed, 64 insertions(+), 16 deletions(-) diff --git a/OpenAI.Playground/TestHelpers/TokenizerTestHelper.cs b/OpenAI.Playground/TestHelpers/TokenizerTestHelper.cs index 14523311..e42ca053 100644 --- a/OpenAI.Playground/TestHelpers/TokenizerTestHelper.cs +++ b/OpenAI.Playground/TestHelpers/TokenizerTestHelper.cs @@ -41,7 +41,7 @@ public static async Task RunTokenizerTestCrClean() ConsoleExtensions.WriteLine("Tokenizer Test:", ConsoleColor.DarkCyan); const string fileName = "TokenizerSample.txt"; - var input = await File.ReadAllTextAsync($"SampleData/{fileName}"); + var input = await FileExtensions.ReadAllTextAsync($"SampleData/{fileName}"); var encodedList = TokenizerGpt3.Encode(input,true); if (encodedList.Count == 64) { diff --git a/OpenAI.SDK/ObjectModels/Models.cs b/OpenAI.SDK/ObjectModels/Models.cs index 00d1a5cf..34195080 100644 --- a/OpenAI.SDK/ObjectModels/Models.cs +++ b/OpenAI.SDK/ObjectModels/Models.cs @@ -1,5 +1,10 @@ -namespace OpenAI.GPT3.ObjectModels; +using System.Diagnostics.CodeAnalysis; +#pragma warning disable CS1591 +namespace OpenAI.GPT3.ObjectModels; + +[SuppressMessage("ReSharper", "InconsistentNaming")] +[SuppressMessage("ReSharper", "MemberCanBePrivate.Global")] public static class Models { public enum BaseModel @@ -11,6 +16,7 @@ public enum BaseModel Cushman } + [SuppressMessage("ReSharper", "InconsistentNaming")] public enum Model { Ada, @@ -65,7 +71,12 @@ public enum Model ChatGpt3_5Turbo, ChatGpt3_5Turbo0301, - WhisperV1, + Gpt_4, + Gpt_4_0314, + Gpt_4_32k, + Gpt_4_32k_0314, + + WhisperV1 } public enum Subject @@ -84,6 +95,34 @@ public enum Subject TextEmbedding } + /// + /// More capable than any GPT-3.5 model, able to do more complex tasks, and optimized for chat. Will be updated with + /// our latest model iteration. + /// 8,192 tokens Up to Sep 2021 + /// + public static string Gpt4 => "gpt-4"; + + /// + /// Snapshot of gpt-4 from March 14th 2023. Unlike gpt-4, this model will not receive updates, and will only be + /// supported for a three month period ending on June 14th 2023. 
+ /// 8,192 tokens Up to Sep 2021 + /// + public static string Gpt_4_0314 => "gpt-4-0314"; + + /// + /// Same capabilities as the base gpt-4 mode but with 4x the context length. Will be updated with our latest model + /// iteration. + /// 32,768 tokens Up to Sep 2021 + /// + public static string Gpt_4_32k => "gpt-4-32k"; + + /// + /// Snapshot of gpt-4-32 from March 14th 2023. Unlike gpt-4-32k, this model will not receive updates, and will only be + /// supported for a three month period ending on June 14th 2023. + /// 32,768 tokens Up to Sep 2021 + /// + public static string Gpt_4_32k_0314 => "gpt-4-32k-0314"; + public static string Ada => "ada"; public static string Babbage => "babbage"; public static string Curie => "curie"; @@ -129,7 +168,18 @@ public enum Subject public static string TextEmbeddingAdaV2 => ModelNameBuilder(BaseModel.Ada, Subject.TextEmbedding, "002"); + /// + /// Most capable GPT-3.5 model and optimized for chat at 1/10th the cost of text-davinci-003. Will be updated with our + /// latest model iteration. + /// 4,096 tokens Up to Sep 2021 + /// public static string ChatGpt3_5Turbo => "gpt-3.5-turbo"; + + /// + /// Snapshot of gpt-3.5-turbo from March 1st 2023. Unlike gpt-3.5-turbo, this model will not receive updates, and will + /// only be supported for a three month period ending on June 1st 2023. + /// 4,096 tokens Up to Sep 2021 + /// public static string ChatGpt3_5Turbo0301 => "gpt-3.5-turbo-0301"; public static string WhisperV1 => "whisper-1"; @@ -201,6 +251,10 @@ public static string EnumToString(this Model model) Model.ChatGpt3_5Turbo0301 => ChatGpt3_5Turbo0301, Model.WhisperV1 => WhisperV1, Model.TextEmbeddingAdaV2 => TextEmbeddingAdaV2, + Model.Gpt_4 => Gpt4, + Model.Gpt_4_0314 => Gpt_4_0314, + Model.Gpt_4_32k => Gpt_4_32k, + Model.Gpt_4_32k_0314 => Gpt_4_32k_0314, _ => throw new ArgumentOutOfRangeException(nameof(model), model, null) }; } diff --git a/OpenAI.SDK/OpenAI.GPT3.csproj b/OpenAI.SDK/OpenAI.GPT3.csproj index b094797f..4147d489 100644 --- a/OpenAI.SDK/OpenAI.GPT3.csproj +++ b/OpenAI.SDK/OpenAI.GPT3.csproj @@ -10,13 +10,13 @@ https://openai.com/ OpenAI-Betalgo.png true - 6.7.3 + 6.8.0 Tolga Kayhan, Betalgo Betalgo Up Ltd. OpenAI ChatGPT, Whisper, GPT-3 and DALL·E dotnet SDK Dotnet SDK for OpenAI ChatGPT, Whisper, GPT-3 and DALL·E https://github.com/betalgo/openai/ - openAI,chatgpt,gpt-3,DALL·E,whisper,azureOpenAI,ai,betalgo,NLP,dalle,,dall-e,OpenAI,OpenAi,openAi,azure + openAI,chatGPT,gpt-3,gpt-4,DALL·E,whisper,azureOpenAI,ai,betalgo,NLP,dalle,,dall-e,OpenAI,OpenAi,openAi,azure Betalgo.OpenAI.GPT3 Readme.md True diff --git a/Readme.md b/Readme.md index cb5b2616..94d1a4a0 100644 --- a/Readme.md +++ b/Readme.md @@ -26,6 +26,7 @@ https://github.com/betalgo/openai/wiki - [x] Tokenizer Support (has a bug, working on for fixing the issue) - [x] Whisper - [ ] Rate limit support +- [ ] CahtGPT-4 support For changelogs please go to end of the document. @@ -174,8 +175,12 @@ I initially developed this SDK for my personal use and later decided to share it I will always be using the latest libraries, and future releases will frequently include breaking changes. Please take this into consideration before deciding to use the library. I want to make it clear that I cannot accept any responsibility for any damage caused by using the library. If you feel that this is not suitable for your purposes, you are free to explore alternative libraries or the OpenAI Web-API. +I am incredibly busy. 
If I forgot your name, please accept my apologies and let me know so I can add it to the list. ## Changelog +### 6.8.0 +* Added .Net Standart Support, Massive thanks to @pdcruze and @ricaun + ### 6.7.3 * **Breaking change**: `ChatMessage.FromAssistance` is now `ChatMessage.FromAssistant`. Thanks to @Swimburger * The Tokenizer method has been extended with `cleanUpCREOL`. You can use this option to clean up Windows-style line endings. Thanks to @gspentzas1991 @@ -196,14 +201,3 @@ I will always be using the latest libraries, and future releases will frequently * Added support for Chat GPT API * Fixed Tokenizer Bug, it was not working properly. -### 6.6.8 -* **Breaking Changes** - * Renamed `Engine` keyword to `Model` in accordance with OpenAI's new naming convention. - * Deprecated `DefaultEngineId` in favor of `DefaultModelId`. - * `DefaultEngineId` and `DefaultModelId` is not static anymore. - -* Added support for Azure OpenAI, a big thanks to @copypastedeveloper! -* Added support for Tokenizer, inspired by @dluc's https://github.com/dluc/openai-tools repository. Please consider giving the repo a star. - -These two changes are recent additions, so please let me know if you encounter any issues. -* Updated documentation links from beta.openai.com to platform.openai.com.
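To close the loop on what the new netstandard2.0 target enables: the same SDK calls can now be compiled into a classic .NET Framework (for example net472) console app. The sketch below only illustrates that; apart from ChatCompletionCreateRequest, ChatMessage and the Create(...) extension from PATCH 03, the member names used here (OpenAIService, OpenAiOptions.ApiKey, the ChatCompletion accessor, ChatMessage.FromSystem/FromUser, Successful, Choices) are written from memory of the SDK's documentation rather than taken from these diffs, so verify them against the package version you install:

    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Threading.Tasks;
    using OpenAI.GPT3;
    using OpenAI.GPT3.Managers;
    using OpenAI.GPT3.ObjectModels;
    using OpenAI.GPT3.ObjectModels.RequestModels;

    public static class Program
    {
        public static async Task Main()
        {
            // Assumed API surface -- check against the installed package.
            var openAiService = new OpenAIService(new OpenAiOptions
            {
                ApiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY")
            });

            // Uses the Create(...) extension method introduced in PATCH 03.
            var result = await openAiService.ChatCompletion.Create(new ChatCompletionCreateRequest
            {
                Messages = new List<ChatMessage>
                {
                    ChatMessage.FromSystem("You are a helpful assistant."),
                    ChatMessage.FromUser("Say hello from a .NET Framework console app.")
                }
            }, Models.Model.ChatGpt3_5Turbo);

            if (result.Successful)
            {
                Console.WriteLine(result.Choices.First().Message.Content);
            }
        }
    }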