diff --git a/OpenAI.Playground/Program.cs b/OpenAI.Playground/Program.cs
index 4cc419bb..914c1673 100644
--- a/OpenAI.Playground/Program.cs
+++ b/OpenAI.Playground/Program.cs
@@ -49,11 +49,12 @@
//await AssistantTestHelper.MessagesTestHelper.RunTests(sdk);
//await AssistantTestHelper.RunTestHelper.RunTests(sdk);
//await AssistantTestHelper.VectorTestHelper.RunTests(sdk);
+//await AssistantTestHelper3.RunTests(sdk);
// Vision
//await VisionTestHelper.RunSimpleVisionTest(sdk);
-//await VisionTestHelper.RunSimpleVisionStreamTest(sdk);
//await VisionTestHelper.RunSimpleVisionTestUsingBase64EncodedImage(sdk);
+//await VisionTestHelper.RunSimpleVisionStreamTest(sdk);
// Tools
//await ChatCompletionTestHelper.RunChatFunctionCallTest(sdk);
diff --git a/OpenAI.Playground/TestHelpers/AssistantHelpers/AssistantTestHelper3.cs b/OpenAI.Playground/TestHelpers/AssistantHelpers/AssistantTestHelper3.cs
index 0f8e11a2..57d7c8d8 100644
--- a/OpenAI.Playground/TestHelpers/AssistantHelpers/AssistantTestHelper3.cs
+++ b/OpenAI.Playground/TestHelpers/AssistantHelpers/AssistantTestHelper3.cs
@@ -10,6 +10,12 @@ namespace OpenAI.Playground.TestHelpers.AssistantHelpers;
internal static class AssistantTestHelper3
{
+ public static async Task RunTests(IOpenAIService sdk)
+ {
+ await RunAssistantApiTest(sdk);
+ await RunHowAssistantsWorkTest(sdk);
+ }
+
///
/// Test Assistant api
///
@@ -133,8 +139,11 @@ public static async Task RunHowAssistantsWorkTest(IOpenAIService sdk)
var sampleFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{fileName}");
var sampleFileAsString = Encoding.UTF8.GetString(sampleFile);
+ // HACK: drp052424 - CSV format is not supported for Assistant, so we tell OpenAI it's a TXT. A better solution would be to update the test asset.
+ var fileNameForOpenAI = Path.ChangeExtension(fileName, ".txt");
+
ConsoleExtensions.WriteLine($"Uploading file: {fileName}", ConsoleColor.DarkCyan);
- var uploadFilesResponse = await sdk.Files.FileUpload(UploadFilePurposes.UploadFilePurpose.Assistants, sampleFile, fileName);
+ var uploadFilesResponse = await sdk.Files.FileUpload(UploadFilePurposes.UploadFilePurpose.Assistants, sampleFile, fileNameForOpenAI);
if (uploadFilesResponse.Successful)
{
ConsoleExtensions.WriteLine($"{fileName} uploaded", ConsoleColor.DarkGreen);
@@ -145,8 +154,8 @@ public static async Task RunHowAssistantsWorkTest(IOpenAIService sdk)
return;
}
- var uplaodFileId = uploadFilesResponse.Id;
- ConsoleExtensions.WriteLine($"uplaodFileId:{uplaodFileId}, purpose:{uploadFilesResponse.Purpose}");
+ var uploadFileId = uploadFilesResponse.Id;
+ ConsoleExtensions.WriteLine($"uploadFileId:{uploadFileId}, purpose:{uploadFilesResponse.Purpose}");
#endregion
@@ -163,7 +172,6 @@ public static async Task RunHowAssistantsWorkTest(IOpenAIService sdk)
Name = "Qicha",
Model = Models.Gpt_3_5_Turbo_1106,
Tools = new List<ToolDefinition>() { ToolDefinition.DefineCodeInterpreter(), ToolDefinition.DefineFileSearch(), ToolDefinition.DefineFunction(func) },
- FileIds = new List<string>() { uplaodFileId }
});
if (assistantResult.Successful)
@@ -207,7 +215,12 @@ public static async Task RunHowAssistantsWorkTest(IOpenAIService sdk)
{
Role = StaticValues.AssistantsStatics.MessageStatics.Roles.User,
Content =new("Where is Zhejiang Jiacheng Supply Chain Co., LTD."),
- Attachments = [new() { FileId = uplaodFileId }]
+ // Tools must be specified for Attachments
+ Attachments = [new()
+ {
+ FileId = uploadFileId,
+ Tools = [ ToolDefinition.DefineFileSearch() ]
+ }]
});
if (messageResult.Successful)
diff --git a/OpenAI.Playground/TestHelpers/AssistantHelpers/MessagesTestHelper.cs b/OpenAI.Playground/TestHelpers/AssistantHelpers/MessagesTestHelper.cs
index 99d0765b..e8bbf899 100644
--- a/OpenAI.Playground/TestHelpers/AssistantHelpers/MessagesTestHelper.cs
+++ b/OpenAI.Playground/TestHelpers/AssistantHelpers/MessagesTestHelper.cs
@@ -1,7 +1,9 @@
using OpenAI.Interfaces;
using OpenAI.ObjectModels;
+using OpenAI.ObjectModels.RequestModels;
using OpenAI.ObjectModels.SharedModels;
using OpenAI.Playground.ExtensionsAndHelpers;
+using static OpenAI.ObjectModels.StaticValues;
namespace OpenAI.Playground.TestHelpers.AssistantHelpers;
@@ -11,11 +13,13 @@ internal static partial class MessagesTestHelper
{
private static string? CreatedMessageId { get; set; }
private static string? CreatedThreadId { get; set; }
+ private static string? CreatedFileId { get; set; }
public static async Task RunTests(IOpenAIService openAI)
{
ConsoleExtensions.WriteLine("Message Basics Testing is starting:", ConsoleColor.Blue);
await CreateMessage(openAI);
+ await CreateMessageWithImage(openAI);
await ListMessages(openAI);
await RetrieveMessage(openAI);
await ModifyMessage(openAI);
@@ -39,7 +43,62 @@ public static async Task CreateMessage(IOpenAIService openAI)
}
CreatedThreadId = thread.Id;
- var result = await openAI.Beta.Messages.CreateMessage(CreatedThreadId, new(StaticValues.AssistantsStatics.MessageStatics.Roles.User, new("How does AI work? Explain it in simple terms.")));
+ var result = await openAI.Beta.Messages.CreateMessage(CreatedThreadId, new(AssistantsStatics.MessageStatics.Roles.User, new("How does AI work? Explain it in simple terms.")));
+ if (result.Successful)
+ {
+ CreatedMessageId = result.Id;
+ ConsoleExtensions.WriteLine($"Message Created Successfully with ID: {result.Id}", ConsoleColor.Green);
+ }
+ else
+ {
+ ConsoleExtensions.WriteError(result.Error);
+ }
+ }
+
+ public static async Task CreateMessageWithImage(IOpenAIService openAI)
+ {
+ ConsoleExtensions.WriteLine("Create MessageWithImage Testing is starting:", ConsoleColor.Cyan);
+
+ var prompt = "Tell me about this image";
+ var filename = "image_edit_original.png";
+ var filePath = $"SampleData/{filename}";
+
+ var sampleBytes = await FileExtensions.ReadAllBytesAsync(filePath);
+
+ // Upload File
+ ConsoleExtensions.WriteLine("Upload File Test", ConsoleColor.DarkCyan);
+
+ ConsoleExtensions.WriteLine($"Uploading file: {filename}", ConsoleColor.DarkCyan);
+ var uploadFilesResponse = await openAI.Files.FileUpload(UploadFilePurposes.UploadFilePurpose.Vision, sampleBytes, filename);
+ if (uploadFilesResponse.Successful)
+ {
+ ConsoleExtensions.WriteLine($"{filename} uploaded", ConsoleColor.DarkGreen);
+ }
+ else
+ {
+ ConsoleExtensions.WriteLine($"{filename} failed", ConsoleColor.DarkRed);
+ return;
+ }
+
+ var uploadFileId = uploadFilesResponse.Id;
+ ConsoleExtensions.WriteLine($"uploadFileId:{uploadFileId}, purpose:{uploadFilesResponse.Purpose}");
+
+
+ // Message.ImageFileContent
+ ConsoleExtensions.WriteLine("Message with ImageFileContent Test:", ConsoleColor.DarkCyan);
+
+ MessageContentOneOfType content = new([
+ MessageContent.TextContent(prompt),
+ MessageContent.ImageFileContent(uploadFileId, ImageStatics.ImageDetailTypes.High)
+ ]);
+
+ MessageCreateRequest request = new()
+ {
+ Role = AssistantsStatics.MessageStatics.Roles.User,
+ Content = content
+ };
+
+ var result = await openAI.Beta.Messages.CreateMessage(CreatedThreadId!, request);
if (result.Successful)
{
CreatedMessageId = result.Id;
@@ -203,6 +262,11 @@ private static async Task Cleanup(IOpenAIService sdk)
{
await sdk.Beta.Threads.ThreadDelete(CreatedThreadId);
}
+
+ if (!string.IsNullOrWhiteSpace(CreatedFileId))
+ {
+ await sdk.Files.DeleteFile(CreatedFileId);
+ }
}
}
}
\ No newline at end of file
diff --git a/OpenAI.Playground/TestHelpers/AssistantHelpers/VectorTestHelper.cs b/OpenAI.Playground/TestHelpers/AssistantHelpers/VectorTestHelper.cs
index f0ebdb84..3b313612 100644
--- a/OpenAI.Playground/TestHelpers/AssistantHelpers/VectorTestHelper.cs
+++ b/OpenAI.Playground/TestHelpers/AssistantHelpers/VectorTestHelper.cs
@@ -502,11 +502,11 @@ public static async Task CancelVectorStoreFileBatch(IOpenAIService openAI)
private static async Task Cleanup(IOpenAIService sdk)
{
-
- if (!string.IsNullOrWhiteSpace(CreatedVectorFileId))
+ if (!string.IsNullOrWhiteSpace(CreatedVectorFileId) && !string.IsNullOrWhiteSpace(CreatedVectorId))
{
await sdk.Beta.VectorStoreFiles.DeleteVectorStoreFile(CreatedVectorId, CreatedVectorFileId);
}
+
if (!string.IsNullOrWhiteSpace(CreatedFileId1))
{
await sdk.Files.DeleteFile(CreatedFileId1);
@@ -523,4 +523,4 @@ private static async Task Cleanup(IOpenAIService sdk)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/OpenAI.Playground/TestHelpers/VisionTestHelper.cs b/OpenAI.Playground/TestHelpers/VisionTestHelper.cs
index 2c92a52d..6bd33d12 100644
--- a/OpenAI.Playground/TestHelpers/VisionTestHelper.cs
+++ b/OpenAI.Playground/TestHelpers/VisionTestHelper.cs
@@ -10,34 +10,28 @@ internal static class VisionTestHelper
{
public static async Task RunSimpleVisionTest(IOpenAIService sdk)
{
- ConsoleExtensions.WriteLine("VIsion Testing is starting:", ConsoleColor.Cyan);
+ ConsoleExtensions.WriteLine("Vision Testing is starting:", ConsoleColor.Cyan);
try
{
ConsoleExtensions.WriteLine("Vision Test:", ConsoleColor.DarkCyan);
- var completionResult = await sdk.ChatCompletion.CreateCompletion(
- new ChatCompletionCreateRequest
+ var completionResult = await sdk.ChatCompletion.CreateCompletion(new()
+ {
+ Messages = new List<ChatMessage>
{
- Messages = new List<ChatMessage>
+ ChatMessage.FromSystem("You are an image analyzer assistant."),
+ ChatMessage.FromUser(new List<MessageContent>
{
- ChatMessage.FromSystem("You are an image analyzer assistant."),
- ChatMessage.FromUser(
- new List<MessageContent>
- {
- MessageContent.TextContent("What is on the picture in details?"),
- MessageContent.ImageUrlContent(
- "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
- ImageStatics.ImageDetailTypes.High
- )
- }
- ),
- },
- MaxTokens = 300,
- Model = Models.Gpt_4_vision_preview,
- N = 1
- }
- );
+ MessageContent.TextContent("What is on the picture in details?"),
+ MessageContent.ImageUrlContent("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
+ ImageStatics.ImageDetailTypes.High)
+ })
+ },
+ MaxTokens = 300,
+ Model = Models.Gpt_4_vision_preview,
+ N = 1
+ });
if (completionResult.Successful)
{
@@ -47,12 +41,10 @@ public static async Task RunSimpleVisionTest(IOpenAIService sdk)
{
if (completionResult.Error == null)
{
- throw new Exception("Unknown Error");
+ throw new("Unknown Error");
}
- Console.WriteLine(
- $"{completionResult.Error.Code}: {completionResult.Error.Message}"
- );
+ Console.WriteLine($"{completionResult.Error.Code}: {completionResult.Error.Message}");
}
}
catch (Exception e)
@@ -69,28 +61,22 @@ public static async Task RunSimpleVisionStreamTest(IOpenAIService sdk)
{
ConsoleExtensions.WriteLine("Vision Stream Test:", ConsoleColor.DarkCyan);
- var completionResult = sdk.ChatCompletion.CreateCompletionAsStream(
- new ChatCompletionCreateRequest
+ var completionResult = sdk.ChatCompletion.CreateCompletionAsStream(new()
+ {
+ Messages = new List<ChatMessage>
{
- Messages = new List<ChatMessage>
+ ChatMessage.FromSystem("You are an image analyzer assistant."),
+ ChatMessage.FromUser(new List<MessageContent>
{
- ChatMessage.FromSystem("You are an image analyzer assistant."),
- ChatMessage.FromUser(
- new List<MessageContent>
- {
- MessageContent.TextContent("What’s in this image?"),
- MessageContent.ImageUrlContent(
- "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
- ImageStatics.ImageDetailTypes.Low
- )
- }
- ),
- },
- MaxTokens = 300,
- Model = Models.Gpt_4_vision_preview,
- N = 1
- }
- );
+ MessageContent.TextContent("What’s in this image?"),
+ MessageContent.ImageUrlContent("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
+ ImageStatics.ImageDetailTypes.Low)
+ })
+ },
+ MaxTokens = 300,
+ Model = Models.Gpt_4_vision_preview,
+ N = 1
+ });
await foreach (var completion in completionResult)
{
@@ -102,12 +88,10 @@ public static async Task RunSimpleVisionStreamTest(IOpenAIService sdk)
{
if (completion.Error == null)
{
- throw new Exception("Unknown Error");
+ throw new("Unknown Error");
}
- Console.WriteLine(
- $"{completion.Error.Code}: {completion.Error.Message}"
- );
+ Console.WriteLine($"{completion.Error.Code}: {completion.Error.Message}");
}
}
@@ -127,39 +111,26 @@ public static async Task RunSimpleVisionTestUsingBase64EncodedImage(IOpenAIServi
try
{
- ConsoleExtensions.WriteLine(
- "Vision with base64 encoded image Test:",
- ConsoleColor.DarkCyan
- );
+ ConsoleExtensions.WriteLine("Vision with base64 encoded image Test:", ConsoleColor.DarkCyan);
const string originalFileName = "image_edit_original.png";
- var originalFile = await FileExtensions.ReadAllBytesAsync(
- $"SampleData/{originalFileName}"
- );
+ var originalFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{originalFileName}");
- var completionResult = await sdk.ChatCompletion.CreateCompletion(
- new ChatCompletionCreateRequest
+ var completionResult = await sdk.ChatCompletion.CreateCompletion(new()
+ {
+ Messages = new List<ChatMessage>
{
- Messages = new List<ChatMessage>
+ ChatMessage.FromSystem("You are an image analyzer assistant."),
+ ChatMessage.FromUser(new List<MessageContent>
{
- ChatMessage.FromSystem("You are an image analyzer assistant."),
- ChatMessage.FromUser(
- new List<MessageContent>
- {
- MessageContent.TextContent("What is on the picture in details?"),
- MessageContent.ImageBinaryContent(
- originalFile,
- ImageStatics.ImageFileTypes.Png,
- ImageStatics.ImageDetailTypes.High
- )
- }
- ),
- },
- MaxTokens = 300,
- Model = Models.Gpt_4_vision_preview,
- N = 1
- }
- );
+ MessageContent.TextContent("What is on the picture in details?"),
+ MessageContent.ImageBinaryContent(originalFile, ImageStatics.ImageFileTypes.Png, ImageStatics.ImageDetailTypes.High)
+ })
+ },
+ MaxTokens = 300,
+ Model = Models.Gpt_4_vision_preview,
+ N = 1
+ });
if (completionResult.Successful)
{
@@ -169,12 +140,10 @@ public static async Task RunSimpleVisionTestUsingBase64EncodedImage(IOpenAIServi
{
if (completionResult.Error == null)
{
- throw new Exception("Unknown Error");
+ throw new("Unknown Error");
}
- Console.WriteLine(
- $"{completionResult.Error.Code}: {completionResult.Error.Message}"
- );
+ Console.WriteLine($"{completionResult.Error.Code}: {completionResult.Error.Message}");
}
}
catch (Exception e)
@@ -183,4 +152,4 @@ public static async Task RunSimpleVisionTestUsingBase64EncodedImage(IOpenAIServi
throw;
}
}
-}
+}
\ No newline at end of file
diff --git a/OpenAI.SDK/ObjectModels/Models.cs b/OpenAI.SDK/ObjectModels/Models.cs
index 9cfb7f1f..6b170353 100644
--- a/OpenAI.SDK/ObjectModels/Models.cs
+++ b/OpenAI.SDK/ObjectModels/Models.cs
@@ -270,6 +270,7 @@ public enum Subject
public static string TextModeration007 => ModelNameBuilder(BaseModel.None, Subject.TextModeration, "007");
public static string TextModerationLatest => ModelNameBuilder(BaseModel.None, Subject.TextModeration, "latest");
public static string TextModerationStable => ModelNameBuilder(BaseModel.None, Subject.TextModeration, "stable");
+
///
/// Most capable GPT-3.5 model and optimized for chat at 1/10th the cost of text-davinci-003. Will be updated with our
/// latest model iteration.
diff --git a/OpenAI.SDK/ObjectModels/RequestModels/AssistantCreateRequest.cs b/OpenAI.SDK/ObjectModels/RequestModels/AssistantCreateRequest.cs
index 1bcdc7ca..05ac169d 100644
--- a/OpenAI.SDK/ObjectModels/RequestModels/AssistantCreateRequest.cs
+++ b/OpenAI.SDK/ObjectModels/RequestModels/AssistantCreateRequest.cs
@@ -3,7 +3,7 @@
namespace OpenAI.ObjectModels.RequestModels;
-public class AssistantCreateRequest : IOpenAiModels.IModel, IOpenAiModels.IFileIds, IOpenAiModels.IMetaData, IOpenAiModels.ITemperature
+public class AssistantCreateRequest : IOpenAiModels.IModel, IOpenAiModels.IMetaData, IOpenAiModels.ITemperature
{
///
/// The name of the assistant. The maximum length is 256 characters.
@@ -65,18 +65,12 @@ public class AssistantCreateRequest : IOpenAiModels.IModel, IOpenAiModels.IFileI
[JsonPropertyName("response_format")]
public ResponseFormatOneOfType? ResponseFormat { get; set; }
- ///
- /// A list of file IDs attached to this assistant.
- ///
- [JsonPropertyName("file_ids")]
- public List<string>? FileIds { get; set; }
-
///
/// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information
/// about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maxium of
/// 512 characters long.
///
- [JsonPropertyName("metadata")]
+ [JsonPropertyName("metadata")]
public Dictionary<string, string>? Metadata { get; set; }
///
diff --git a/OpenAI.SDK/ObjectModels/RequestModels/AudioCreateTranscriptionRequest.cs b/OpenAI.SDK/ObjectModels/RequestModels/AudioCreateTranscriptionRequest.cs
index da6fc583..ac8e4148 100644
--- a/OpenAI.SDK/ObjectModels/RequestModels/AudioCreateTranscriptionRequest.cs
+++ b/OpenAI.SDK/ObjectModels/RequestModels/AudioCreateTranscriptionRequest.cs
@@ -42,7 +42,7 @@ public record AudioCreateTranscriptionRequest : IOpenAiModels.IModel, IOpenAiMod
public Stream? FileStream { get; set; }
///
- /// FileName
+ /// FileName.
///
public string FileName { get; set; }
diff --git a/OpenAI.SDK/ObjectModels/RequestModels/MessageCreateRequest.cs b/OpenAI.SDK/ObjectModels/RequestModels/MessageCreateRequest.cs
index 1f164d2e..aad15c49 100644
--- a/OpenAI.SDK/ObjectModels/RequestModels/MessageCreateRequest.cs
+++ b/OpenAI.SDK/ObjectModels/RequestModels/MessageCreateRequest.cs
@@ -49,10 +49,11 @@ public MessageCreateRequest(string role, MessageContentOneOfType content, List
- /// The ID of the file to attach to
+ /// The ID of the file to attach. See list of supported file extensions .
///
[JsonPropertyName("file_id")]
public string FileId { get; set; }
+
///
/// The tools to add this file to.
///
diff --git a/OpenAI.SDK/ObjectModels/RequestModels/VisionImageUrl.cs b/OpenAI.SDK/ObjectModels/RequestModels/VisionImageUrl.cs
index 973cb69e..2175ccf3 100644
--- a/OpenAI.SDK/ObjectModels/RequestModels/VisionImageUrl.cs
+++ b/OpenAI.SDK/ObjectModels/RequestModels/VisionImageUrl.cs
@@ -41,7 +41,7 @@ public class MessageImageFile
///
/// The File ID of the image in the message content. Set purpose="vision" when uploading the File if you need to later display the file content.
///
- [JsonPropertyName("file_Id")]
+ [JsonPropertyName("file_id")]
public string FileId { get; set; }
///
/// Specifies the detail level of the image if specified by the user. low uses fewer tokens, you can opt in to high resolution using high.
diff --git a/OpenAI.SDK/ObjectModels/UploadFilePurposes.cs b/OpenAI.SDK/ObjectModels/UploadFilePurposes.cs
index 0204e952..41b331d5 100644
--- a/OpenAI.SDK/ObjectModels/UploadFilePurposes.cs
+++ b/OpenAI.SDK/ObjectModels/UploadFilePurposes.cs
@@ -1,5 +1,12 @@
namespace OpenAI.ObjectModels;
+///
+/// The intended purpose of the uploaded file.
+/// Use "assistants" for Assistants and Message files, "vision" for Assistants image file inputs, "batch" for Batch
+/// API, and "fine-tune" for Fine-tuning.
+/// Upload File Purposes
+/// Upload File Purpose Responses
+///
public static class UploadFilePurposes
{
public enum UploadFilePurpose
@@ -7,19 +14,31 @@ public enum UploadFilePurpose
FineTune,
FineTuneResults,
Assistants,
+ AssistantsOutput,
+ Vision,
+ Batch,
+ BatchOutput,
}
+ public const string Assistants = "assistants";
+ public const string AssistantsOutput = "assistants_output";
+ public const string Vision = "vision";
+ public const string Batch = "batch";
+ public const string BatchOutput = "batch_output";
public const string FineTune = "fine-tune";
public const string FineTuneResults = "fine-tune-results";
- public const string Assistants = "assistants";
public static string EnumToString(this UploadFilePurpose uploadFilePurpose)
{
return uploadFilePurpose switch
{
+ UploadFilePurpose.Assistants => Assistants,
+ UploadFilePurpose.Vision => Vision,
+ UploadFilePurpose.Batch => Batch,
UploadFilePurpose.FineTune => FineTune,
+ UploadFilePurpose.BatchOutput => BatchOutput,
+ UploadFilePurpose.AssistantsOutput => AssistantsOutput,
UploadFilePurpose.FineTuneResults => FineTuneResults,
- UploadFilePurpose.Assistants => Assistants,
_ => throw new ArgumentOutOfRangeException(nameof(uploadFilePurpose), uploadFilePurpose, null)
};
}
@@ -28,9 +47,13 @@ public static UploadFilePurpose ToEnum(string filePurpose)
{
return filePurpose switch
{
+ Assistants => UploadFilePurpose.Assistants,
+ Vision => UploadFilePurpose.Vision,
+ Batch => UploadFilePurpose.Batch,
FineTune => UploadFilePurpose.FineTune,
+ BatchOutput => UploadFilePurpose.BatchOutput,
+ AssistantsOutput => UploadFilePurpose.AssistantsOutput,
FineTuneResults => UploadFilePurpose.FineTuneResults,
- Assistants => UploadFilePurpose.Assistants,
_ => throw new ArgumentOutOfRangeException(nameof(filePurpose), filePurpose, null)
};
}
diff --git a/OpenAI.sln b/OpenAI.sln
index 96e37dd2..1a061e10 100644
--- a/OpenAI.sln
+++ b/OpenAI.sln
@@ -25,7 +25,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "OpenAI.Utilities", "OpenAI.
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "OpenAI.UtilitiesPlayground", "OpenAI.UtilitiesPlayground\OpenAI.UtilitiesPlayground.csproj", "{A2CEF336-DE84-41A2-880E-84B2871FF929}"
EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "OpenAI.Utilities.Tests", "OpenAI.Utilities.Tests\OpenAI.Utilities.Tests.csproj", "{8BC7E997-8591-4ED4-BA4A-486B21419A3E}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "OpenAI.Utilities.Tests", "OpenAI.Utilities.Tests\OpenAI.Utilities.Tests.csproj", "{8BC7E997-8591-4ED4-BA4A-486B21419A3E}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution