diff --git a/sdk/openai/Azure.AI.OpenAI/CHANGELOG.md b/sdk/openai/Azure.AI.OpenAI/CHANGELOG.md index 65ac4ae4477aa..6004ba05af73e 100644 --- a/sdk/openai/Azure.AI.OpenAI/CHANGELOG.md +++ b/sdk/openai/Azure.AI.OpenAI/CHANGELOG.md @@ -6,6 +6,64 @@ ### Breaking Changes +This update includes a number of version-to-version breaking changes to the API. + +#### `deploymentOrModelName` moved to `*Options.DeploymentName` + +`deploymentOrModelName` and related method parameters on `OpenAIClient` have been moved to `DeploymentName` +properties in the corresponding method options. This is intended to promote consistency across scenario, +language, and Azure/non-Azure OpenAI use. + +As an example, the following: + +```csharp +ChatCompletionsOptions chatCompletionsOptions = new() +{ + Messages = { new(ChatRole.User, "Hello, assistant!") }, +}; +Response response = client.GetChatCompletions("gpt-4", chatCompletionsOptions); +``` + +...is now re-written as: + +```csharp +ChatCompletionsOptions chatCompletionsOptions = new() +{ + DeploymentName = "gpt-4", + Messages = { new(ChatRole.User, "Hello, assistant!") }, +}; +Response response = client.GetChatCompletions(chatCompletionsOptions); +``` + +#### Consistency in complex method options type constructors + +With the migration of `DeploymentName` into method complex options types, these options types have now been snapped to +follow a common pattern: each complex options type will feature a default constructor that allows `init`-style setting +of properties as well as a single additional constructor that accepts *all* required parameters for the corresponding +method. Existing constructors that no longer meet that "all" requirement, including those impacted by the addition of +`DeploymentName`, have been removed. The "convenience" constructors that represented required parameter data +differently -- for example, `EmbeddingsOptions(string)`, have also been removed in favor of the consistent "set of +directly provide" choice. 
+ +More exhaustively, *removed* are: + +- `AudioTranscriptionOptions(BinaryData)` +- `AudioTranslationOptions(BinaryData)` +- `ChatCompletionsOptions(IEnumerable)` +- `CompletionsOptions(IEnumerable)` +- `EmbeddingsOptions(string)` +- `EmbeddingsOptions(IEnumerable)` + +And *added* as replacements are: + +- `AudioTranscriptionOptions(string, BinaryData)` +- `AudioTranslationOptions(string, BinaryData)` +- `ChatCompletionsOptions(string, IEnumerable)` +- `CompletionsOptions(string, IEnumerable)` +- `EmbeddingsOptions(string, IEnumerable)` + +#### Embeddings + - Changed the representation of embeddings from `IReadOnlyList` to `ReadOnlyMemory`. ### Bugs Fixed diff --git a/sdk/openai/Azure.AI.OpenAI/README.md b/sdk/openai/Azure.AI.OpenAI/README.md index bf361355609e3..c807fc82a35f4 100644 --- a/sdk/openai/Azure.AI.OpenAI/README.md +++ b/sdk/openai/Azure.AI.OpenAI/README.md @@ -74,9 +74,11 @@ OpenAIClient client = useAzureOpenAI new AzureKeyCredential("your-azure-openai-resource-api-key")) : new OpenAIClient("your-api-key-from-platform.openai.com"); -Response response = await client.GetCompletionsAsync( - "text-davinci-003", // assumes a matching model deployment or model name - "Hello, world!"); +Response response = await client.GetCompletionsAsync(new CompletionsOptions() +{ + DeploymentName = "text-davinci-003", // assumes a matching model deployment or model name + Prompts = { "Hello, world!" }, +}); foreach (Choice choice in response.Value.Choices) { @@ -111,11 +113,13 @@ The `GenerateChatbotResponse` method authenticates using a DefaultAzureCredentia string endpoint = "https://myaccount.openai.azure.com/"; var client = new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()); -string deploymentName = "text-davinci-003"; -string prompt = "What is Azure OpenAI?"; -Console.Write($"Input: {prompt}"); +CompletionsOptions completionsOptions = new() +{ + DeploymentName = "text-davinci-003", + Prompts = { "What is Azure OpenAI?" 
}, +}; -Response completionsResponse = client.GetCompletions(deploymentName, prompt); +Response completionsResponse = client.GetCompletions(completionsOptions); string completion = completionsResponse.Value.Choices[0].Text; Console.WriteLine($"Chatbot: {completion}"); ``` @@ -130,25 +134,24 @@ string key = "YOUR_AZURE_OPENAI_KEY"; string endpoint = "https://myaccount.openai.azure.com/"; var client = new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(key)); -List examplePrompts = new(){ - "How are you today?", - "What is Azure OpenAI?", - "Why do children love dinosaurs?", - "Generate a proof of Euler's identity", - "Describe in single words only the good things that come into your mind about your mother.", +CompletionsOptions completionsOptions = new() +{ + DeploymentName = "text-davinci-003", + Prompts = + { + "How are you today?", + "What is Azure OpenAI?", + "Why do children love dinosaurs?", + "Generate a proof of Euler's identity", + "Describe in single words only the good things that come into your mind about your mother." 
+ }, }; -string deploymentName = "text-davinci-003"; +Response completionsResponse = client.GetCompletions(completionsOptions); -foreach (string prompt in examplePrompts) +foreach (Choice choice in completionsResponse.Value.Choices) { - Console.Write($"Input: {prompt}"); - CompletionsOptions completionsOptions = new CompletionsOptions(); - completionsOptions.Prompts.Add(prompt); - - Response completionsResponse = client.GetCompletions(deploymentName, completionsOptions); - string completion = completionsResponse.Value.Choices[0].Text; - Console.WriteLine($"Chatbot: {completion}"); + Console.WriteLine($"Response for prompt {choice.Index}: {choice.Text}"); } ``` @@ -180,12 +183,11 @@ string summarizationPrompt = @$" Console.Write($"Input: {summarizationPrompt}"); var completionsOptions = new CompletionsOptions() { + DeploymentName = "text-davinci-003", Prompts = { summarizationPrompt }, }; -string deploymentName = "text-davinci-003"; - -Response completionsResponse = client.GetCompletions(deploymentName, completionsOptions); +Response completionsResponse = client.GetCompletions(completionsOptions); string completion = completionsResponse.Value.Choices[0].Text; Console.WriteLine($"Summarization: {completion}"); ``` @@ -197,6 +199,7 @@ string nonAzureOpenAIApiKey = "your-api-key-from-platform.openai.com"; var client = new OpenAIClient(nonAzureOpenAIApiKey, new OpenAIClientOptions()); var chatCompletionsOptions = new ChatCompletionsOptions() { + DeploymentName = "gpt-3.5-turbo", // Use DeploymentName for "model" with non-Azure clients Messages = { new ChatMessage(ChatRole.System, "You are a helpful assistant. 
You will talk like a pirate."), @@ -206,9 +209,8 @@ var chatCompletionsOptions = new ChatCompletionsOptions() } }; -Response response = await client.GetChatCompletionsStreamingAsync( - deploymentOrModelName: "gpt-3.5-turbo", - chatCompletionsOptions); +Response response + = await client.GetChatCompletionsStreamingAsync(chatCompletionsOptions); using StreamingChatCompletions streamingChatCompletions = response.Value; await foreach (StreamingChatChoice choice in streamingChatCompletions.GetChoicesStreaming()) @@ -274,16 +276,17 @@ var conversationMessages = new List() new(ChatRole.User, "What is the weather like in Boston?"), }; -var chatCompletionsOptions = new ChatCompletionsOptions(); +var chatCompletionsOptions = new ChatCompletionsOptions() +{ + DeploymentName = "gpt-35-turbo-0613", +}; foreach (ChatMessage chatMessage in conversationMessages) { chatCompletionsOptions.Messages.Add(chatMessage); } chatCompletionsOptions.Functions.Add(getWeatherFuntionDefinition); -Response response = await client.GetChatCompletionsAsync( - "gpt-35-turbo-0613", - chatCompletionsOptions); +Response response = await client.GetChatCompletionsAsync(chatCompletionsOptions); ``` If the model determines that it should call a Chat Function, a finish reason of 'FunctionCall' will be populated on @@ -345,6 +348,7 @@ See [the Azure OpenAI using your own data quickstart](https://learn.microsoft.co ```C# Snippet:ChatUsingYourOwnData var chatCompletionsOptions = new ChatCompletionsOptions() { + DeploymentName = "gpt-35-turbo-0613", Messages = { new ChatMessage( @@ -368,9 +372,7 @@ var chatCompletionsOptions = new ChatCompletionsOptions() } } }; -Response response = await client.GetChatCompletionsAsync( - "gpt-35-turbo-0613", - chatCompletionsOptions); +Response response = await client.GetChatCompletionsAsync(chatCompletionsOptions); ChatMessage message = response.Value.Choices[0].Message; // The final, data-informed response still appears in the ChatMessages as usual 
Console.WriteLine($"{message.Role}: {message.Content}"); @@ -388,9 +390,12 @@ foreach (ChatMessage contextMessage in message.AzureExtensionsContext.Messages) ### Generate embeddings ```C# Snippet:GenerateEmbeddings -string deploymentOrModelName = "text-embedding-ada-002"; -EmbeddingsOptions embeddingsOptions = new("Your text string goes here"); -Response response = await client.GetEmbeddingsAsync(deploymentOrModelName, embeddingsOptions); +EmbeddingsOptions embeddingsOptions = new() +{ + DeploymentName = "text-embedding-ada-002", + Input = { "Your text string goes here" }, +}; +Response response = await client.GetEmbeddingsAsync(embeddingsOptions); // The response includes the generated embedding. EmbeddingItem item = response.Value.Data[0]; @@ -418,13 +423,13 @@ using Stream audioStreamFromFile = File.OpenRead("myAudioFile.mp3"); var transcriptionOptions = new AudioTranscriptionOptions() { + DeploymentName = "my-whisper-deployment", // whisper-1 as model name for non-Azure OpenAI AudioData = BinaryData.FromStream(audioStreamFromFile), ResponseFormat = AudioTranscriptionFormat.Verbose, }; -Response transcriptionResponse = await client.GetAudioTranscriptionAsync( - deploymentId: "my-whisper-deployment", // whisper-1 as model name for non-Azure OpenAI - transcriptionOptions); +Response transcriptionResponse + = await client.GetAudioTranscriptionAsync(transcriptionOptions); AudioTranscription transcription = transcriptionResponse.Value; // When using Simple, SRT, or VTT formats, only transcription.Text will be populated @@ -439,13 +444,12 @@ using Stream audioStreamFromFile = File.OpenRead("mySpanishAudioFile.mp3"); var translationOptions = new AudioTranslationOptions() { + DeploymentName = "my-whisper-deployment", // whisper-1 as model name for non-Azure OpenAI AudioData = BinaryData.FromStream(audioStreamFromFile), ResponseFormat = AudioTranslationFormat.Verbose, }; -Response translationResponse = await client.GetAudioTranslationAsync( - deploymentId: 
"my-whisper-deployment", // whisper-1 as model name for non-Azure OpenAI - translationOptions); +Response translationResponse = await client.GetAudioTranslationAsync(translationOptions); AudioTranslation translation = translationResponse.Value; // When using Simple, SRT, or VTT formats, only translation.Text will be populated diff --git a/sdk/openai/Azure.AI.OpenAI/api/Azure.AI.OpenAI.netstandard2.0.cs b/sdk/openai/Azure.AI.OpenAI/api/Azure.AI.OpenAI.netstandard2.0.cs index bfddd9f4fe519..c6148d0cf1042 100644 --- a/sdk/openai/Azure.AI.OpenAI/api/Azure.AI.OpenAI.netstandard2.0.cs +++ b/sdk/openai/Azure.AI.OpenAI/api/Azure.AI.OpenAI.netstandard2.0.cs @@ -31,8 +31,9 @@ internal AudioTranscription() { } public partial class AudioTranscriptionOptions { public AudioTranscriptionOptions() { } - public AudioTranscriptionOptions(System.BinaryData audioData) { } + public AudioTranscriptionOptions(string deploymentName, System.BinaryData audioData) { } public System.BinaryData AudioData { get { throw null; } set { } } + public string DeploymentName { get { throw null; } set { } } public string Language { get { throw null; } set { } } public string Prompt { get { throw null; } set { } } public Azure.AI.OpenAI.AudioTranscriptionFormat? ResponseFormat { get { throw null; } set { } } @@ -83,8 +84,9 @@ internal AudioTranslation() { } public partial class AudioTranslationOptions { public AudioTranslationOptions() { } - public AudioTranslationOptions(System.BinaryData audioData) { } + public AudioTranslationOptions(string deploymentName, System.BinaryData audioData) { } public System.BinaryData AudioData { get { throw null; } set { } } + public string DeploymentName { get { throw null; } set { } } public string Prompt { get { throw null; } set { } } public Azure.AI.OpenAI.AudioTranslationFormat? ResponseFormat { get { throw null; } set { } } public float? 
Temperature { get { throw null; } set { } } @@ -229,9 +231,10 @@ internal ChatCompletions() { } public partial class ChatCompletionsOptions { public ChatCompletionsOptions() { } - public ChatCompletionsOptions(System.Collections.Generic.IEnumerable messages) { } + public ChatCompletionsOptions(string deploymentName, System.Collections.Generic.IEnumerable messages) { } public Azure.AI.OpenAI.AzureChatExtensionsOptions AzureExtensionsOptions { get { throw null; } set { } } public int? ChoiceCount { get { throw null; } set { } } + public string DeploymentName { get { throw null; } set { } } public float? FrequencyPenalty { get { throw null; } set { } } public Azure.AI.OpenAI.FunctionDefinition FunctionCall { get { throw null; } set { } } public System.Collections.Generic.IList Functions { get { throw null; } set { } } @@ -324,8 +327,9 @@ internal CompletionsLogProbabilityModel() { } public partial class CompletionsOptions { public CompletionsOptions() { } - public CompletionsOptions(System.Collections.Generic.IEnumerable prompts) { } + public CompletionsOptions(string deploymentName, System.Collections.Generic.IEnumerable prompts) { } public int? ChoicesPerPrompt { get { throw null; } set { } } + public string DeploymentName { get { throw null; } set { } } public bool? Echo { get { throw null; } set { } } public float? FrequencyPenalty { get { throw null; } set { } } public int? 
GenerationSampleCount { get { throw null; } set { } } @@ -396,9 +400,9 @@ internal Embeddings() { } public partial class EmbeddingsOptions { public EmbeddingsOptions() { } - public EmbeddingsOptions(System.Collections.Generic.IEnumerable input) { } - public EmbeddingsOptions(string input) { } - public System.Collections.Generic.IList Input { get { throw null; } } + public EmbeddingsOptions(string deploymentName, System.Collections.Generic.IEnumerable input) { } + public string DeploymentName { get { throw null; } set { } } + public System.Collections.Generic.IList Input { get { throw null; } set { } } public string User { get { throw null; } set { } } } public partial class EmbeddingsUsage @@ -472,22 +476,20 @@ public OpenAIClient(System.Uri endpoint, Azure.AzureKeyCredential keyCredential, public OpenAIClient(System.Uri endpoint, Azure.Core.TokenCredential tokenCredential) { } public OpenAIClient(System.Uri endpoint, Azure.Core.TokenCredential tokenCredential, Azure.AI.OpenAI.OpenAIClientOptions options) { } public virtual Azure.Core.Pipeline.HttpPipeline Pipeline { get { throw null; } } - public virtual Azure.Response GetAudioTranscription(string deploymentId, Azure.AI.OpenAI.AudioTranscriptionOptions audioTranscriptionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual System.Threading.Tasks.Task> GetAudioTranscriptionAsync(string deploymentId, Azure.AI.OpenAI.AudioTranscriptionOptions audioTranscriptionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual Azure.Response GetAudioTranslation(string deploymentId, Azure.AI.OpenAI.AudioTranslationOptions audioTranslationOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual System.Threading.Tasks.Task> GetAudioTranslationAsync(string deploymentId, 
Azure.AI.OpenAI.AudioTranslationOptions audioTranslationOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual Azure.Response GetChatCompletions(string deploymentOrModelName, Azure.AI.OpenAI.ChatCompletionsOptions chatCompletionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual System.Threading.Tasks.Task> GetChatCompletionsAsync(string deploymentOrModelName, Azure.AI.OpenAI.ChatCompletionsOptions chatCompletionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual Azure.Response GetChatCompletionsStreaming(string deploymentOrModelName, Azure.AI.OpenAI.ChatCompletionsOptions chatCompletionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual System.Threading.Tasks.Task> GetChatCompletionsStreamingAsync(string deploymentOrModelName, Azure.AI.OpenAI.ChatCompletionsOptions chatCompletionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual Azure.Response GetCompletions(string deploymentOrModelName, Azure.AI.OpenAI.CompletionsOptions completionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual Azure.Response GetCompletions(string deploymentOrModelName, string prompt, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual System.Threading.Tasks.Task> GetCompletionsAsync(string deploymentOrModelName, Azure.AI.OpenAI.CompletionsOptions completionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } 
- public virtual System.Threading.Tasks.Task> GetCompletionsAsync(string deploymentOrModelName, string prompt, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual Azure.Response GetCompletionsStreaming(string deploymentOrModelName, Azure.AI.OpenAI.CompletionsOptions completionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual System.Threading.Tasks.Task> GetCompletionsStreamingAsync(string deploymentOrModelName, Azure.AI.OpenAI.CompletionsOptions completionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual Azure.Response GetEmbeddings(string deploymentOrModelName, Azure.AI.OpenAI.EmbeddingsOptions embeddingsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } - public virtual System.Threading.Tasks.Task> GetEmbeddingsAsync(string deploymentOrModelName, Azure.AI.OpenAI.EmbeddingsOptions embeddingsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetAudioTranscription(Azure.AI.OpenAI.AudioTranscriptionOptions audioTranscriptionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetAudioTranscriptionAsync(Azure.AI.OpenAI.AudioTranscriptionOptions audioTranscriptionOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetAudioTranslation(Azure.AI.OpenAI.AudioTranslationOptions audioTranslationOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } 
+ public virtual System.Threading.Tasks.Task> GetAudioTranslationAsync(Azure.AI.OpenAI.AudioTranslationOptions audioTranslationOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetChatCompletions(Azure.AI.OpenAI.ChatCompletionsOptions chatCompletionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetChatCompletionsAsync(Azure.AI.OpenAI.ChatCompletionsOptions chatCompletionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetChatCompletionsStreaming(Azure.AI.OpenAI.ChatCompletionsOptions chatCompletionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetChatCompletionsStreamingAsync(Azure.AI.OpenAI.ChatCompletionsOptions chatCompletionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetCompletions(Azure.AI.OpenAI.CompletionsOptions completionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetCompletionsAsync(Azure.AI.OpenAI.CompletionsOptions completionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetCompletionsStreaming(Azure.AI.OpenAI.CompletionsOptions completionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> 
GetCompletionsStreamingAsync(Azure.AI.OpenAI.CompletionsOptions completionsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetEmbeddings(Azure.AI.OpenAI.EmbeddingsOptions embeddingsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> GetEmbeddingsAsync(Azure.AI.OpenAI.EmbeddingsOptions embeddingsOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual Azure.Response GetImageGenerations(Azure.AI.OpenAI.ImageGenerationOptions imageGenerationOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task> GetImageGenerationsAsync(Azure.AI.OpenAI.ImageGenerationOptions imageGenerationOptions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } } diff --git a/sdk/openai/Azure.AI.OpenAI/assets.json b/sdk/openai/Azure.AI.OpenAI/assets.json index 9b85d4de7bc32..70d05d961a746 100644 --- a/sdk/openai/Azure.AI.OpenAI/assets.json +++ b/sdk/openai/Azure.AI.OpenAI/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "net", "TagPrefix": "net/openai/Azure.AI.OpenAI", - "Tag": "net/openai/Azure.AI.OpenAI_52e82965d8" + "Tag": "net/openai/Azure.AI.OpenAI_41a964dc8d" } diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom.Suppressions/OpenAIClient.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom.Suppressions/OpenAIClient.cs index 63b090689b041..889aada048212 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom.Suppressions/OpenAIClient.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom.Suppressions/OpenAIClient.cs @@ -10,10 +10,18 @@ namespace Azure.AI.OpenAI { 
[CodeGenSuppress("GetCompletions", typeof(string), typeof(RequestContent), typeof(RequestContext))] [CodeGenSuppress("GetCompletionsAsync", typeof(string), typeof(RequestContent), typeof(RequestContext))] + [CodeGenSuppress("GetCompletions", typeof(string), typeof(CompletionsOptions), typeof(CancellationToken))] + [CodeGenSuppress("GetCompletionsAsync", typeof(string), typeof(CompletionsOptions), typeof(CancellationToken))] + [CodeGenSuppress("GetCompletionsStreaming", typeof(string), typeof(CompletionsOptions), typeof(CancellationToken))] + [CodeGenSuppress("GetCompletionsStreamingAsync", typeof(string), typeof(CompletionsOptions), typeof(CancellationToken))] [CodeGenSuppress("GetChatCompletions", typeof(string), typeof(RequestContent), typeof(RequestContext))] [CodeGenSuppress("GetChatCompletionsAsync", typeof(string), typeof(RequestContent), typeof(RequestContext))] + [CodeGenSuppress("GetChatCompletions", typeof(string), typeof(ChatCompletionsOptions), typeof(CancellationToken))] + [CodeGenSuppress("GetChatCompletionsAsync", typeof(string), typeof(ChatCompletionsOptions), typeof(CancellationToken))] [CodeGenSuppress("GetEmbeddings", typeof(string), typeof(RequestContent), typeof(RequestContext))] [CodeGenSuppress("GetEmbeddingsAsync", typeof(string), typeof(RequestContent), typeof(RequestContext))] + [CodeGenSuppress("GetEmbeddings", typeof(string), typeof(EmbeddingsOptions), typeof(CancellationToken))] + [CodeGenSuppress("GetEmbeddingsAsync", typeof(string), typeof(EmbeddingsOptions), typeof(CancellationToken))] [CodeGenSuppress("GetChatCompletionsWithAzureExtensions", typeof(string), typeof(RequestContent), typeof(RequestContext))] [CodeGenSuppress("GetChatCompletionsWithAzureExtensions", typeof(string), typeof(ChatCompletionsOptions), typeof(CancellationToken))] [CodeGenSuppress("GetChatCompletionsWithAzureExtensionsAsync", typeof(string), typeof(RequestContent), typeof(RequestContext))] diff --git 
a/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranscriptionOptions.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranscriptionOptions.Serialization.cs index 1dc3ecb666d12..19ad40fdcd6e7 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranscriptionOptions.Serialization.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranscriptionOptions.Serialization.cs @@ -13,7 +13,7 @@ public partial class AudioTranscriptionOptions internal virtual RequestContent ToRequestContent() { var content = new MultipartFormDataRequestContent(); - content.Add(new StringContent(InternalNonAzureModelName), "model"); + content.Add(new StringContent(DeploymentName), "model"); content.Add(new ByteArrayContent(AudioData.ToArray()), "file", "@file.wav"); if (Optional.IsDefined(ResponseFormat)) { diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranscriptionOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranscriptionOptions.cs index 8c21c1180cc4a..58f022b4d6b59 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranscriptionOptions.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranscriptionOptions.cs @@ -4,9 +4,11 @@ #nullable disable using System; +using Azure.Core; namespace Azure.AI.OpenAI { + [CodeGenSuppress("AudioTranscriptionOptions", typeof(BinaryData))] public partial class AudioTranscriptionOptions { /// @@ -28,10 +30,44 @@ public partial class AudioTranscriptionOptions /// public BinaryData AudioData { get; set; } + /// + /// Gets or sets the deployment name to use for a transcription request. + /// + /// + /// + /// When making a request against Azure OpenAI, this should be the customizable name of the "model deployment" + /// (example: my-gpt4-deployment) and not the name of the model itself (example: gpt-4). + /// + /// + /// When using non-Azure OpenAI, this corresponds to "model" in the request options and should use the + /// appropriate name of the model (example: gpt-4). 
+ /// + /// + [CodeGenMember("InternalNonAzureModelName")] + public string DeploymentName { get; set; } + + /// + /// Creates a new instance of . + /// + /// The deployment name to use for audio transcription. + /// The audio data to transcribe. + /// + /// or is null. + /// + /// + /// is an empty string. + /// + public AudioTranscriptionOptions(string deploymentName, BinaryData audioData) + { + Argument.AssertNotNullOrEmpty(deploymentName, nameof(deploymentName)); + Argument.AssertNotNull(audioData, nameof(audioData)); + + DeploymentName = deploymentName; + AudioData = audioData; + } + /// Initializes a new instance of AudioTranscriptionOptions. public AudioTranscriptionOptions() { } - - internal string InternalNonAzureModelName { get; set; } } } diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranslationOptions.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranslationOptions.Serialization.cs index ad1f88925c413..8f8fd714929cf 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranslationOptions.Serialization.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranslationOptions.Serialization.cs @@ -13,7 +13,7 @@ public partial class AudioTranslationOptions internal virtual RequestContent ToRequestContent() { var content = new MultipartFormDataRequestContent(); - content.Add(new StringContent(InternalNonAzureModelName), "model"); + content.Add(new StringContent(DeploymentName), "model"); content.Add(new ByteArrayContent(AudioData.ToArray()), "file", "@file.wav"); if (Optional.IsDefined(ResponseFormat)) { diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranslationOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranslationOptions.cs index 38eb7dfd56114..25d6d5a16faf3 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranslationOptions.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/AudioTranslationOptions.cs @@ -4,10 +4,11 @@ #nullable disable using System; +using Azure.Core; namespace Azure.AI.OpenAI { - /// The 
configuration information for an audio translation request. + [CodeGenSuppress("AudioTranslationOptions", typeof(BinaryData))] public partial class AudioTranslationOptions { /// @@ -29,7 +30,41 @@ public partial class AudioTranslationOptions /// public BinaryData AudioData { get; set; } - internal string InternalNonAzureModelName { get; set; } + /// + /// Gets or sets the deployment name to use for a translation request. + /// + /// + /// + /// When making a request against Azure OpenAI, this should be the customizable name of the "model deployment" + /// (example: my-gpt4-deployment) and not the name of the model itself (example: gpt-4). + /// + /// + /// When using non-Azure OpenAI, this corresponds to "model" in the request options and should use the + /// appropriate name of the model (example: gpt-4). + /// + /// + [CodeGenMember("InternalNonAzureModelName")] + public string DeploymentName { get; set; } + + /// + /// Creates a new instance of . + /// + /// The deployment name to use for audio translation. + /// The audio data to translate. + /// + /// or is null. + /// + /// + /// is an empty string. + /// + public AudioTranslationOptions(string deploymentName, BinaryData audioData) + { + Argument.AssertNotNullOrEmpty(deploymentName, nameof(deploymentName)); + Argument.AssertNotNull(audioData, nameof(audioData)); + + DeploymentName = deploymentName; + AudioData = audioData; + } /// /// Initializes a new instance of AudioTranslationOptions. 
diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/ChatCompletionsOptions.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/ChatCompletionsOptions.Serialization.cs index 07707a77300b5..e75de69621567 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/ChatCompletionsOptions.Serialization.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/ChatCompletionsOptions.Serialization.cs @@ -17,6 +17,11 @@ public partial class ChatCompletionsOptions : IUtf8JsonSerializable void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) { writer.WriteStartObject(); + if (Optional.IsDefined(DeploymentName)) + { + writer.WritePropertyName("model"u8); + writer.WriteStringValue(DeploymentName); + } writer.WritePropertyName("messages"u8); writer.WriteStartArray(); foreach (var item in Messages) @@ -167,11 +172,6 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WriteNull("stream"); } } - if (Optional.IsDefined(InternalNonAzureModelName)) - { - writer.WritePropertyName("model"u8); - writer.WriteStringValue(InternalNonAzureModelName); - } if (AzureExtensionsOptions != null) { // CUSTOM CODE NOTE: Extensions options currently deserialize directly into the payload (not as a diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/ChatCompletionsOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/ChatCompletionsOptions.cs index eaafb9b534c87..ad3c4e85e5cb1 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/ChatCompletionsOptions.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/ChatCompletionsOptions.cs @@ -9,14 +9,28 @@ namespace Azure.AI.OpenAI { - /// - /// The configuration information used for a chat completions request. - /// + [CodeGenSuppress("ChatCompletionsOptions", typeof(IEnumerable))] public partial class ChatCompletionsOptions { /// public int? ChoiceCount { get; set; } + /// + /// Gets or sets the deployment name to use for a chat completions request. 
+ /// + /// + /// + /// When making a request against Azure OpenAI, this should be the customizable name of the "model deployment" + /// (example: my-gpt4-deployment) and not the name of the model itself (example: gpt-4). + /// + /// + /// When using non-Azure OpenAI, this corresponds to "model" in the request options and should use the + /// appropriate name of the model (example: gpt-4). + /// + /// + [CodeGenMember("InternalNonAzureModelName")] + public string DeploymentName { get; set; } + /// public float? FrequencyPenalty { get; set; } @@ -78,29 +92,37 @@ public partial class ChatCompletionsOptions // CUSTOM CODE NOTE: the following properties are forward declared here as internal as their behavior is // otherwise handled in the custom implementation. internal IList InternalAzureExtensionsDataSources { get; set; } - internal string InternalNonAzureModelName { get; set; } internal bool? InternalShouldStreamResponse { get; set; } internal IDictionary InternalStringKeyedTokenSelectionBiases { get; } /// Initializes a new instance of ChatCompletionsOptions. + /// The deployment name to use for chat completions. /// /// The collection of context messages associated with this chat completions request. /// Typical usage begins with a chat message for the System role that provides instructions for /// the behavior of the assistant, followed by alternating messages between the User and /// Assistant roles. /// - /// is null. - public ChatCompletionsOptions(IEnumerable messages) : this() + /// + /// or is null. + /// + /// + /// is an empty string. + /// + public ChatCompletionsOptions(string deploymentName, IEnumerable messages) : this() { + Argument.AssertNotNullOrEmpty(deploymentName, nameof(deploymentName)); Argument.AssertNotNull(messages, nameof(messages)); + DeploymentName = deploymentName; + foreach (ChatMessage chatMessage in messages) { Messages.Add(chatMessage); } } - /// + /// Initializes a new instance of ChatCompletionsOptions. 
public ChatCompletionsOptions() { // CUSTOM CODE NOTE: Empty constructors are added to options classes to facilitate property-only use; this diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/Choice.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/Choice.cs index e53ed4b2e9d44..91890116a6aeb 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/Choice.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/Choice.cs @@ -20,7 +20,7 @@ public partial class Choice /// The ordered index associated with this completions choice. /// The log probabilities model for tokens associated with this completions choice. /// Reason for finishing. - /// or is null. + /// is null. internal Choice(string text, int index, CompletionsLogProbabilityModel logProbabilityModel, CompletionsFinishReason finishReason) { Argument.AssertNotNull(text, nameof(text)); diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/CompletionsOptions.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/CompletionsOptions.Serialization.cs index ff58248147bdf..a635cbfc621a0 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/CompletionsOptions.Serialization.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/CompletionsOptions.Serialization.cs @@ -3,6 +3,7 @@ #nullable disable +using System.Collections.Generic; using System.Text.Json; using Azure.Core; @@ -14,62 +15,36 @@ public partial class CompletionsOptions : IUtf8JsonSerializable void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) { writer.WriteStartObject(); - // CUSTOM: only serialize if prompts is non-empty - if (Optional.IsCollectionDefined(Prompts) && Prompts.Count > 0) + writer.WritePropertyName("prompt"u8); + writer.WriteStartArray(); + foreach (var item in Prompts) { - writer.WritePropertyName("prompt"u8); - writer.WriteStartArray(); - foreach (var item in Prompts) - { - writer.WriteStringValue(item); - } - writer.WriteEndArray(); + writer.WriteStringValue(item); } + writer.WriteEndArray(); if (Optional.IsDefined(MaxTokens)) { - if (MaxTokens != null) - { - 
writer.WritePropertyName("max_tokens"u8); - writer.WriteNumberValue(MaxTokens.Value); - } - else - { - writer.WriteNull("max_tokens"); - } + writer.WritePropertyName("max_tokens"u8); + writer.WriteNumberValue(MaxTokens.Value); } if (Optional.IsDefined(Temperature)) { - if (Temperature != null) - { - writer.WritePropertyName("temperature"u8); - writer.WriteNumberValue(Temperature.Value); - } - else - { - writer.WriteNull("temperature"); - } + writer.WritePropertyName("temperature"u8); + writer.WriteNumberValue(Temperature.Value); } if (Optional.IsDefined(NucleusSamplingFactor)) { - if (NucleusSamplingFactor != null) - { - writer.WritePropertyName("top_p"u8); - writer.WriteNumberValue(NucleusSamplingFactor.Value); - } - else - { - writer.WriteNull("top_p"); - } + writer.WritePropertyName("top_p"u8); + writer.WriteNumberValue(NucleusSamplingFactor.Value); } - // CUSTOM: serialize to if (Optional.IsCollectionDefined(TokenSelectionBiases)) { writer.WritePropertyName("logit_bias"u8); writer.WriteStartObject(); - foreach (var item in TokenSelectionBiases) + foreach (KeyValuePair keyValuePair in TokenSelectionBiases) { - writer.WritePropertyName($"{item.Key}"); - writer.WriteNumberValue(item.Value); + writer.WritePropertyName($"{keyValuePair.Key}"); + writer.WriteNumberValue(keyValuePair.Value); } writer.WriteEndObject(); } @@ -80,39 +55,18 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) } if (Optional.IsDefined(ChoicesPerPrompt)) { - if (ChoicesPerPrompt != null) - { - writer.WritePropertyName("n"u8); - writer.WriteNumberValue(ChoicesPerPrompt.Value); - } - else - { - writer.WriteNull("n"); - } + writer.WritePropertyName("n"u8); + writer.WriteNumberValue(ChoicesPerPrompt.Value); } if (Optional.IsDefined(LogProbabilityCount)) { - if (LogProbabilityCount != null) - { - writer.WritePropertyName("logprobs"u8); - writer.WriteNumberValue(LogProbabilityCount.Value); - } - else - { - writer.WriteNull("logprobs"); - } + writer.WritePropertyName("logprobs"u8); + 
writer.WriteNumberValue(LogProbabilityCount.Value); } if (Optional.IsDefined(Echo)) { - if (Echo != null) - { - writer.WritePropertyName("echo"u8); - writer.WriteBooleanValue(Echo.Value); - } - else - { - writer.WriteNull("echo"); - } + writer.WritePropertyName("echo"u8); + writer.WriteBooleanValue(Echo.Value); } if (Optional.IsCollectionDefined(StopSequences)) { @@ -126,56 +80,28 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) } if (Optional.IsDefined(PresencePenalty)) { - if (PresencePenalty != null) - { - writer.WritePropertyName("presence_penalty"u8); - writer.WriteNumberValue(PresencePenalty.Value); - } - else - { - writer.WriteNull("presence_penalty"); - } + writer.WritePropertyName("presence_penalty"u8); + writer.WriteNumberValue(PresencePenalty.Value); } if (Optional.IsDefined(FrequencyPenalty)) { - if (FrequencyPenalty != null) - { - writer.WritePropertyName("frequency_penalty"u8); - writer.WriteNumberValue(FrequencyPenalty.Value); - } - else - { - writer.WriteNull("frequency_penalty"); - } + writer.WritePropertyName("frequency_penalty"u8); + writer.WriteNumberValue(FrequencyPenalty.Value); } if (Optional.IsDefined(GenerationSampleCount)) { - if (GenerationSampleCount != null) - { - writer.WritePropertyName("best_of"u8); - writer.WriteNumberValue(GenerationSampleCount.Value); - } - else - { - writer.WriteNull("best_of"); - } + writer.WritePropertyName("best_of"u8); + writer.WriteNumberValue(GenerationSampleCount.Value); } if (Optional.IsDefined(InternalShouldStreamResponse)) { - if (InternalShouldStreamResponse != null) - { - writer.WritePropertyName("stream"u8); - writer.WriteBooleanValue(InternalShouldStreamResponse.Value); - } - else - { - writer.WriteNull("stream"); - } + writer.WritePropertyName("stream"u8); + writer.WriteBooleanValue(InternalShouldStreamResponse.Value); } - if (Optional.IsDefined(InternalNonAzureModelName)) + if (Optional.IsDefined(DeploymentName)) { writer.WritePropertyName("model"u8); - 
writer.WriteStringValue(InternalNonAzureModelName); + writer.WriteStringValue(DeploymentName); } writer.WriteEndObject(); } diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/CompletionsOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/CompletionsOptions.cs index 6a63ffe0f0c30..6ab47fca5a719 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/CompletionsOptions.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/CompletionsOptions.cs @@ -10,6 +10,7 @@ namespace Azure.AI.OpenAI { + [CodeGenSuppress("CompletionsOptions", typeof(IEnumerable))] public partial class CompletionsOptions { /// @@ -24,6 +25,22 @@ public partial class CompletionsOptions /// public int? ChoicesPerPrompt { get; set; } + /// + /// Gets or sets the deployment name to use for a completions request. + /// + /// + /// + /// When making a request against Azure OpenAI, this should be the customizable name of the "model deployment" + /// (example: my-gpt4-deployment) and not the name of the model itself (example: gpt-4). + /// + /// + /// When using non-Azure OpenAI, this corresponds to "model" in the request options and should use the + /// appropriate name of the model (example: gpt-4). + /// + /// + [CodeGenMember("InternalNonAzureModelName")] + public string DeploymentName { get; set; } + /// /// Gets or sets a value specifying whether a completion should include its input prompt as a prefix to /// its generated output. @@ -131,8 +148,6 @@ public partial class CompletionsOptions /// public float? Temperature { get; set; } - internal IDictionary InternalStringKeyedTokenSelectionBiases { get; } - /// /// Gets a dictionary of modifications to the likelihood of specified GPT tokens appearing in a completions /// result. Maps token IDs to associated bias scores from -100 to 100, with minimum and maximum values @@ -147,26 +162,36 @@ public partial class CompletionsOptions public IDictionary TokenSelectionBiases { get; } internal bool? 
InternalShouldStreamResponse { get; set; } - internal string InternalNonAzureModelName { get; set; } + + internal IDictionary InternalStringKeyedTokenSelectionBiases { get; } /// Initializes a new instance of CompletionsOptions. + /// The deployment name to use for this request. /// The prompts to generate completions from. - /// is null. - public CompletionsOptions(IEnumerable prompts) + /// + /// is an empty string. + /// + /// + /// or is null. + /// + public CompletionsOptions(string deploymentName, IEnumerable prompts) + : this() { + Argument.AssertNotNullOrEmpty(deploymentName, nameof(deploymentName)); Argument.AssertNotNull(prompts, nameof(prompts)); Prompts = prompts.ToList(); - TokenSelectionBiases = new ChangeTrackingDictionary(); - StopSequences = new ChangeTrackingList(); } /// Initializes a new instance of CompletionsOptions. public CompletionsOptions() - : this(new ChangeTrackingList()) { // CUSTOM CODE NOTE: Empty constructors are added to options classes to facilitate property-only use; this // may be reconsidered for required payload constituents in the future. + Prompts = new ChangeTrackingList(); + InternalStringKeyedTokenSelectionBiases = new ChangeTrackingDictionary(); + TokenSelectionBiases = new ChangeTrackingDictionary(); + StopSequences = new ChangeTrackingList(); } } } diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/EmbeddingsOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/EmbeddingsOptions.cs index f6f9c2ed3920e..3774ea6cc0351 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/EmbeddingsOptions.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/EmbeddingsOptions.cs @@ -4,22 +4,63 @@ #nullable disable using System; +using System.Collections.Generic; +using System.Linq; using Azure.Core; namespace Azure.AI.OpenAI { - /// Schema to create a prompt completion from a deployment. 
+ [CodeGenSuppress("EmbeddingsOptions", typeof(IEnumerable))] public partial class EmbeddingsOptions { - internal string InternalNonAzureModelName { get; set; } + /// + /// Gets or sets the deployment name to use for an embeddings request. + /// + /// + /// + /// When making a request against Azure OpenAI, this should be the customizable name of the "model deployment" + /// (example: my-gpt4-deployment) and not the name of the model itself (example: gpt-4). + /// + /// + /// When using non-Azure OpenAI, this corresponds to "model" in the request options and should use the + /// appropriate name of the model (example: gpt-4). + /// + /// + [CodeGenMember("InternalNonAzureModelName")] + public string DeploymentName { get; set; } - /// - public EmbeddingsOptions(string input) - : this(new string[] { input }) + /// + /// Input texts to get embeddings for, encoded as an array of strings. + /// Each input must not exceed 2048 tokens in length. + /// + /// Unless you are embedding code, we suggest replacing newlines (\n) in your input with a single space, + /// as we have observed inferior results when newlines are present. + /// + public IList Input { get; set; } = new ChangeTrackingList(); + + /// + /// Creates a new instance of . + /// + /// The deployment name to use for embeddings. + /// The collection of inputs to run an embeddings operation across. + /// + /// or is null. + /// + /// + /// is an empty string. + /// + public EmbeddingsOptions(string deploymentName, IEnumerable input) { + Argument.AssertNotNullOrEmpty(deploymentName, nameof(deploymentName)); + Argument.AssertNotNull(input, nameof(input)); + + DeploymentName = deploymentName; + Input = input.ToList(); } - /// + /// + /// Creates a new instance of . 
+ /// public EmbeddingsOptions() { // CUSTOM CODE NOTE: Empty constructors are added to options classes to facilitate property-only use; this diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/ImageGenerationOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/ImageGenerationOptions.cs index e4aba4f2eb785..f98776f67f551 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/ImageGenerationOptions.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/ImageGenerationOptions.cs @@ -5,7 +5,6 @@ namespace Azure.AI.OpenAI { - /// Represents the request data used to generate images. public partial class ImageGenerationOptions { /// Initializes a new instance of ImageGenerationOptions. diff --git a/sdk/openai/Azure.AI.OpenAI/src/Custom/OpenAIClient.cs b/sdk/openai/Azure.AI.OpenAI/src/Custom/OpenAIClient.cs index e46691d42ae88..5402e2a498455 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Custom/OpenAIClient.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Custom/OpenAIClient.cs @@ -13,7 +13,6 @@ namespace Azure.AI.OpenAI { public partial class OpenAIClient { - private const int DefaultMaxCompletionsTokens = 100; private const string PublicOpenAIApiVersion = "1"; private const string PublicOpenAIEndpoint = $"https://api.openai.com/v{PublicOpenAIApiVersion}"; @@ -127,29 +126,26 @@ public OpenAIClient(string openAIApiKey) } /// Return textual completions as configured for a given prompt. - /// - /// Specifies either the model deployment name (when using Azure OpenAI) or model name (when using - /// non-Azure OpenAI) to use for this request. - /// /// /// The options for this completions request. /// /// The cancellation token to use. /// - /// or is null. + /// or is null. + /// + /// + /// is an empty string. 
/// public virtual Response GetCompletions( - string deploymentOrModelName, CompletionsOptions completionsOptions, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(deploymentOrModelName, nameof(deploymentOrModelName)); Argument.AssertNotNull(completionsOptions, nameof(completionsOptions)); + Argument.AssertNotNullOrEmpty(completionsOptions.DeploymentName, nameof(completionsOptions.DeploymentName)); using DiagnosticScope scope = ClientDiagnostics.CreateScope("OpenAIClient.GetCompletions"); scope.Start(); - completionsOptions.InternalNonAzureModelName = _isConfiguredForAzureOpenAI ? null : deploymentOrModelName; completionsOptions.InternalShouldStreamResponse = null; RequestContent content = completionsOptions.ToRequestContent(); @@ -157,7 +153,7 @@ public virtual Response GetCompletions( try { - using HttpMessage message = CreatePostRequestMessage(deploymentOrModelName, "completions", content, context); + using HttpMessage message = CreatePostRequestMessage(completionsOptions, content, context); Response response = _pipeline.ProcessMessage(message, context, cancellationToken); return Response.FromValue(Completions.FromResponse(response), response); } @@ -168,30 +164,27 @@ public virtual Response GetCompletions( } } - /// - public virtual Response GetCompletions( - string deploymentOrModelName, - string prompt, - CancellationToken cancellationToken = default) - { - Argument.AssertNotNull(prompt, nameof(prompt)); - CompletionsOptions simpleOptions = GetDefaultCompletionsOptions(prompt); - return GetCompletions(deploymentOrModelName, simpleOptions, cancellationToken); - } - - /// + /// Return textual completions as configured for a given prompt. + /// + /// The options for this completions request. + /// + /// The cancellation token to use. + /// + /// or is null. + /// + /// + /// is an empty string. 
+ /// public virtual async Task> GetCompletionsAsync( - string deploymentOrModelName, CompletionsOptions completionsOptions, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(deploymentOrModelName, nameof(deploymentOrModelName)); Argument.AssertNotNull(completionsOptions, nameof(completionsOptions)); + Argument.AssertNotNullOrEmpty(completionsOptions.DeploymentName, nameof(completionsOptions.DeploymentName)); using DiagnosticScope scope = ClientDiagnostics.CreateScope("OpenAIClient.GetCompletions"); scope.Start(); - completionsOptions.InternalNonAzureModelName = _isConfiguredForAzureOpenAI ? null : deploymentOrModelName; completionsOptions.InternalShouldStreamResponse = null; RequestContent content = completionsOptions.ToRequestContent(); @@ -199,7 +192,7 @@ public virtual async Task> GetCompletionsAsync( try { - using HttpMessage message = CreatePostRequestMessage(deploymentOrModelName, "completions", content, context); + using HttpMessage message = CreatePostRequestMessage(completionsOptions, content, context); Response response = await _pipeline.ProcessMessageAsync(message, context, cancellationToken) .ConfigureAwait(false); return Response.FromValue(Completions.FromResponse(response), response); @@ -211,48 +204,32 @@ public virtual async Task> GetCompletionsAsync( } } - /// - public virtual Task> GetCompletionsAsync( - string deploymentOrModelName, - string prompt, - CancellationToken cancellationToken = default) - { - Argument.AssertNotNull(prompt, nameof(prompt)); - CompletionsOptions simpleOptions = GetDefaultCompletionsOptions(prompt); - return GetCompletionsAsync(deploymentOrModelName, simpleOptions, cancellationToken); - } - /// /// Begin a completions request and get an object that can stream response data as it becomes available. /// - /// - /// - /// /// the chat completions options for this completions request. /// /// a cancellation token that can be used to cancel the initial request or ongoing streaming operation. 
/// /// - /// or is null. + /// or is null. + /// + /// + /// is an empty string. /// - /// Service returned a non-success status code. /// /// A response that, if the request was successful, includes a instance. /// public virtual Response GetCompletionsStreaming( - string deploymentOrModelName, CompletionsOptions completionsOptions, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(deploymentOrModelName, nameof(deploymentOrModelName)); Argument.AssertNotNull(completionsOptions, nameof(completionsOptions)); + Argument.AssertNotNullOrEmpty(completionsOptions.DeploymentName, nameof(completionsOptions.DeploymentName)); using DiagnosticScope scope = ClientDiagnostics.CreateScope("OpenAIClient.GetCompletionsStreaming"); scope.Start(); - completionsOptions.InternalNonAzureModelName = _isConfiguredForAzureOpenAI ? null : deploymentOrModelName; completionsOptions.InternalShouldStreamResponse = true; RequestContent content = completionsOptions.ToRequestContent(); @@ -261,11 +238,7 @@ public virtual Response GetCompletionsStreaming( try { // Response value object takes IDisposable ownership of message - HttpMessage message = CreatePostRequestMessage( - deploymentOrModelName, - "completions", - content, - context); + HttpMessage message = CreatePostRequestMessage(completionsOptions, content, context); message.BufferResponse = false; Response baseResponse = _pipeline.ProcessMessage(message, context, cancellationToken); return Response.FromValue(new StreamingCompletions(baseResponse), baseResponse); @@ -277,16 +250,29 @@ public virtual Response GetCompletionsStreaming( } } - /// + /// + /// Begin a completions request and get an object that can stream response data as it becomes available. + /// + /// the completions options for this completions request. + /// + /// a cancellation token that can be used to cancel the initial request or ongoing streaming operation. + /// + /// + /// or is null. + /// + /// + /// is an empty string. 
+ /// + /// + /// A response that, if the request was successful, includes a instance. + /// public virtual async Task> GetCompletionsStreamingAsync( - string deploymentOrModelName, CompletionsOptions completionsOptions, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(deploymentOrModelName, nameof(deploymentOrModelName)); Argument.AssertNotNull(completionsOptions, nameof(completionsOptions)); + Argument.AssertNotNullOrEmpty(completionsOptions.DeploymentName, nameof(completionsOptions.DeploymentName)); - completionsOptions.InternalNonAzureModelName = _isConfiguredForAzureOpenAI ? null : deploymentOrModelName; completionsOptions.InternalShouldStreamResponse = true; using DiagnosticScope scope = ClientDiagnostics.CreateScope("OpenAIClient.GetCompletionsStreaming"); @@ -298,11 +284,7 @@ public virtual async Task> GetCompletionsStreamin try { // Response value object takes IDisposable ownership of message - HttpMessage message = CreatePostRequestMessage( - deploymentOrModelName, - "completions", - content, - context); + HttpMessage message = CreatePostRequestMessage(completionsOptions, content, context); message.BufferResponse = false; Response baseResponse = await _pipeline.ProcessMessageAsync(message, context, cancellationToken) .ConfigureAwait(false); @@ -316,42 +298,32 @@ public virtual async Task> GetCompletionsStreamin } /// Get chat completions for provided chat context messages. - /// - /// - /// /// The options for this chat completions request. /// The cancellation token to use. /// - /// or is null. + /// or is null. + /// + /// + /// is an empty string. 
/// public virtual Response GetChatCompletions( - string deploymentOrModelName, ChatCompletionsOptions chatCompletionsOptions, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(deploymentOrModelName, nameof(deploymentOrModelName)); Argument.AssertNotNull(chatCompletionsOptions, nameof(chatCompletionsOptions)); + Argument.AssertNotNullOrEmpty(chatCompletionsOptions.DeploymentName, nameof(chatCompletionsOptions.DeploymentName)); using DiagnosticScope scope = ClientDiagnostics.CreateScope("OpenAIClient.GetChatCompletions"); scope.Start(); - chatCompletionsOptions.InternalNonAzureModelName = _isConfiguredForAzureOpenAI ? null : deploymentOrModelName; chatCompletionsOptions.InternalShouldStreamResponse = null; - string operationPath = GetOperationPath(chatCompletionsOptions); - RequestContent content = chatCompletionsOptions.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); try { - using HttpMessage message = CreatePostRequestMessage( - deploymentOrModelName, - operationPath, - content, - context); + using HttpMessage message = CreatePostRequestMessage(chatCompletionsOptions, content, context); Response response = _pipeline.ProcessMessage(message, context, cancellationToken); return Response.FromValue(ChatCompletions.FromResponse(response), response); } @@ -362,33 +334,33 @@ public virtual Response GetChatCompletions( } } - /// + /// Get chat completions for provided chat context messages. + /// The options for this chat completions request. + /// The cancellation token to use. + /// + /// or is null. + /// + /// + /// is an empty string. 
+ /// public virtual async Task> GetChatCompletionsAsync( - string deploymentOrModelName, ChatCompletionsOptions chatCompletionsOptions, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(deploymentOrModelName, nameof(deploymentOrModelName)); Argument.AssertNotNull(chatCompletionsOptions, nameof(chatCompletionsOptions)); + Argument.AssertNotNullOrEmpty(chatCompletionsOptions.DeploymentName, nameof(chatCompletionsOptions.DeploymentName)); using DiagnosticScope scope = ClientDiagnostics.CreateScope("OpenAIClient.GetChatCompletions"); scope.Start(); - chatCompletionsOptions.InternalNonAzureModelName = _isConfiguredForAzureOpenAI ? null : deploymentOrModelName; chatCompletionsOptions.InternalShouldStreamResponse = null; - string operationPath = GetOperationPath(chatCompletionsOptions); - RequestContent content = chatCompletionsOptions.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); try { - using HttpMessage message = CreatePostRequestMessage( - deploymentOrModelName, - operationPath, - content, - context); + using HttpMessage message = CreatePostRequestMessage(chatCompletionsOptions, content, context); Response response = await _pipeline.ProcessMessageAsync(message, context, cancellationToken) .ConfigureAwait(false); return Response.FromValue(ChatCompletions.FromResponse(response), response); @@ -404,11 +376,6 @@ public virtual async Task> GetChatCompletionsAsync( /// Begin a chat completions request and get an object that can stream response data as it becomes /// available. /// - /// - /// - /// /// /// the chat completions options for this chat completions request. /// @@ -416,37 +383,31 @@ public virtual async Task> GetChatCompletionsAsync( /// a cancellation token that can be used to cancel the initial request or ongoing streaming operation. /// /// - /// or is null. + /// or is null. + /// + /// + /// is an empty string. /// - /// Service returned a non-success status code. 
/// The response returned from the service. public virtual Response GetChatCompletionsStreaming( - string deploymentOrModelName, ChatCompletionsOptions chatCompletionsOptions, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(deploymentOrModelName, nameof(deploymentOrModelName)); Argument.AssertNotNull(chatCompletionsOptions, nameof(chatCompletionsOptions)); + Argument.AssertNotNullOrEmpty(chatCompletionsOptions.DeploymentName, nameof(chatCompletionsOptions.DeploymentName)); using DiagnosticScope scope = ClientDiagnostics.CreateScope("OpenAIClient.GetChatCompletionsStreaming"); scope.Start(); - chatCompletionsOptions.InternalNonAzureModelName = _isConfiguredForAzureOpenAI ? null : deploymentOrModelName; chatCompletionsOptions.InternalShouldStreamResponse = true; - string operationPath = GetOperationPath(chatCompletionsOptions); - RequestContent content = chatCompletionsOptions.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); try { // Response value object takes IDisposable ownership of message - HttpMessage message = CreatePostRequestMessage( - deploymentOrModelName, - operationPath, - content, - context); + HttpMessage message = CreatePostRequestMessage(chatCompletionsOptions, content, context); message.BufferResponse = false; Response baseResponse = _pipeline.ProcessMessage(message, context, cancellationToken); return Response.FromValue(new StreamingChatCompletions(baseResponse), baseResponse); @@ -458,34 +419,42 @@ public virtual Response GetChatCompletionsStreaming( } } - /// + /// + /// Begin a chat completions request and get an object that can stream response data as it becomes + /// available. + /// + /// + /// the chat completions options for this chat completions request. + /// + /// + /// a cancellation token that can be used to cancel the initial request or ongoing streaming operation. + /// + /// + /// or is null. + /// + /// + /// is an empty string. 
+ /// + /// The response returned from the service. public virtual async Task> GetChatCompletionsStreamingAsync( - string deploymentOrModelName, ChatCompletionsOptions chatCompletionsOptions, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(deploymentOrModelName, nameof(deploymentOrModelName)); Argument.AssertNotNull(chatCompletionsOptions, nameof(chatCompletionsOptions)); + Argument.AssertNotNullOrEmpty(chatCompletionsOptions.DeploymentName, nameof(chatCompletionsOptions.DeploymentName)); using DiagnosticScope scope = ClientDiagnostics.CreateScope("OpenAIClient.GetChatCompletionsStreaming"); scope.Start(); - chatCompletionsOptions.InternalNonAzureModelName = _isConfiguredForAzureOpenAI ? null : deploymentOrModelName; chatCompletionsOptions.InternalShouldStreamResponse = true; - string operationPath = GetOperationPath(chatCompletionsOptions); - RequestContent content = chatCompletionsOptions.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); try { // Response value object takes IDisposable ownership of message - HttpMessage message = CreatePostRequestMessage( - deploymentOrModelName, - operationPath, - content, - context); + HttpMessage message = CreatePostRequestMessage(chatCompletionsOptions, content, context); message.BufferResponse = false; Response baseResponse = await _pipeline.ProcessMessageAsync( message, @@ -501,38 +470,30 @@ public virtual async Task> GetChatCompletions } /// Return the computed embeddings for a given prompt. - /// - /// - /// /// The options for this embeddings request. /// The cancellation token to use. /// - /// or is null. + /// or is null. /// /// - /// is an empty string and was expected to be non-empty. + /// is an empty string. 
/// public virtual Response GetEmbeddings( - string deploymentOrModelName, EmbeddingsOptions embeddingsOptions, CancellationToken cancellationToken = default) { - Argument.AssertNotNullOrEmpty(deploymentOrModelName, nameof(deploymentOrModelName)); Argument.AssertNotNull(embeddingsOptions, nameof(embeddingsOptions)); + Argument.AssertNotNullOrEmpty(embeddingsOptions.DeploymentName, nameof(embeddingsOptions.DeploymentName)); using DiagnosticScope scope = ClientDiagnostics.CreateScope("OpenAIClient.GetEmbeddings"); scope.Start(); - embeddingsOptions.InternalNonAzureModelName = _isConfiguredForAzureOpenAI ? null : deploymentOrModelName; - RequestContent content = embeddingsOptions.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); try { - HttpMessage message = CreatePostRequestMessage(deploymentOrModelName, "embeddings", content, context); + HttpMessage message = CreatePostRequestMessage(embeddingsOptions, content, context); Response response = _pipeline.ProcessMessage(message, context, cancellationToken); return Response.FromValue(Embeddings.FromResponse(response), response); } @@ -543,26 +504,31 @@ public virtual Response GetEmbeddings( } } - /// + /// Return the computed embeddings for a given prompt. + /// The options for this embeddings request. + /// The cancellation token to use. + /// + /// or is null. + /// + /// + /// is an empty string. 
+ /// public virtual async Task> GetEmbeddingsAsync( - string deploymentOrModelName, EmbeddingsOptions embeddingsOptions, CancellationToken cancellationToken = default) { - Argument.AssertNotNullOrEmpty(deploymentOrModelName, nameof(deploymentOrModelName)); Argument.AssertNotNull(embeddingsOptions, nameof(embeddingsOptions)); + Argument.AssertNotNullOrEmpty(embeddingsOptions.DeploymentName, nameof(embeddingsOptions.DeploymentName)); using DiagnosticScope scope = ClientDiagnostics.CreateScope("OpenAIClient.GetEmbeddings"); scope.Start(); - embeddingsOptions.InternalNonAzureModelName = _isConfiguredForAzureOpenAI ? null : deploymentOrModelName; - RequestContent content = embeddingsOptions.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); try { - HttpMessage message = CreatePostRequestMessage(deploymentOrModelName, "embeddings", content, context); + HttpMessage message = CreatePostRequestMessage(embeddingsOptions, content, context); Response response = await _pipeline.ProcessMessageAsync(message, context, cancellationToken) .ConfigureAwait(false); return Response.FromValue(Embeddings.FromResponse(response), response); @@ -584,6 +550,9 @@ public virtual async Task> GetEmbeddingsAsync( /// /// An optional cancellation token that may be used to abort an ongoing request. /// + /// + /// is null. + /// /// /// The response information for the image generations request. /// @@ -643,6 +612,9 @@ Operation imagesOperation /// /// An optional cancellation token that may be used to abort an ongoing request. /// + /// + /// is null. + /// /// /// The response information for the image generations request. /// @@ -695,32 +667,35 @@ Operation imagesOperation } /// Transcribes audio into the input language. - /// Specifies either the model deployment name (when using Azure OpenAI) or model name (when using non-Azure OpenAI) to use for this request. /// /// Transcription request. 
/// Requesting format 'json' will result on only the 'text' field being set. /// For more output data use 'verbose_json. /// /// The cancellation token to use. - /// or is null. - /// is an empty string, and was expected to be non-empty. - public virtual async Task> GetAudioTranscriptionAsync(string deploymentId, AudioTranscriptionOptions audioTranscriptionOptions, CancellationToken cancellationToken = default) + /// + /// or is null. + /// + /// + /// is an empty string. + /// + public virtual async Task> GetAudioTranscriptionAsync( + AudioTranscriptionOptions audioTranscriptionOptions, + CancellationToken cancellationToken = default) { - Argument.AssertNotNullOrEmpty(deploymentId, nameof(deploymentId)); Argument.AssertNotNull(audioTranscriptionOptions, nameof(audioTranscriptionOptions)); + Argument.AssertNotNullOrEmpty(audioTranscriptionOptions.DeploymentName, nameof(audioTranscriptionOptions.DeploymentName)); using var scope = ClientDiagnostics.CreateScope("OpenAIClient.GetAudioTranscription"); scope.Start(); - audioTranscriptionOptions.InternalNonAzureModelName = deploymentId; - RequestContent content = audioTranscriptionOptions.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); Response rawResponse = default; try { - using HttpMessage message = CreateGetAudioTranscriptionRequest(deploymentId, content, context); + using HttpMessage message = CreateGetAudioTranscriptionRequest(audioTranscriptionOptions, content, context); rawResponse = await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); } catch (Exception e) @@ -733,32 +708,35 @@ public virtual async Task> GetAudioTranscriptionAsy } /// Transcribes audio into the input language. - /// Specifies either the model deployment name (when using Azure OpenAI) or model name (when using non-Azure OpenAI) to use for this request. /// /// Transcription request. /// Requesting format 'json' will result on only the 'text' field being set. 
/// For more output data use 'verbose_json. /// /// The cancellation token to use. - /// or is null. - /// is an empty string, and was expected to be non-empty. - public virtual Response GetAudioTranscription(string deploymentId, AudioTranscriptionOptions audioTranscriptionOptions, CancellationToken cancellationToken = default) + /// + /// or is null. + /// + /// + /// is an empty string. + /// + public virtual Response GetAudioTranscription( + AudioTranscriptionOptions audioTranscriptionOptions, + CancellationToken cancellationToken = default) { - Argument.AssertNotNullOrEmpty(deploymentId, nameof(deploymentId)); Argument.AssertNotNull(audioTranscriptionOptions, nameof(audioTranscriptionOptions)); + Argument.AssertNotNullOrEmpty(audioTranscriptionOptions.DeploymentName, nameof(audioTranscriptionOptions.DeploymentName)); using var scope = ClientDiagnostics.CreateScope("OpenAIClient.GetAudioTranscription"); scope.Start(); - audioTranscriptionOptions.InternalNonAzureModelName = deploymentId; - RequestContent content = audioTranscriptionOptions.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); Response rawResponse = default; try { - using HttpMessage message = CreateGetAudioTranscriptionRequest(deploymentId, content, context); + using HttpMessage message = CreateGetAudioTranscriptionRequest(audioTranscriptionOptions, content, context); rawResponse = _pipeline.ProcessMessage(message, context); } catch (Exception e) @@ -771,35 +749,35 @@ public virtual Response GetAudioTranscription(string deploym } /// Transcribes and translates input audio into English text. - /// Specifies either the model deployment name (when using Azure OpenAI) or model name (when using non-Azure OpenAI) to use for this request. /// /// Translation request. /// Requesting format 'json' will result on only the 'text' field being set. /// For more output data use 'verbose_json. /// /// The cancellation token to use. - /// or is null. 
- /// is an empty string, and was expected to be non-empty. - public virtual async Task> GetAudioTranslationAsync(string deploymentId, AudioTranslationOptions audioTranslationOptions, CancellationToken cancellationToken = default) + /// + /// or is null. + /// + /// + /// is an empty string. + /// + public virtual async Task> GetAudioTranslationAsync( + AudioTranslationOptions audioTranslationOptions, + CancellationToken cancellationToken = default) { - Argument.AssertNotNullOrEmpty(deploymentId, nameof(deploymentId)); Argument.AssertNotNull(audioTranslationOptions, nameof(audioTranslationOptions)); - - // Custom code: merely linking the deployment ID (== model name) into the request body for non-Azure use - audioTranslationOptions.InternalNonAzureModelName = deploymentId; + Argument.AssertNotNullOrEmpty(audioTranslationOptions.DeploymentName, nameof(audioTranslationOptions.DeploymentName)); using var scope = ClientDiagnostics.CreateScope("OpenAIClient.GetAudioTranslation"); scope.Start(); - audioTranslationOptions.InternalNonAzureModelName = deploymentId; - RequestContent content = audioTranslationOptions.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); Response rawResponse = default; try { - using HttpMessage message = CreateGetAudioTranslationRequest(deploymentId, content, context); + using HttpMessage message = CreateGetAudioTranslationRequest(audioTranslationOptions, content, context); rawResponse = await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); } catch (Exception e) @@ -812,32 +790,35 @@ public virtual async Task> GetAudioTranslationAsync(s } /// Transcribes and translates input audio into English text. - /// Specifies either the model deployment name (when using Azure OpenAI) or model name (when using non-Azure OpenAI) to use for this request. /// /// Translation request. /// Requesting format 'json' will result on only the 'text' field being set. /// For more output data use 'verbose_json. 
/// /// The cancellation token to use. - /// or is null. - /// is an empty string, and was expected to be non-empty. - public virtual Response GetAudioTranslation(string deploymentId, AudioTranslationOptions audioTranslationOptions, CancellationToken cancellationToken = default) + /// + /// or is null. + /// + /// + /// is an empty string. + /// + public virtual Response GetAudioTranslation( + AudioTranslationOptions audioTranslationOptions, + CancellationToken cancellationToken = default) { - Argument.AssertNotNullOrEmpty(deploymentId, nameof(deploymentId)); Argument.AssertNotNull(audioTranslationOptions, nameof(audioTranslationOptions)); + Argument.AssertNotNullOrEmpty(audioTranslationOptions.DeploymentName, nameof(audioTranslationOptions.DeploymentName)); using var scope = ClientDiagnostics.CreateScope("OpenAIClient.GetAudioTranslation"); scope.Start(); - audioTranslationOptions.InternalNonAzureModelName = deploymentId; - RequestContent content = audioTranslationOptions.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); Response rawResponse = default; try { - using HttpMessage message = CreateGetAudioTranslationRequest(deploymentId, content, context); + using HttpMessage message = CreateGetAudioTranslationRequest(audioTranslationOptions, content, context); rawResponse = _pipeline.ProcessMessage(message, context); } catch (Exception e) @@ -868,6 +849,29 @@ internal RequestUriBuilder GetUri(string deploymentOrModelName, string operation return uri; } + internal HttpMessage CreatePostRequestMessage( + CompletionsOptions completionsOptions, + RequestContent content, + RequestContext context) + => CreatePostRequestMessage(completionsOptions.DeploymentName, "completions", content, context); + + internal HttpMessage CreatePostRequestMessage( + ChatCompletionsOptions chatCompletionsOptions, + RequestContent content, + RequestContext context) + { + string operationPath = chatCompletionsOptions.AzureExtensionsOptions != null + ? 
"extensions/chat/completions" + : "chat/completions"; + return CreatePostRequestMessage(chatCompletionsOptions.DeploymentName, operationPath, content, context); + } + + internal HttpMessage CreatePostRequestMessage( + EmbeddingsOptions embeddingsOptions, + RequestContent content, + RequestContext context) + => CreatePostRequestMessage(embeddingsOptions.DeploymentName, "embeddings", content, context); + internal HttpMessage CreatePostRequestMessage( string deploymentOrModelName, string operationPath, @@ -890,41 +894,30 @@ private static TokenCredential CreateDelegatedToken(string token) return DelegatedTokenCredential.Create((_, _) => accessToken); } - private static CompletionsOptions GetDefaultCompletionsOptions(string prompt) - { - return new CompletionsOptions() - { - Prompts = - { - prompt, - }, - MaxTokens = DefaultMaxCompletionsTokens, - }; - } - - private static string GetOperationPath(ChatCompletionsOptions chatCompletionsOptions) - => chatCompletionsOptions.AzureExtensionsOptions != null - ? 
"extensions/chat/completions" - : "chat/completions"; - - internal HttpMessage CreateGetAudioTranscriptionRequest(string deploymentId, RequestContent content, RequestContext context) + internal HttpMessage CreateGetAudioTranscriptionRequest( + AudioTranscriptionOptions audioTranscriptionOptions, + RequestContent content, + RequestContext context) { HttpMessage message = _pipeline.CreateMessage(context, ResponseClassifier200); Request request = message.Request; request.Method = RequestMethod.Post; - request.Uri = GetUri(deploymentId, "audio/transcriptions"); + request.Uri = GetUri(audioTranscriptionOptions.DeploymentName, "audio/transcriptions"); request.Content = content; string boundary = (content as MultipartFormDataRequestContent).Boundary; request.Headers.Add("content-type", $"multipart/form-data; boundary={boundary}"); return message; } - internal HttpMessage CreateGetAudioTranslationRequest(string deploymentId, RequestContent content, RequestContext context) + internal HttpMessage CreateGetAudioTranslationRequest( + AudioTranslationOptions audioTranslationOptions, + RequestContent content, + RequestContext context) { HttpMessage message = _pipeline.CreateMessage(context, ResponseClassifier200); Request request = message.Request; request.Method = RequestMethod.Post; - request.Uri = GetUri(deploymentId, "audio/translations"); + request.Uri = GetUri(audioTranslationOptions.DeploymentName, "audio/translations"); request.Content = content; string boundary = (content as MultipartFormDataRequestContent).Boundary; request.Headers.Add("content-type", $"multipart/form-data; boundary={boundary}"); diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.Serialization.cs index 4fad75e9040b1..e70c36c2e4cc8 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.Serialization.cs +++ 
b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.Serialization.cs @@ -37,10 +37,10 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("temperature"u8); writer.WriteNumberValue(Temperature.Value); } - if (Optional.IsDefined(InternalNonAzureModelName)) + if (Optional.IsDefined(DeploymentName)) { writer.WritePropertyName("model"u8); - writer.WriteStringValue(InternalNonAzureModelName); + writer.WriteStringValue(DeploymentName); } writer.WriteEndObject(); } diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.cs index 38fc1b9a3ff4f..f5460a934f133 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranscriptionOptions.cs @@ -13,19 +13,6 @@ namespace Azure.AI.OpenAI /// The configuration information for an audio transcription request. public partial class AudioTranscriptionOptions { - /// Initializes a new instance of AudioTranscriptionOptions. - /// - /// The audio data to transcribe. This must be the binary content of a file in one of the supported media formats: - /// flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, webm. - /// - /// is null. - public AudioTranscriptionOptions(BinaryData audioData) - { - Argument.AssertNotNull(audioData, nameof(audioData)); - - AudioData = audioData; - } - /// Initializes a new instance of AudioTranscriptionOptions. /// /// The audio data to transcribe. This must be the binary content of a file in one of the supported media formats: @@ -46,15 +33,15 @@ public AudioTranscriptionOptions(BinaryData audioData) /// Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. /// If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit. 
/// - /// The model to use for this transcription request. - internal AudioTranscriptionOptions(BinaryData audioData, AudioTranscriptionFormat? responseFormat, string language, string prompt, float? temperature, string internalNonAzureModelName) + /// The model to use for this transcription request. + internal AudioTranscriptionOptions(BinaryData audioData, AudioTranscriptionFormat? responseFormat, string language, string prompt, float? temperature, string deploymentName) { AudioData = audioData; ResponseFormat = responseFormat; Language = language; Prompt = prompt; Temperature = temperature; - InternalNonAzureModelName = internalNonAzureModelName; + DeploymentName = deploymentName; } /// The requested format of the transcription response data, which will influence the content and detail of the result. public AudioTranscriptionFormat? ResponseFormat { get; set; } diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.Serialization.cs index a7f43534234a9..90b5da9c23a96 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.Serialization.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.Serialization.cs @@ -32,10 +32,10 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("temperature"u8); writer.WriteNumberValue(Temperature.Value); } - if (Optional.IsDefined(InternalNonAzureModelName)) + if (Optional.IsDefined(DeploymentName)) { writer.WritePropertyName("model"u8); - writer.WriteStringValue(InternalNonAzureModelName); + writer.WriteStringValue(DeploymentName); } writer.WriteEndObject(); } diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.cs index fe60b53dca163..0cf8ee9d3ce55 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.cs +++ 
b/sdk/openai/Azure.AI.OpenAI/src/Generated/AudioTranslationOptions.cs @@ -13,19 +13,6 @@ namespace Azure.AI.OpenAI /// The configuration information for an audio translation request. public partial class AudioTranslationOptions { - /// Initializes a new instance of AudioTranslationOptions. - /// - /// The audio data to translate. This must be the binary content of a file in one of the supported media formats: - /// flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, webm. - /// - /// is null. - public AudioTranslationOptions(BinaryData audioData) - { - Argument.AssertNotNull(audioData, nameof(audioData)); - - AudioData = audioData; - } - /// Initializes a new instance of AudioTranslationOptions. /// /// The audio data to translate. This must be the binary content of a file in one of the supported media formats: @@ -41,14 +28,14 @@ public AudioTranslationOptions(BinaryData audioData) /// Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. /// If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit. /// - /// The model to use for this translation request. - internal AudioTranslationOptions(BinaryData audioData, AudioTranslationFormat? responseFormat, string prompt, float? temperature, string internalNonAzureModelName) + /// The model to use for this translation request. + internal AudioTranslationOptions(BinaryData audioData, AudioTranslationFormat? responseFormat, string prompt, float? temperature, string deploymentName) { AudioData = audioData; ResponseFormat = responseFormat; Prompt = prompt; Temperature = temperature; - InternalNonAzureModelName = internalNonAzureModelName; + DeploymentName = deploymentName; } /// The requested format of the translation response data, which will influence the content and detail of the result. public AudioTranslationFormat? 
ResponseFormat { get; set; } diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/ChatCompletionsOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/ChatCompletionsOptions.cs index 55a3999e41d28..da714f6e56733 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Generated/ChatCompletionsOptions.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/ChatCompletionsOptions.cs @@ -80,7 +80,7 @@ public partial class ChatCompletionsOptions /// decrease the likelihood of the model repeating the same statements verbatim. /// /// A value indicating whether chat completions should be streamed for this request. - /// + /// /// The model name to provide as part of this completions request. /// Not applicable to Azure OpenAI, where deployment information should be included in the Azure /// resource URI that's connected to. @@ -89,7 +89,7 @@ public partial class ChatCompletionsOptions /// The configuration entries for Azure OpenAI chat extensions that use them. /// This additional specification is only compatible with Azure OpenAI. /// - internal ChatCompletionsOptions(IList messages, IList functions, FunctionDefinition functionCall, int? maxTokens, float? temperature, float? nucleusSamplingFactor, IDictionary internalStringKeyedTokenSelectionBiases, string user, int? choiceCount, IList stopSequences, float? presencePenalty, float? frequencyPenalty, bool? internalShouldStreamResponse, string internalNonAzureModelName, IList internalAzureExtensionsDataSources) + internal ChatCompletionsOptions(IList messages, IList functions, FunctionDefinition functionCall, int? maxTokens, float? temperature, float? nucleusSamplingFactor, IDictionary internalStringKeyedTokenSelectionBiases, string user, int? choiceCount, IList stopSequences, float? presencePenalty, float? frequencyPenalty, bool? 
internalShouldStreamResponse, string deploymentName, IList internalAzureExtensionsDataSources) { Messages = messages; Functions = functions; @@ -104,7 +104,7 @@ internal ChatCompletionsOptions(IList messages, IList /// A value indicating whether chat completions should be streamed for this request. - /// + /// /// The model name to provide as part of this completions request. /// Not applicable to Azure OpenAI, where deployment information should be included in the Azure /// resource URI that's connected to. /// - internal CompletionsOptions(IList prompts, int? maxTokens, float? temperature, float? nucleusSamplingFactor, IDictionary internalStringKeyedTokenSelectionBiases, string user, int? choicesPerPrompt, int? logProbabilityCount, bool? echo, IList stopSequences, float? presencePenalty, float? frequencyPenalty, int? generationSampleCount, bool? internalShouldStreamResponse, string internalNonAzureModelName) + internal CompletionsOptions(IList prompts, int? maxTokens, float? temperature, float? nucleusSamplingFactor, IDictionary internalStringKeyedTokenSelectionBiases, string user, int? choicesPerPrompt, int? logProbabilityCount, bool? echo, IList stopSequences, float? presencePenalty, float? frequencyPenalty, int? generationSampleCount, bool? internalShouldStreamResponse, string deploymentName) { Prompts = prompts; MaxTokens = maxTokens; @@ -105,7 +105,7 @@ internal CompletionsOptions(IList prompts, int? maxTokens, float? temper FrequencyPenalty = frequencyPenalty; GenerationSampleCount = generationSampleCount; InternalShouldStreamResponse = internalShouldStreamResponse; - InternalNonAzureModelName = internalNonAzureModelName; + DeploymentName = deploymentName; } /// /// An identifier for the caller or end user of the operation. 
This may be used for tracking diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/EmbeddingsOptions.Serialization.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/EmbeddingsOptions.Serialization.cs index 8130196e2e410..b6023c36f1d36 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Generated/EmbeddingsOptions.Serialization.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/EmbeddingsOptions.Serialization.cs @@ -20,10 +20,10 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("user"u8); writer.WriteStringValue(User); } - if (Optional.IsDefined(InternalNonAzureModelName)) + if (Optional.IsDefined(DeploymentName)) { writer.WritePropertyName("model"u8); - writer.WriteStringValue(InternalNonAzureModelName); + writer.WriteStringValue(DeploymentName); } writer.WritePropertyName("input"u8); writer.WriteStartArray(); diff --git a/sdk/openai/Azure.AI.OpenAI/src/Generated/EmbeddingsOptions.cs b/sdk/openai/Azure.AI.OpenAI/src/Generated/EmbeddingsOptions.cs index 88b9de89ca20f..986586a57af87 100644 --- a/sdk/openai/Azure.AI.OpenAI/src/Generated/EmbeddingsOptions.cs +++ b/sdk/openai/Azure.AI.OpenAI/src/Generated/EmbeddingsOptions.cs @@ -19,28 +19,12 @@ namespace Azure.AI.OpenAI /// public partial class EmbeddingsOptions { - /// Initializes a new instance of EmbeddingsOptions. - /// - /// Input texts to get embeddings for, encoded as a an array of strings. - /// Each input must not exceed 2048 tokens in length. - /// - /// Unless you are embedding code, we suggest replacing newlines (\n) in your input with a single space, - /// as we have observed inferior results when newlines are present. - /// - /// is null. - public EmbeddingsOptions(IEnumerable input) - { - Argument.AssertNotNull(input, nameof(input)); - - Input = input.ToList(); - } - /// Initializes a new instance of EmbeddingsOptions. /// /// An identifier for the caller or end user of the operation. This may be used for tracking /// or rate-limiting purposes. 
/// - /// + /// /// The model name to provide as part of this embeddings request. /// Not applicable to Azure OpenAI, where deployment information should be included in the Azure /// resource URI that's connected to. @@ -52,10 +36,10 @@ public EmbeddingsOptions(IEnumerable input) /// Unless you are embedding code, we suggest replacing newlines (\n) in your input with a single space, /// as we have observed inferior results when newlines are present. /// - internal EmbeddingsOptions(string user, string internalNonAzureModelName, IList input) + internal EmbeddingsOptions(string user, string deploymentName, IList input) { User = user; - InternalNonAzureModelName = internalNonAzureModelName; + DeploymentName = deploymentName; Input = input; } @@ -64,13 +48,5 @@ internal EmbeddingsOptions(string user, string internalNonAzureModelName, IList< /// or rate-limiting purposes. /// public string User { get; set; } - /// - /// Input texts to get embeddings for, encoded as a an array of strings. - /// Each input must not exceed 2048 tokens in length. - /// - /// Unless you are embedding code, we suggest replacing newlines (\n) in your input with a single space, - /// as we have observed inferior results when newlines are present. 
- /// - public IList Input { get; } } } diff --git a/sdk/openai/Azure.AI.OpenAI/tests/AudioTranscriptionTests.cs b/sdk/openai/Azure.AI.OpenAI/tests/AudioTranscriptionTests.cs index d7e473991a7f0..295b3c63a59fc 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/AudioTranscriptionTests.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/AudioTranscriptionTests.cs @@ -43,6 +43,7 @@ public async Task TranscriptionWorksWithFormat( var requestOptions = new AudioTranscriptionOptions() { + DeploymentName = deploymentOrModelName, AudioData = BinaryData.FromStream(audioFileStream), Temperature = (float)0.25, }; @@ -59,9 +60,7 @@ public async Task TranscriptionWorksWithFormat( }; } - Response response = await client.GetAudioTranscriptionAsync( - deploymentOrModelName, - requestOptions); + Response response = await client.GetAudioTranscriptionAsync(requestOptions); string text = response.Value.Text; Assert.That(text, Is.Not.Null.Or.Empty); diff --git a/sdk/openai/Azure.AI.OpenAI/tests/AudioTranslationTests.cs b/sdk/openai/Azure.AI.OpenAI/tests/AudioTranslationTests.cs index 20bf363c6fe3f..c616e874e4dbe 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/AudioTranslationTests.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/AudioTranslationTests.cs @@ -43,6 +43,7 @@ public async Task TranslationWorksWithFormat( var requestOptions = new AudioTranslationOptions() { + DeploymentName = deploymentOrModelName, AudioData = BinaryData.FromStream(audioFileStream), Temperature = (float)0.25, }; @@ -59,9 +60,7 @@ public async Task TranslationWorksWithFormat( }; } - Response response = await client.GetAudioTranslationAsync( - deploymentOrModelName, - requestOptions); + Response response = await client.GetAudioTranslationAsync(requestOptions); string text = response.Value.Text; Assert.That(text, Is.Not.Null.Or.Empty); diff --git a/sdk/openai/Azure.AI.OpenAI/tests/AzureChatExtensionsTests.cs b/sdk/openai/Azure.AI.OpenAI/tests/AzureChatExtensionsTests.cs index 7f56bb370064c..be0563a823de1 100644 --- 
a/sdk/openai/Azure.AI.OpenAI/tests/AzureChatExtensionsTests.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/AzureChatExtensionsTests.cs @@ -62,6 +62,7 @@ public async Task BasicSearchExtensionWorks( var requestOptions = new ChatCompletionsOptions() { + DeploymentName = deploymentOrModelName, Messages = { new ChatMessage(ChatRole.User, "What does PR complete mean?"), @@ -70,7 +71,7 @@ public async Task BasicSearchExtensionWorks( AzureExtensionsOptions = extensionsOptions, }; - Response response = await client.GetChatCompletionsAsync(deploymentOrModelName, requestOptions); + Response response = await client.GetChatCompletionsAsync(requestOptions); Assert.That(response, Is.Not.Null); Assert.That(response.Value, Is.Not.Null); Assert.That(response.Value.Choices, Is.Not.Null.Or.Empty); @@ -99,6 +100,7 @@ public async Task StreamingSearchExtensionWorks(OpenAIClientServiceTarget servic var requestOptions = new ChatCompletionsOptions() { + DeploymentName = deploymentOrModelName, Messages = { new ChatMessage(ChatRole.User, "What does PR complete mean?"), @@ -123,9 +125,7 @@ public async Task StreamingSearchExtensionWorks(OpenAIClientServiceTarget servic }, }; - Response response = await client.GetChatCompletionsStreamingAsync( - deploymentOrModelName, - requestOptions); + Response response = await client.GetChatCompletionsStreamingAsync(requestOptions); Assert.That(response, Is.Not.Null); Assert.That(response.Value, Is.Not.Null); diff --git a/sdk/openai/Azure.AI.OpenAI/tests/ChatFunctionsTests.cs b/sdk/openai/Azure.AI.OpenAI/tests/ChatFunctionsTests.cs index 36b612ce299dd..cfb68c95c3be4 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/ChatFunctionsTests.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/ChatFunctionsTests.cs @@ -31,6 +31,7 @@ public async Task SimpleFunctionCallWorks(OpenAIClientServiceTarget serviceTarge var requestOptions = new ChatCompletionsOptions() { + DeploymentName = deploymentOrModelName, Functions = { s_futureTemperatureFunction }, Messages = { @@ -40,7 +41,7 @@ 
public async Task SimpleFunctionCallWorks(OpenAIClientServiceTarget serviceTarge MaxTokens = 512, }; - Response response = await client.GetChatCompletionsAsync(deploymentOrModelName, requestOptions); + Response response = await client.GetChatCompletionsAsync(requestOptions); Assert.That(response, Is.Not.Null); Assert.That(response.Value, Is.Not.Null); @@ -54,6 +55,7 @@ public async Task SimpleFunctionCallWorks(OpenAIClientServiceTarget serviceTarge ChatCompletionsOptions followupOptions = new() { + DeploymentName = deploymentOrModelName, Functions = { s_futureTemperatureFunction }, MaxTokens = 512, }; @@ -74,7 +76,7 @@ public async Task SimpleFunctionCallWorks(OpenAIClientServiceTarget serviceTarge new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }), }); - Response followupResponse = await client.GetChatCompletionsAsync(deploymentOrModelName, followupOptions); + Response followupResponse = await client.GetChatCompletionsAsync(followupOptions); Assert.That(followupResponse, Is.Not.Null); Assert.That(followupResponse.Value, Is.Not.Null); Assert.That(followupResponse.Value.Choices, Is.Not.Null.Or.Empty); @@ -94,6 +96,7 @@ public async Task StreamingFunctionCallWorks(OpenAIClientServiceTarget serviceTa var requestOptions = new ChatCompletionsOptions() { + DeploymentName = deploymentOrModelName, Functions = { s_futureTemperatureFunction }, Messages = { @@ -104,7 +107,7 @@ public async Task StreamingFunctionCallWorks(OpenAIClientServiceTarget serviceTa }; Response response - = await client.GetChatCompletionsStreamingAsync(deploymentOrModelName, requestOptions); + = await client.GetChatCompletionsStreamingAsync(requestOptions); Assert.That(response, Is.Not.Null); using StreamingChatCompletions streamingChatCompletions = response.Value; diff --git a/sdk/openai/Azure.AI.OpenAI/tests/OpenAIInferenceTests.cs b/sdk/openai/Azure.AI.OpenAI/tests/OpenAIInferenceTests.cs index 2b9034d25e912..86b0374ff246b 100644 --- 
a/sdk/openai/Azure.AI.OpenAI/tests/OpenAIInferenceTests.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/OpenAIInferenceTests.cs @@ -27,6 +27,7 @@ public async Task Completions(OpenAIClientServiceTarget serviceTarget) Assert.That(client, Is.InstanceOf()); CompletionsOptions requestOptions = new() { + DeploymentName = deploymentOrModelName, Prompts = { "Hello world", @@ -34,7 +35,7 @@ public async Task Completions(OpenAIClientServiceTarget serviceTarget) }, }; Assert.That(requestOptions, Is.InstanceOf()); - Response response = await client.GetCompletionsAsync(deploymentOrModelName, requestOptions); + Response response = await client.GetCompletionsAsync(requestOptions); Assert.That(response, Is.Not.Null); Assert.That(response, Is.InstanceOf>()); Assert.That(response.Value, Is.Not.Null); @@ -43,19 +44,6 @@ public async Task Completions(OpenAIClientServiceTarget serviceTarget) Assert.That(response.Value.Choices[0].FinishReason, Is.Not.Null.Or.Empty); } - [RecordedTest] - [TestCase(OpenAIClientServiceTarget.Azure)] - [TestCase(OpenAIClientServiceTarget.NonAzure)] - public async Task SimpleCompletions(OpenAIClientServiceTarget serviceTarget) - { - OpenAIClient client = GetTestClient(serviceTarget); - string deploymentOrModelName = OpenAITestBase.GetDeploymentOrModelName( - serviceTarget, - OpenAIClientScenario.LegacyCompletions); - Response response = await client.GetCompletionsAsync(deploymentOrModelName, "Hello world!"); - Assert.That(response, Is.InstanceOf>()); - } - [RecordedTest] [TestCase(OpenAIClientServiceTarget.Azure)] [TestCase(OpenAIClientServiceTarget.NonAzure, Ignore = "Tokens not supported for non-Azure")] @@ -63,11 +51,13 @@ public async Task CompletionsWithTokenCredential(OpenAIClientServiceTarget servi { OpenAIClient client = GetTestClient(serviceTarget, OpenAIClientAuthenticationType.Token); string deploymentName = OpenAITestBase.GetDeploymentOrModelName(serviceTarget, OpenAIClientScenario.LegacyCompletions); - var requestOptions = new CompletionsOptions(); - 
requestOptions.Prompts.Add("Hello, world!"); - requestOptions.Prompts.Add("I can have multiple prompts"); + var requestOptions = new CompletionsOptions() + { + DeploymentName = deploymentName, + Prompts = { "Hello, world!", "I can have multiple prompts" }, + }; Assert.That(requestOptions, Is.InstanceOf()); - Response response = await client.GetCompletionsAsync(deploymentName, requestOptions); + Response response = await client.GetCompletionsAsync(requestOptions); Assert.That(response, Is.InstanceOf>()); Assert.That(response.Value.Choices, Is.Not.Null.Or.Empty); Assert.That(response.Value.Choices.Count, Is.EqualTo(2)); @@ -80,9 +70,13 @@ public async Task Embeddings(OpenAIClientServiceTarget serviceTarget) { OpenAIClient client = GetTestClient(serviceTarget); string deploymentOrModelName = OpenAITestBase.GetDeploymentOrModelName(serviceTarget, OpenAIClientScenario.Embeddings); - var embeddingsRequest = new EmbeddingsOptions("Your text string goes here"); - Assert.That(embeddingsRequest, Is.InstanceOf()); - Response response = await client.GetEmbeddingsAsync(deploymentOrModelName, embeddingsRequest); + var embeddingsOptions = new EmbeddingsOptions() + { + DeploymentName = deploymentOrModelName, + Input = { "Your text string goes here" }, + }; + Assert.That(embeddingsOptions, Is.InstanceOf()); + Response response = await client.GetEmbeddingsAsync(embeddingsOptions); Assert.That(response, Is.InstanceOf>()); Assert.That(response.Value, Is.Not.Null); Assert.That(response.Value.Data, Is.Not.Null.Or.Empty); @@ -102,6 +96,7 @@ public async Task CompletionsUsageField(OpenAIClientServiceTarget serviceTarget) string deploymentOrModelName = OpenAITestBase.GetDeploymentOrModelName(serviceTarget, OpenAIClientScenario.LegacyCompletions); var requestOptions = new CompletionsOptions() { + DeploymentName = deploymentOrModelName, Prompts = { "Hello world", @@ -112,7 +107,7 @@ public async Task CompletionsUsageField(OpenAIClientServiceTarget serviceTarget) LogProbabilityCount = 1, }; 
int expectedChoiceCount = (requestOptions.ChoicesPerPrompt ?? 1) * requestOptions.Prompts.Count; - Response response = await client.GetCompletionsAsync(deploymentOrModelName, requestOptions); + Response response = await client.GetCompletionsAsync(requestOptions); Assert.That(response.GetRawResponse(), Is.Not.Null.Or.Empty); Assert.That(response.Value, Is.Not.Null); Assert.That(response.Value.Id, Is.Not.Null.Or.Empty); @@ -147,6 +142,7 @@ public async Task ChatCompletions(OpenAIClientServiceTarget serviceTarget) OpenAIClientScenario.ChatCompletions); var requestOptions = new ChatCompletionsOptions() { + DeploymentName = deploymentOrModelName, Messages = { new ChatMessage(ChatRole.System, "You are a helpful assistant."), @@ -155,9 +151,7 @@ public async Task ChatCompletions(OpenAIClientServiceTarget serviceTarget) new ChatMessage(ChatRole.User, "What temperature should I bake pizza at?"), }, }; - Response response = await client.GetChatCompletionsAsync( - deploymentOrModelName, - requestOptions); + Response response = await client.GetChatCompletionsAsync(requestOptions); Assert.That(response, Is.Not.Null); Assert.That(response.Value, Is.InstanceOf()); Assert.That(response.Value.Id, Is.Not.Null.Or.Empty); @@ -180,12 +174,13 @@ public async Task ChatCompletionsContentFilterCategories(OpenAIClientServiceTarg string deploymentOrModelName = OpenAITestBase.GetDeploymentOrModelName(serviceTarget, OpenAIClientScenario.ChatCompletions); var requestOptions = new ChatCompletionsOptions() { + DeploymentName = deploymentOrModelName, Messages = { new ChatMessage(ChatRole.User, "How do I cook a bell pepper?"), }, }; - Response response = await client.GetChatCompletionsAsync(deploymentOrModelName, requestOptions); + Response response = await client.GetChatCompletionsAsync(requestOptions); Assert.That(response, Is.Not.Null); Assert.That(response.Value, Is.Not.Null); @@ -211,10 +206,11 @@ string deploymentOrModelName = OpenAITestBase.GetDeploymentOrModelName(serviceTarget, 
OpenAIClientScenario.LegacyCompletions); var requestOptions = new CompletionsOptions() { + DeploymentName = deploymentOrModelName, Prompts = { "How do I cook a bell pepper?" }, Temperature = 0 }; - Response response = await client.GetCompletionsAsync(deploymentOrModelName, requestOptions); + Response response = await client.GetCompletionsAsync(requestOptions); Assert.That(response, Is.Not.Null); Assert.That(response.Value, Is.Not.Null); @@ -241,6 +237,7 @@ public async Task StreamingChatCompletions(OpenAIClientServiceTarget serviceTarg OpenAIClientScenario.ChatCompletions); var requestOptions = new ChatCompletionsOptions() { + DeploymentName = deploymentOrModelName, Messages = { new ChatMessage(ChatRole.System, "You are a helpful assistant."), @@ -251,7 +248,7 @@ public async Task StreamingChatCompletions(OpenAIClientServiceTarget serviceTarg MaxTokens = 512, }; Response streamingResponse - = await client.GetChatCompletionsStreamingAsync(deploymentOrModelName, requestOptions); + = await client.GetChatCompletionsStreamingAsync(requestOptions); Assert.That(streamingResponse, Is.Not.Null); using StreamingChatCompletions streamingChatCompletions = streamingResponse.Value; Assert.That(streamingChatCompletions, Is.InstanceOf()); @@ -290,6 +287,7 @@ public async Task AdvancedCompletionsOptions(OpenAIClientServiceTarget serviceTa string promptText = "Are bananas especially radioactive?"; var requestOptions = new CompletionsOptions() { + DeploymentName = deploymentOrModelName, Prompts = { promptText }, GenerationSampleCount = 3, Temperature = 0.75f, @@ -305,7 +303,7 @@ public async Task AdvancedCompletionsOptions(OpenAIClientServiceTarget serviceTa [15991] = -100, // 'anas' }, }; - Response response = await client.GetCompletionsAsync(deploymentOrModelName, requestOptions); + Response response = await client.GetCompletionsAsync(requestOptions); Assert.That(response, Is.Not.Null); string rawResponse = response.GetRawResponse().Content.ToString(); @@ -334,11 +332,14 @@ public 
async Task AdvancedCompletionsOptions(OpenAIClientServiceTarget serviceTa public void BadDeploymentFails(OpenAIClientServiceTarget serviceTarget) { OpenAIClient client = GetTestClient(serviceTarget); - var completionsRequest = new CompletionsOptions(); - completionsRequest.Prompts.Add("Hello world"); + var completionsRequest = new CompletionsOptions() + { + DeploymentName = "BAD_DEPLOYMENT_ID", + Prompts = { "Hello world" }, + }; RequestFailedException exception = Assert.ThrowsAsync(async () => { - await client.GetCompletionsAsync("BAD_DEPLOYMENT_ID", completionsRequest); + await client.GetCompletionsAsync(completionsRequest); }); Assert.AreEqual(404, exception.Status); Assert.That(exception.ErrorCode, Is.EqualTo("DeploymentNotFound")); @@ -353,6 +354,7 @@ public async Task TokenCutoff(OpenAIClientServiceTarget serviceTarget) string deploymentOrModelName = OpenAITestBase.GetDeploymentOrModelName(serviceTarget, OpenAIClientScenario.LegacyCompletions); var requestOptions = new CompletionsOptions() { + DeploymentName = deploymentOrModelName, Prompts = { "How long would it take an unladen swallow to travel between Seattle, WA" @@ -360,7 +362,7 @@ public async Task TokenCutoff(OpenAIClientServiceTarget serviceTarget) }, MaxTokens = 3, }; - Response response = await client.GetCompletionsAsync(deploymentOrModelName, requestOptions); + Response response = await client.GetCompletionsAsync(requestOptions); Assert.That(response, Is.Not.Null); Assert.That(response.Value, Is.Not.Null); Assert.That(response.Value.Choices, Is.Not.Null.Or.Empty); @@ -382,6 +384,7 @@ public async Task StreamingCompletions(OpenAIClientServiceTarget serviceTarget) string deploymentOrModelName = OpenAITestBase.GetDeploymentOrModelName(serviceTarget, OpenAIClientScenario.LegacyCompletions); var requestOptions = new CompletionsOptions() { + DeploymentName = deploymentOrModelName, Prompts = { "Tell me some jokes about mangos", @@ -391,9 +394,7 @@ public async Task 
StreamingCompletions(OpenAIClientServiceTarget serviceTarget) LogProbabilityCount = 1, }; - Response response = await client.GetCompletionsStreamingAsync( - deploymentOrModelName, - requestOptions); + Response response = await client.GetCompletionsStreamingAsync(requestOptions); Assert.That(response, Is.Not.Null); // StreamingCompletions implements IDisposable; capturing the .Value field of `response` with a `using` diff --git a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample01_Chatbot.cs b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample01_Chatbot.cs index 0b8687ee008d6..0056142fe5170 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample01_Chatbot.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample01_Chatbot.cs @@ -20,11 +20,13 @@ public void GetChatbotResponse() var client = new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()); #endregion - string deploymentName = "text-davinci-003"; - string prompt = "What is Azure OpenAI?"; - Console.Write($"Input: {prompt}"); + CompletionsOptions completionsOptions = new() + { + DeploymentName = "text-davinci-003", + Prompts = { "What is Azure OpenAI?" 
}, + }; - Response completionsResponse = client.GetCompletions(deploymentName, prompt); + Response completionsResponse = client.GetCompletions(completionsOptions); string completion = completionsResponse.Value.Choices[0].Text; Console.WriteLine($"Chatbot: {completion}"); #endregion diff --git a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample02_ChatbotWithKey.cs b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample02_ChatbotWithKey.cs index 09209f56edf86..bf4ce204c8447 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample02_ChatbotWithKey.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample02_ChatbotWithKey.cs @@ -19,25 +19,24 @@ public void GetMultipleResponsesWithSubscriptionKey() string endpoint = "https://myaccount.openai.azure.com/"; var client = new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(key)); - List examplePrompts = new(){ - "How are you today?", - "What is Azure OpenAI?", - "Why do children love dinosaurs?", - "Generate a proof of Euler's identity", - "Describe in single words only the good things that come into your mind about your mother.", + CompletionsOptions completionsOptions = new() + { + DeploymentName = "text-davinci-003", + Prompts = + { + "How are you today?", + "What is Azure OpenAI?", + "Why do children love dinosaurs?", + "Generate a proof of Euler's identity", + "Describe in single words only the good things that come into your mind about your mother." 
+ }, }; - string deploymentName = "text-davinci-003"; + Response completionsResponse = client.GetCompletions(completionsOptions); - foreach (string prompt in examplePrompts) + foreach (Choice choice in completionsResponse.Value.Choices) { - Console.Write($"Input: {prompt}"); - CompletionsOptions completionsOptions = new CompletionsOptions(); - completionsOptions.Prompts.Add(prompt); - - Response completionsResponse = client.GetCompletions(deploymentName, completionsOptions); - string completion = completionsResponse.Value.Choices[0].Text; - Console.WriteLine($"Chatbot: {completion}"); + Console.WriteLine($"Response for prompt {choice.Index}: {choice.Text}"); } #endregion } diff --git a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample03_SummarizeText.cs b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample03_SummarizeText.cs index 97ef9d0df7f6b..aef9f2a94301d 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample03_SummarizeText.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample03_SummarizeText.cs @@ -38,12 +38,11 @@ Summarize the following text. 
Console.Write($"Input: {summarizationPrompt}"); var completionsOptions = new CompletionsOptions() { + DeploymentName = "text-davinci-003", Prompts = { summarizationPrompt }, }; - string deploymentName = "text-davinci-003"; - - Response completionsResponse = client.GetCompletions(deploymentName, completionsOptions); + Response completionsResponse = client.GetCompletions(completionsOptions); string completion = completionsResponse.Value.Choices[0].Text; Console.WriteLine($"Summarization: {completion}"); #endregion diff --git a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample04_StreamingChat.cs b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample04_StreamingChat.cs index 21a25241f13d2..0c84d882c15a1 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample04_StreamingChat.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample04_StreamingChat.cs @@ -20,6 +20,7 @@ public async Task StreamingChatWithNonAzureOpenAI() var client = new OpenAIClient(nonAzureOpenAIApiKey, new OpenAIClientOptions()); var chatCompletionsOptions = new ChatCompletionsOptions() { + DeploymentName = "gpt-3.5-turbo", // Use DeploymentName for "model" with non-Azure clients Messages = { new ChatMessage(ChatRole.System, "You are a helpful assistant. 
You will talk like a pirate."), @@ -29,9 +30,8 @@ public async Task StreamingChatWithNonAzureOpenAI() } }; - Response response = await client.GetChatCompletionsStreamingAsync( - deploymentOrModelName: "gpt-3.5-turbo", - chatCompletionsOptions); + Response response + = await client.GetChatCompletionsStreamingAsync(chatCompletionsOptions); using StreamingChatCompletions streamingChatCompletions = response.Value; await foreach (StreamingChatChoice choice in streamingChatCompletions.GetChoicesStreaming()) diff --git a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample05_AzureOrNot.cs b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample05_AzureOrNot.cs index b3425873b3ebc..61379a0725965 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample05_AzureOrNot.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample05_AzureOrNot.cs @@ -23,9 +23,11 @@ public async Task GetCompletionsFromAzureOrNonAzureOpenAIAsync(bool useAzureOpen : new OpenAIClient("your-api-key-from-platform.openai.com"); #endregion - Response response = await client.GetCompletionsAsync( - "text-davinci-003", // assumes a matching model deployment or model name - "Hello, world!"); + Response response = await client.GetCompletionsAsync(new CompletionsOptions() + { + DeploymentName = "text-davinci-003", // assumes a matching model deployment or model name + Prompts = { "Hello, world!" 
}, + }); foreach (Choice choice in response.Value.Choices) { diff --git a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample07_ChatFunctions.cs b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample07_ChatFunctions.cs index 7b747adc2dfdc..f876936387df7 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample07_ChatFunctions.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample07_ChatFunctions.cs @@ -54,16 +54,17 @@ public async Task ChatFunctions() new(ChatRole.User, "What is the weather like in Boston?"), }; - var chatCompletionsOptions = new ChatCompletionsOptions(); + var chatCompletionsOptions = new ChatCompletionsOptions() + { + DeploymentName = "gpt-35-turbo-0613", + }; foreach (ChatMessage chatMessage in conversationMessages) { chatCompletionsOptions.Messages.Add(chatMessage); } chatCompletionsOptions.Functions.Add(getWeatherFuntionDefinition); - Response response = await client.GetChatCompletionsAsync( - "gpt-35-turbo-0613", - chatCompletionsOptions); + Response response = await client.GetChatCompletionsAsync(chatCompletionsOptions); #endregion #region Snippet:ChatFunctions:HandleFunctionCall diff --git a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample08_UseYourOwnData.cs b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample08_UseYourOwnData.cs index fb142a89a82b9..e12ac32a75fb4 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample08_UseYourOwnData.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample08_UseYourOwnData.cs @@ -23,6 +23,7 @@ public async Task ChatUsingYourOwnData() #region Snippet:ChatUsingYourOwnData var chatCompletionsOptions = new ChatCompletionsOptions() { + DeploymentName = "gpt-35-turbo-0613", Messages = { new ChatMessage( @@ -46,9 +47,7 @@ public async Task ChatUsingYourOwnData() } } }; - Response response = await client.GetChatCompletionsAsync( - "gpt-35-turbo-0613", - chatCompletionsOptions); + Response response = await client.GetChatCompletionsAsync(chatCompletionsOptions); ChatMessage message = 
response.Value.Choices[0].Message; // The final, data-informed response still appears in the ChatMessages as usual Console.WriteLine($"{message.Role}: {message.Content}"); diff --git a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample09_TranscribeAudio.cs b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample09_TranscribeAudio.cs index 4ee6c7e8a0552..081d3319ae6e1 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample09_TranscribeAudio.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample09_TranscribeAudio.cs @@ -23,13 +23,13 @@ public async Task TranscribeAudio() var transcriptionOptions = new AudioTranscriptionOptions() { + DeploymentName = "my-whisper-deployment", // whisper-1 as model name for non-Azure OpenAI AudioData = BinaryData.FromStream(audioStreamFromFile), ResponseFormat = AudioTranscriptionFormat.Verbose, }; - Response transcriptionResponse = await client.GetAudioTranscriptionAsync( - deploymentId: "my-whisper-deployment", // whisper-1 as model name for non-Azure OpenAI - transcriptionOptions); + Response transcriptionResponse + = await client.GetAudioTranscriptionAsync(transcriptionOptions); AudioTranscription transcription = transcriptionResponse.Value; // When using Simple, SRT, or VTT formats, only transcription.Text will be populated diff --git a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample10_TranslateAudio.cs b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample10_TranslateAudio.cs index 21a1885949885..02716ef0310e4 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample10_TranslateAudio.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample10_TranslateAudio.cs @@ -23,13 +23,12 @@ public async Task TranslateAudio() var translationOptions = new AudioTranslationOptions() { + DeploymentName = "my-whisper-deployment", // whisper-1 as model name for non-Azure OpenAI AudioData = BinaryData.FromStream(audioStreamFromFile), ResponseFormat = AudioTranslationFormat.Verbose, }; - Response translationResponse = await client.GetAudioTranslationAsync( 
- deploymentId: "my-whisper-deployment", // whisper-1 as model name for non-Azure OpenAI - translationOptions); + Response translationResponse = await client.GetAudioTranslationAsync(translationOptions); AudioTranslation translation = translationResponse.Value; // When using Simple, SRT, or VTT formats, only translation.Text will be populated diff --git a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample11_Embeddings.cs b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample11_Embeddings.cs index 92278a8272e33..47e65fa9b33b8 100644 --- a/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample11_Embeddings.cs +++ b/sdk/openai/Azure.AI.OpenAI/tests/Samples/Sample11_Embeddings.cs @@ -18,9 +18,12 @@ public async Task GenerateEmbeddings() var client = new OpenAIClient(new Uri(endpoint), new DefaultAzureCredential()); #region Snippet:GenerateEmbeddings - string deploymentOrModelName = "text-embedding-ada-002"; - EmbeddingsOptions embeddingsOptions = new("Your text string goes here"); - Response response = await client.GetEmbeddingsAsync(deploymentOrModelName, embeddingsOptions); + EmbeddingsOptions embeddingsOptions = new() + { + DeploymentName = "text-embedding-ada-002", + Input = { "Your text string goes here" }, + }; + Response response = await client.GetEmbeddingsAsync(embeddingsOptions); // The response includes the generated embedding. EmbeddingItem item = response.Value.Data[0];