Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

.Net: add amazon nova support (text) only #10021

Draft
wants to merge 1 commit into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ internal IBedrockTextGenerationService CreateTextGenerationService(string modelI
}
throw new NotSupportedException($"Unsupported AI21 model: {modelId}");
case "AMAZON":
if (modelName.StartsWith("titan-", StringComparison.OrdinalIgnoreCase))
if (modelName.StartsWith("titan-", StringComparison.OrdinalIgnoreCase) || modelName.StartsWith("nova-", StringComparison.OrdinalIgnoreCase))
{
return new AmazonService();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
using System.Runtime.CompilerServices;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Amazon.BedrockRuntime;
Expand Down Expand Up @@ -76,7 +77,7 @@ internal async Task<IReadOnlyList<TextContent>> InvokeBedrockModelAsync(
try
{
var requestBody = this._ioTextService.GetInvokeModelRequestBody(this._modelId, prompt, executionSettings);
using var requestBodyStream = new MemoryStream(JsonSerializer.SerializeToUtf8Bytes(requestBody));
using var requestBodyStream = new MemoryStream(JsonSerializer.SerializeToUtf8Bytes(requestBody, new JsonSerializerOptions { DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull }));
invokeRequest.Body = requestBodyStream;

response = await this._bedrockRuntime.InvokeModelAsync(invokeRequest, cancellationToken).ConfigureAwait(false);
Expand Down Expand Up @@ -111,7 +112,7 @@ internal async Task<IReadOnlyList<TextContent>> InvokeBedrockModelAsync(
}
activityStatus = BedrockClientUtilities.ConvertHttpStatusCodeToActivityStatusCode(response.HttpStatusCode);
activity?.SetStatus(activityStatus);
IReadOnlyList<TextContent> textResponse = this._ioTextService.GetInvokeResponseBody(response);
IReadOnlyList<TextContent> textResponse = this._ioTextService.GetInvokeResponseBody(this._modelId, response);
activity?.SetCompletionResponse(textResponse);
return textResponse;
}
Expand Down Expand Up @@ -178,7 +179,7 @@ internal async IAsyncEnumerable<StreamingTextContent> StreamTextAsync(
{
continue;
}
IEnumerable<string> texts = this._ioTextService.GetTextStreamOutput(chunk);
IEnumerable<string> texts = this._ioTextService.GetTextStreamOutput(this._modelId, chunk);
foreach (var text in texts)
{
var content = new StreamingTextContent(text);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,15 @@ internal interface IBedrockTextGenerationService
/// <summary>
/// Extracts the text contents from the <see cref="InvokeModelResponse"/>.
/// </summary>
/// <param name="modelId">The model ID, used to determine how the response body is deserialized for that model family.</param>
/// <param name="response">The <see cref="InvokeModelResponse"/> instance to be returned from the InvokeAsync Bedrock call.</param>
/// <returns>The list of TextContent objects for the Semantic Kernel output.</returns>
internal IReadOnlyList<TextContent> GetInvokeResponseBody(InvokeModelResponse response);
internal IReadOnlyList<TextContent> GetInvokeResponseBody(string modelId, InvokeModelResponse response);

/// <summary>
/// Converts the streaming JSON into <see cref="IEnumerable{String}"/> for output.
/// </summary>
/// <param name="modelId">The model ID, used to determine how the streaming chunk is parsed for that model family.</param>
/// <param name="chunk">The payloadPart bytes provided from the streaming response.</param>
/// <returns><see cref="IEnumerable{String}"/> output strings.</returns>
internal IEnumerable<string> GetTextStreamOutput(JsonNode chunk);
internal IEnumerable<string> GetTextStreamOutput(string modelId, JsonNode chunk);
}
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ public object GetInvokeModelRequestBody(string modelId, string prompt, PromptExe
}

/// <inheritdoc/>
public IReadOnlyList<TextContent> GetInvokeResponseBody(InvokeModelResponse response)
public IReadOnlyList<TextContent> GetInvokeResponseBody(string modelId, InvokeModelResponse response)
{
using var reader = new StreamReader(response.Body);
var responseBody = JsonSerializer.Deserialize<AI21JambaResponse.AI21TextResponse>(reader.ReadToEnd());
Expand Down Expand Up @@ -108,7 +108,7 @@ public ConverseRequest GetConverseRequest(string modelId, ChatHistory chatHistor
}

/// <inheritdoc/>
public IEnumerable<string> GetTextStreamOutput(JsonNode chunk)
public IEnumerable<string> GetTextStreamOutput(string modelId, JsonNode chunk)
{
var choiceDeltaContent = chunk["choices"]?[0]?["delta"]?["content"];
if (choiceDeltaContent is not null)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ public object GetInvokeModelRequestBody(string modelId, string prompt, PromptExe
}

/// <inheritdoc/>
public IReadOnlyList<TextContent> GetInvokeResponseBody(InvokeModelResponse response)
public IReadOnlyList<TextContent> GetInvokeResponseBody(string modelId, InvokeModelResponse response)
{
using var reader = new StreamReader(response.Body);
var responseBody = JsonSerializer.Deserialize<AI21JurassicResponse>(reader.ReadToEnd());
Expand All @@ -48,7 +48,7 @@ public IReadOnlyList<TextContent> GetInvokeResponseBody(InvokeModelResponse resp
}

/// <inheritdoc/>
public IEnumerable<string> GetTextStreamOutput(JsonNode chunk)
public IEnumerable<string> GetTextStreamOutput(string modelId, JsonNode chunk)
{
throw new NotSupportedException("Streaming not supported by this model.");
}
Expand Down
Loading
Loading