.Net: Ollama - Adding Missing Samples (#8309)
### Motivation and Context

Adding missing samples using the new Ollama Connector:
- Embedding generation
- Text Generation (see the sketch below)
- Chat Completion
- AIModelRouting Demo with Ollama Connector
1 parent 6ee36b8, commit 3b1d2dd
Showing 13 changed files with 486 additions and 21 deletions.
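The commit message lists a text-generation sample among the additions, but that file is not part of the diff excerpt shown below. For rough orientation only, here is a minimal sketch of how the Ollama connector is typically registered for plain text generation. The `AddOllamaTextGeneration` extension, the `llama3.1` model name, and the localhost endpoint are assumptions for illustration, not content taken from this commit.

```csharp
// Hedged sketch only: assumes the Ollama connector exposes AddOllamaTextGeneration,
// that an Ollama server is listening on localhost:11434, and that "llama3.1" is pulled locally.
using Microsoft.SemanticKernel;

var kernel = Kernel.CreateBuilder()
    .AddOllamaTextGeneration(
        endpoint: new Uri("http://localhost:11434"), // assumed local Ollama endpoint
        modelId: "llama3.1")                         // assumed model name
    .Build();

// InvokePromptAsync is the same kernel API the chat samples in this commit use.
var result = await kernel.InvokePromptAsync("Write a one-sentence summary of the Iliad.");
Console.WriteLine(result);
```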
dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs (114 additions & 0 deletions)
```csharp
// Copyright (c) Microsoft. All rights reserved.

using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Ollama;

namespace ChatCompletion;

// The following example shows how to use Semantic Kernel with Ollama Chat Completion API
public class Ollama_ChatCompletion(ITestOutputHelper output) : BaseTest(output)
{
    [Fact]
    public async Task ServicePromptAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.ModelId);

        Console.WriteLine("======== Ollama - Chat Completion ========");

        var chatService = new OllamaChatCompletionService(
            endpoint: new Uri(TestConfiguration.Ollama.Endpoint),
            modelId: TestConfiguration.Ollama.ModelId);

        Console.WriteLine("Chat content:");
        Console.WriteLine("------------------------");

        var chatHistory = new ChatHistory("You are a librarian, expert about books");

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
        await MessageOutputAsync(chatHistory);

        // First assistant message
        var reply = await chatService.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        await MessageOutputAsync(chatHistory);

        // Second user message
        chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion");
        await MessageOutputAsync(chatHistory);

        // Second assistant message
        reply = await chatService.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        await MessageOutputAsync(chatHistory);

        /* Output:
        Chat content:
        ------------------------
        System: You are a librarian, expert about books
        ------------------------
        User: Hi, I'm looking for book suggestions
        ------------------------
        Assistant: Sure, I'd be happy to help! What kind of books are you interested in? Fiction or non-fiction? Any particular genre?
        ------------------------
        User: I love history and philosophy, I'd like to learn something new about Greece, any suggestion?
        ------------------------
        Assistant: Great! For history and philosophy books about Greece, here are a few suggestions:
        1. "The Greeks" by H.D.F. Kitto - This is a classic book that provides an overview of ancient Greek history and culture, including their philosophy, literature, and art.
        2. "The Republic" by Plato - This is one of the most famous works of philosophy in the Western world, and it explores the nature of justice and the ideal society.
        3. "The Peloponnesian War" by Thucydides - This is a detailed account of the war between Athens and Sparta in the 5th century BCE, and it provides insight into the political and military strategies of the time.
        4. "The Iliad" by Homer - This epic poem tells the story of the Trojan War and is considered one of the greatest works of literature in the Western canon.
        5. "The Histories" by Herodotus - This is a comprehensive account of the Persian Wars and provides a wealth of information about ancient Greek culture and society.
        I hope these suggestions are helpful!
        ------------------------
        */
    }

    [Fact]
    public async Task ChatPromptAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.ModelId);

        StringBuilder chatPrompt = new("""
            <message role="system">You are a librarian, expert about books</message>
            <message role="user">Hi, I'm looking for book suggestions</message>
            """);

        var kernel = Kernel.CreateBuilder()
            .AddOllamaChatCompletion(
                endpoint: new Uri(TestConfiguration.Ollama.Endpoint ?? "http://localhost:11434"),
                modelId: TestConfiguration.Ollama.ModelId)
            .Build();

        var reply = await kernel.InvokePromptAsync(chatPrompt.ToString());

        chatPrompt.AppendLine($"<message role=\"assistant\"><![CDATA[{reply}]]></message>");
        chatPrompt.AppendLine("<message role=\"user\">I love history and philosophy, I'd like to learn something new about Greece, any suggestion</message>");

        reply = await kernel.InvokePromptAsync(chatPrompt.ToString());

        Console.WriteLine(reply);
    }

    /// <summary>
    /// Outputs the last message of the chat history
    /// </summary>
    private Task MessageOutputAsync(ChatHistory chatHistory)
    {
        var message = chatHistory.Last();

        Console.WriteLine($"{message.Role}: {message.Content}");
        Console.WriteLine("------------------------");

        return Task.CompletedTask;
    }
}
```
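The sample above reads its endpoint and model id from `TestConfiguration`, which belongs to the samples' xUnit harness. As a minimal sketch of the same `ServicePromptAsync` flow in a standalone console app, with the endpoint and model id hard-coded as assumptions (`http://localhost:11434` and `llama3.1` are placeholders, not values from this commit):

```csharp
// Minimal console sketch of the ServicePromptAsync flow above.
// Assumptions: Ollama is running locally and the "llama3.1" model has been pulled.
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Ollama;

var chatService = new OllamaChatCompletionService(
    endpoint: new Uri("http://localhost:11434"), // assumed local Ollama endpoint
    modelId: "llama3.1");                        // assumed model name

var chatHistory = new ChatHistory("You are a librarian, expert about books");
chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");

// Same call the sample uses; returns the assistant's full reply once generation completes.
var reply = await chatService.GetChatMessageContentAsync(chatHistory);
chatHistory.Add(reply);
Console.WriteLine($"{reply.Role}: {reply.Content}");
```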
dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs (172 additions & 0 deletions)
```csharp
// Copyright (c) Microsoft. All rights reserved.

using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Ollama;

namespace ChatCompletion;

/// <summary>
/// These examples demonstrate the ways different content types are streamed by Ollama via the chat completion service.
/// </summary>
public class Ollama_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output)
{
    /// <summary>
    /// This example demonstrates chat completion streaming using Ollama.
    /// </summary>
    [Fact]
    public Task StreamChatAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.ModelId);

        Console.WriteLine("======== Ollama - Chat Completion Streaming ========");

        var chatService = new OllamaChatCompletionService(
            endpoint: new Uri(TestConfiguration.Ollama.Endpoint),
            modelId: TestConfiguration.Ollama.ModelId);

        return this.StartStreamingChatAsync(chatService);
    }

    [Fact]
    public async Task StreamChatPromptAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.ModelId);

        StringBuilder chatPrompt = new("""
            <message role="system">You are a librarian, expert about books</message>
            <message role="user">Hi, I'm looking for book suggestions</message>
            """);

        var kernel = Kernel.CreateBuilder()
            .AddOllamaChatCompletion(
                endpoint: new Uri(TestConfiguration.Ollama.Endpoint),
                modelId: TestConfiguration.Ollama.ModelId)
            .Build();

        var reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString());

        chatPrompt.AppendLine($"<message role=\"assistant\"><![CDATA[{reply}]]></message>");
        chatPrompt.AppendLine("<message role=\"user\">I love history and philosophy, I'd like to learn something new about Greece, any suggestion</message>");

        reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString());

        Console.WriteLine(reply);
    }

    /// <summary>
    /// This example demonstrates how the chat completion service streams text content.
    /// It shows how to access the response update via StreamingChatMessageContent.Content property
    /// and alternatively via the StreamingChatMessageContent.Items property.
    /// </summary>
    [Fact]
    public async Task StreamTextFromChatAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.ModelId);

        Console.WriteLine("======== Stream Text from Chat Content ========");

        // Create chat completion service
        var chatService = new OllamaChatCompletionService(
            endpoint: new Uri(TestConfiguration.Ollama.Endpoint),
            modelId: TestConfiguration.Ollama.ModelId);

        // Create chat history with initial system and user messages
        ChatHistory chatHistory = new("You are a librarian, an expert on books.");
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions.");
        chatHistory.AddUserMessage("I love history and philosophy. I'd like to learn something new about Greece, any suggestion?");

        // Start streaming chat based on the chat history
        await foreach (StreamingChatMessageContent chatUpdate in chatService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            // Access the response update via StreamingChatMessageContent.Content property
            Console.Write(chatUpdate.Content);

            // Alternatively, the response update can be accessed via the StreamingChatMessageContent.Items property
            Console.Write(chatUpdate.Items.OfType<StreamingTextContent>().FirstOrDefault());
        }
    }

    private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService)
    {
        Console.WriteLine("Chat content:");
        Console.WriteLine("------------------------");

        var chatHistory = new ChatHistory("You are a librarian, expert about books");
        OutputLastMessage(chatHistory);

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
        OutputLastMessage(chatHistory);

        // First assistant message
        await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);

        // Second user message
        chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?");
        OutputLastMessage(chatHistory);

        // Second assistant message
        await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);
    }

    private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole)
    {
        bool roleWritten = false;
        string fullMessage = string.Empty;

        await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            if (!roleWritten && chatUpdate.Role.HasValue)
            {
                Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}");
                roleWritten = true;
            }

            if (chatUpdate.Content is { Length: > 0 })
            {
                fullMessage += chatUpdate.Content;
                Console.Write(chatUpdate.Content);
            }
        }

        Console.WriteLine("\n------------------------");
        chatHistory.AddMessage(authorRole, fullMessage);
    }

    private async Task<string> StreamMessageOutputFromKernelAsync(Kernel kernel, string prompt)
    {
        bool roleWritten = false;
        string fullMessage = string.Empty;

        await foreach (var chatUpdate in kernel.InvokePromptStreamingAsync<StreamingChatMessageContent>(prompt))
        {
            if (!roleWritten && chatUpdate.Role.HasValue)
            {
                Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}");
                roleWritten = true;
            }

            if (chatUpdate.Content is { Length: > 0 })
            {
                fullMessage += chatUpdate.Content;
                Console.Write(chatUpdate.Content);
            }
        }

        Console.WriteLine("\n------------------------");
        return fullMessage;
    }

    /// <summary>
    /// Outputs the last message of the chat history
    /// </summary>
    private void OutputLastMessage(ChatHistory chatHistory)
    {
        var message = chatHistory.Last();

        Console.WriteLine($"{message.Role}: {message.Content}");
        Console.WriteLine("------------------------");
    }
}
```
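Outside the test harness, the streaming pattern above reduces to iterating the async stream and writing each update as it arrives. A minimal sketch, again with the endpoint and model id hard-coded as assumptions rather than values from this commit:

```csharp
// Minimal streaming sketch based on the GetStreamingChatMessageContentsAsync usage above.
// Assumptions: local Ollama endpoint and "llama3.1" model; both are placeholders.
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Ollama;

var chatService = new OllamaChatCompletionService(
    endpoint: new Uri("http://localhost:11434"),
    modelId: "llama3.1");

var chatHistory = new ChatHistory("You are a librarian, expert about books");
chatHistory.AddUserMessage("Recommend one book about ancient Greece.");

// Each update typically carries a small piece of the reply; print pieces as they arrive
// and keep the concatenation so the full reply can be added back to the history.
var fullMessage = string.Empty;
await foreach (var update in chatService.GetStreamingChatMessageContentsAsync(chatHistory))
{
    Console.Write(update.Content);
    fullMessage += update.Content;
}

chatHistory.AddAssistantMessage(fullMessage);
```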
dotnet/samples/Concepts/Memory/Ollama_EmbeddingGeneration.cs (35 additions & 0 deletions)
```csharp
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Embeddings;
using xRetry;

#pragma warning disable format // Format item can be simplified
#pragma warning disable CA1861 // Avoid constant arrays as arguments

namespace Memory;

// The following example shows how to use Semantic Kernel with Ollama API.
public class Ollama_EmbeddingGeneration(ITestOutputHelper output) : BaseTest(output)
{
    [RetryFact(typeof(HttpOperationException))]
    public async Task RunEmbeddingAsync()
    {
        Assert.NotNull(TestConfiguration.Ollama.EmbeddingModelId);

        Console.WriteLine("\n======= Ollama - Embedding Example ========\n");

        Kernel kernel = Kernel.CreateBuilder()
            .AddOllamaTextEmbeddingGeneration(
                endpoint: new Uri(TestConfiguration.Ollama.Endpoint),
                modelId: TestConfiguration.Ollama.EmbeddingModelId)
            .Build();

        var embeddingGenerator = kernel.GetRequiredService<ITextEmbeddingGenerationService>();

        // Generate embeddings for each chunk.
        var embeddings = await embeddingGenerator.GenerateEmbeddingsAsync(["John: Hello, how are you?\nRoger: Hey, I'm Roger!"]);

        Console.WriteLine($"Generated {embeddings.Count} embeddings for the provided text");
    }
}
```
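The embedding sample stops at counting the returned vectors. A common next step is comparing them; the sketch below generates two embeddings and computes their cosine similarity. The model id `mxbai-embed-large`, the endpoint, and the hand-rolled `CosineSimilarity` helper are assumptions for illustration and are not part of this commit.

```csharp
// Hedged sketch: compare two Ollama embeddings by cosine similarity.
// Assumes a local Ollama endpoint and that an embedding model ("mxbai-embed-large" here) is pulled.
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Embeddings;

var kernel = Kernel.CreateBuilder()
    .AddOllamaTextEmbeddingGeneration(
        endpoint: new Uri("http://localhost:11434"), // assumed endpoint
        modelId: "mxbai-embed-large")                // assumed embedding model
    .Build();

var generator = kernel.GetRequiredService<ITextEmbeddingGenerationService>();

// GenerateEmbeddingsAsync returns one ReadOnlyMemory<float> per input string.
var embeddings = await generator.GenerateEmbeddingsAsync(
    ["The Peloponnesian War by Thucydides", "A history of ancient Greek warfare"]);

Console.WriteLine($"Similarity: {CosineSimilarity(embeddings[0].Span, embeddings[1].Span):F4}");

// Hypothetical helper, not part of the Semantic Kernel API.
static float CosineSimilarity(ReadOnlySpan<float> a, ReadOnlySpan<float> b)
{
    float dot = 0, normA = 0, normB = 0;
    for (int i = 0; i < a.Length; i++)
    {
        dot += a[i] * b[i];
        normA += a[i] * a[i];
        normB += b[i] * b[i];
    }
    return dot / (MathF.Sqrt(normA) * MathF.Sqrt(normB));
}
```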