File tree: 2 files changed, +44 −0 lines changed

dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionWithVision.cs (new file)
+ // Copyright (c) Microsoft. All rights reserved.
+
+ using Microsoft.SemanticKernel;
+ using Microsoft.SemanticKernel.ChatCompletion;
+ using Resources;
+
+ namespace ChatCompletion;
+
+ /// <summary>
+ /// This sample shows how to use the llama3.2-vision model with different content types (text and image).
+ /// </summary>
+ public class Ollama_ChatCompletionWithVision(ITestOutputHelper output) : BaseTest(output)
+ {
+     /// <summary>
+     /// This sample uses a local image file and sends it to the model along
+     /// with a text message to get the description of the image.
+     /// </summary>
+     [Fact]
+     public async Task GetLocalImageDescription()
+     {
+         Console.WriteLine($"======== Ollama - {nameof(GetLocalImageDescription)} ========");
+
+         var imageBytes = await EmbeddedResource.ReadAllAsync("sample_image.jpg");
+
+         var kernel = Kernel.CreateBuilder()
+             .AddOllamaChatCompletion(modelId: "llama3.2-vision", endpoint: new Uri(TestConfiguration.Ollama.Endpoint))
+             .Build();
+
+         var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
+
+         var chatHistory = new ChatHistory("You are a friendly assistant.");
+
+         chatHistory.AddUserMessage(
+         [
+             new TextContent("What’s in this image?"),
+             new ImageContent(imageBytes, "image/jpg")
+         ]);
+
+         var reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory);
+
+         Console.WriteLine(reply.Content);
+     }
+ }
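For quick local testing outside the sample's xUnit/BaseTest harness, here is a minimal standalone sketch of the same call. It is not part of the PR: it assumes the Microsoft.SemanticKernel package plus the Ollama connector, a local Ollama server at http://localhost:11434 (the real sample reads the endpoint from TestConfiguration.Ollama.Endpoint), the llama3.2-vision model pulled locally, and a sample_image.jpg on disk in place of the embedded resource. Depending on the connector version, the Ollama builder extension may be marked experimental and require suppressing the corresponding SKEXP diagnostic.

// Standalone sketch (not part of the PR) of the same multimodal request.
// Assumptions: local Ollama at http://localhost:11434 with llama3.2-vision pulled,
// and an image file named sample_image.jpg next to the executable.
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

var kernel = Kernel.CreateBuilder()
    .AddOllamaChatCompletion(modelId: "llama3.2-vision", endpoint: new Uri("http://localhost:11434"))
    .Build();

var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();

var chatHistory = new ChatHistory("You are a friendly assistant.");

// Read the image from disk instead of the test project's embedded resource.
var imageBytes = await File.ReadAllBytesAsync("sample_image.jpg");

chatHistory.AddUserMessage(
[
    new TextContent("What's in this image?"),
    new ImageContent(imageBytes, "image/jpg")
]);

var reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory);
Console.WriteLine(reply.Content);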
@@ -72,6 +72,7 @@ dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCom
  - [MultipleProviders_ChatHistoryReducer](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MultipleProviders_ChatHistoryReducer.cs)
  - [Ollama_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs)
  - [Ollama_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs)
+ - [Ollama_ChatCompletionWithVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionWithVision.cs)
  - [Onnx_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletion.cs)
  - [Onnx_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletionStreaming.cs)
  - [OpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs)
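The list above also links the existing Ollama_ChatCompletionStreaming sample. For orientation only (not part of this PR), a minimal sketch of how the vision-enabled chat history built in the new sample could be streamed instead, assuming the same kernel and chatHistory variables and the standard IChatCompletionService streaming API:

// Hedged sketch (not part of the PR): reuse the kernel/chatHistory from
// Ollama_ChatCompletionWithVision and stream the model's reply chunk by chunk.
var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
await foreach (var chunk in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory))
{
    // Each chunk carries a partial piece of the assistant's text reply.
    Console.Write(chunk.Content);
}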