This example assumes `using Ollama;` is in scope and that `apiKey` contains your Ollama API key.
// Spin up a container running the chat-capable test model; it is disposed
// asynchronously when this scope exits.
await using var container = await Environment.PrepareAsync(TestModels.Chat);

// Seed the conversation: a system prompt steering the model toward tool use,
// followed by the user's question.
var messages = new List<ChatMessage>
{
    "You are a helpful weather assistant. Use the provided tools for weather questions.".AsSystemMessage(),
    "What is the current temperature in Dubai, UAE in Celsius?".AsUserMessage(),
};
var model = TestModels.Chat;
try
{
    var service = new WeatherService();
    var tools = service.AsTools().AsOllamaTools();

    // Deterministic sampling so the example produces reproducible output.
    // Built once and reused for both chat rounds (was duplicated before).
    var options = new ModelOptions
    {
        Temperature = 0,
        Seed = 1,
    };

    // Round 1: the model is expected to respond with tool calls rather than
    // a final answer.
    var response = await container.Client.ChatAsync(
        model,
        messages,
        tools: tools,
        options: options);
    var assistantMessage = response.Message ?? throw new InvalidOperationException("Expected a response message.");
    messages.Add(assistantMessage.ToChatMessage());

    // Execute each requested tool call and append the JSON result as a tool
    // message. Guard with a pattern match instead of the null-forgiving `!`:
    // ToolCalls is null when the model answers directly without using tools,
    // and dereferencing it unconditionally would throw NullReferenceException.
    if (assistantMessage.ToolCalls is { } toolCalls)
    {
        foreach (var call in toolCalls)
        {
            var argumentsAsJson = call.Function?.Arguments == null
                ? string.Empty
                : call.Function.Arguments.AsJson();
            var json = await service.CallAsync(
                functionName: call.Function?.Name ?? string.Empty,
                argumentsAsJson: argumentsAsJson);
            messages.Add(json.AsToolMessage());
        }
    }

    // Round 2: the model sees the tool results and produces the final answer.
    response = await container.Client.ChatAsync(
        model,
        messages,
        tools: tools,
        options: options);
    messages.Add((response.Message ?? throw new InvalidOperationException("Expected a response message.")).ToChatMessage());
}
finally
{
    // Always print the full transcript, even if a round above threw.
    Console.WriteLine(Ollama.Chat.PrintMessages(messages));
}