GetCompletionWithOptions
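
This example demonstrates passing request-level options to a completion call. It pulls the llama3.2 model, then sends a GenerateCompletionRequest whose Options property carries a RequestOptions instance with Temperature set to 0. The snippet assumes the client types are available from the library's root Ollama namespace.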

using Ollama;

using var api = new OllamaApiClient();

// Make sure the model is available locally before requesting a completion.
await api.Models.PullModelAsync("llama3.2").EnsureSuccessAsync();

// Ask for a completion, fixing Temperature at 0 so sampling is effectively greedy.
var response = await api.Completions.GenerateCompletionAsync(new GenerateCompletionRequest
{
    Model = "llama3.2",
    Prompt = "answer me just \"123\"",
    Stream = true,
    Options = new RequestOptions
    {
        Temperature = 0,
    },
});
Console.WriteLine(response.Response);
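
With Temperature at 0, sampling is effectively greedy, so repeated runs should produce the same short answer requested by the prompt, which makes the snippet useful as a quick, reproducible smoke test. Any other sampling parameter exposed by RequestOptions can be set the same way through the Options initializer.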