// Pull the model, then request a single (non-streamed) completion and print it.
// NOTE: requires a running Ollama server reachable at the client's default endpoint.
using var api = new OllamaApiClient();

// Ensure the model is available locally before generating.
await api.Models.PullModelAsync("llama3.2").EnsureSuccessAsync();

var response = await api.Completions.GenerateCompletionAsync(new GenerateCompletionRequest
{
    Model = "llama3.2",
    Prompt = "answer me just \"123\"",
    // We await one fully-materialized response and read .Response once below,
    // so streaming must be disabled (Stream = true would deliver partial chunks).
    Stream = false,
    Options = new RequestOptions
    {
        // Temperature 0 for deterministic output.
        Temperature = 0,
    },
});

Console.WriteLine(response.Response);