Generate a completion by streaming the responses, then pass the returned context to a follow-up request so the model keeps the conversation state:
using var api = new OllamaApiClient();
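// Pull the model so it is available locally and make sure the download completed successfully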
await api.Models.PullModelAsync("llama3.2").EnsureSuccessAsync();
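// Stream a completion; each streamed response carries the context that encodes the conversation so far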
IList<long>? context = null;
var enumerable = api.Completions.GenerateCompletionAsync("llama3.2", "answer 5 random words");
await foreach (var response in enumerable)
{
    Console.WriteLine($"> {response.Response}");
    context = response.Context;
}
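// Reuse the captured context so this follow-up request continues the same conversation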
var lastResponse = await api.Completions.GenerateCompletionAsync("llama3.2", "answer 123", stream: false, context: context).WaitAsync();
Console.WriteLine(lastResponse.Response);
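With stream: false the server sends back a single response, so WaitAsync() resolves to that one result from the underlying stream.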