// Quick start: connect to a local Ollama server, pull a model with progress,
// create an embedding, stream/await completions, and run an interactive chat.
using var ollama = new OllamaClient();
// or if you have a custom server
// using var ollama = new OllamaClient(baseUri: new Uri("http://10.10.5.85:11434"));

var models = await ollama.ListAsync();

// Pulling a model and reporting progress
await foreach (var response in ollama.PullAsStreamAsync("all-minilm"))
{
    Console.WriteLine($"{response.Status}. Progress: {response.Completed}/{response.Total}");
}

// or just pull the model and wait for it to finish
await ollama.PullAsStreamAsync("all-minilm").EnsureSuccessAsync();

// Generating an embedding
var embedding = await ollama.EmbedAsync(model: "all-minilm", input: "hello");

// Streaming a completion directly into the console
var enumerable = ollama.GenerateAsStreamAsync("llama3.2", "answer 5 random words");
await foreach (var response in enumerable)
{
    Console.WriteLine($"> {response.Response}");
}

var lastResponse = await ollama.GenerateAsync("llama3.2", "answer 123");
Console.WriteLine(lastResponse.Response);

// Interactive chat: seed the conversation once, then echo the model's
// reply to every line the user types.
var chat = ollama.Chat("mistral");
var message = await chat.SendAsync("answer 123");
Console.WriteLine(message.Content);
while (true)
{
    var newMessage = Console.ReadLine();
    // FIX: the original loop discarded the reply to the user's input and
    // re-sent the hard-coded "answer 123" prompt on every iteration.
    var reply = await chat.SendAsync(newMessage);
    Console.WriteLine(reply.Content);
}
Streaming Chat State
1 2 3 4 5 6 7 8 91011121314151617
// Stream a chat reply chunk-by-chunk to the console, then print the
// fully accumulated message content.
using var ollama = new OllamaClient();

var chat = ollama.Chat("mistral");
var message = await chat.SendAsync(
    message: "answer 5 random words",
    onResponseChunk: (isFirstChunk, chunk) =>
    {
        // Prefix the very first chunk so the stream reads like a prompt.
        if (isFirstChunk)
        {
            Console.Write("> ");
        }

        Console.Write(chunk);
    });
Console.WriteLine();
Console.WriteLine(message.Content);
Tools
1 2 3 4 5 6 7 8 91011121314151617
// Register a weather tool service and let the chat call it automatically.
using var ollama = new OllamaClient();

var chat = ollama.Chat(
    model: "llama3.2",
    systemMessage: "You are a helpful weather assistant.",
    autoCallTools: true);

var service = new WeatherService();
chat.AddToolService(service.AsTools().AsOllamaTools(), service.AsCalls());

try
{
    _ = await chat.SendAsync("What is the current temperature in Dubai, UAE in Celsius?");
}
finally
{
    // Dump the full transcript even when the request fails.
    Console.WriteLine(chat.PrintMessages());
}
1 2 3 4 5 6 7 8 91011
> System:
You are a helpful weather assistant.
> User:
What is the current temperature in Dubai, UAE in Celsius?
> Assistant:
Tool calls:
GetCurrentWeather({"location":"Dubai, UAE","unit":"celsius"})
> Tool:
{"location":"Dubai, UAE","temperature":22,"unit":"celsius","description":"Sunny"}
> Assistant:
The current temperature in Dubai, UAE is 22°C.
using CSharpToJsonSchema;

/// <summary>Temperature unit used by the weather tool.</summary>
public enum Unit
{
    Celsius,
    Fahrenheit,
}

/// <summary>Weather report returned by <see cref="IWeatherFunctions.GetCurrentWeatherAsync"/>.</summary>
public class Weather
{
    public string Location { get; set; } = string.Empty;
    public double Temperature { get; set; }
    public Unit Unit { get; set; }
    public string Description { get; set; } = string.Empty;
}

/// <summary>Tool surface exposed to the model; the schema is generated at compile time.</summary>
[GenerateJsonSchema]
public interface IWeatherFunctions
{
    [Description("Get the current weather in a given location")]
    public Task<Weather> GetCurrentWeatherAsync(
        [Description("The city and state, e.g. San Francisco, CA")] string location,
        Unit unit = Unit.Celsius,
        CancellationToken cancellationToken = default);
}

/// <summary>Demo implementation that always reports 22 degrees and sunny skies.</summary>
public class WeatherService : IWeatherFunctions
{
    public Task<Weather> GetCurrentWeatherAsync(
        string location,
        Unit unit = Unit.Celsius,
        CancellationToken cancellationToken = default)
    {
        return Task.FromResult(new Weather
        {
            Location = location,
            Temperature = 22.0,
            Unit = unit,
            Description = "Sunny",
        });
    }
}
LangMate - A modular and extensible AI chat application platform built on this SDK:
- LangMate.Core SDK - Developer-friendly wrapper for easy Ollama integration in .NET apps
- Blazor Server Chat UI - Real-time, interactive chat interface with streaming responses
- RESTful Web API - Backend service with OpenAPI documentation (Scalar integration)
- MongoDB Integration - Persistent chat history and caching layer
- Polly-Based Resilience - Circuit breakers, retry logic, and timeout policies
- File Upload Support - Multimodal capabilities with base64 image preview for vision models
- .NET Aspire Compatible - Full orchestration support for Docker/Kubernetes deployment
- Production-ready .NET 9 implementation with clean, testable architecture
// Prepare a test container, send a single chat message, print the reply.
await using var container = await Environment.PrepareAsync(TestModels.Chat);

var chat = container.Client.Chat(TestModels.Chat);
var message = await chat.SendAsync("answer 5 random words");
Console.WriteLine(message.Content);
Chat Client Five Random Words Streaming
1 2 3 4 5 6 7 8 9101112131415161718192021
// Stream a response through the IChatClient abstraction and collect
// every non-blank text delta.
await using var environment = await Environment.PrepareAsync(TestModels.Chat);
IChatClient client = environment.Client;

var updates = client.GetStreamingResponseAsync(
    messages: [new MeaiChatMessage(MeaiChatRole.User, "Generate 5 random words.")],
    options: new ChatOptions
    {
        ModelId = TestModels.Chat,
    });

var deltas = new List<string>();
await foreach (var update in updates)
{
    if (!string.IsNullOrWhiteSpace(update.Text))
    {
        deltas.Add(update.Text);
    }
}
Chat Streaming
1 2 3 4 5 6 7 8 9101112131415161718192021
// Stream a chat reply, echoing each chunk to the console and recording
// the deltas for later inspection.
await using var container = await Environment.PrepareAsync(TestModels.Chat);

var chat = container.Client.Chat(TestModels.Chat);
var deltas = new List<string>();
var message = await chat.SendAsync(
    message: "answer 5 random words",
    onResponseChunk: (isFirstChunk, chunk) =>
    {
        if (isFirstChunk)
        {
            Console.Write("> ");
        }

        if (chunk != null)
        {
            deltas.Add(chunk);
            Console.Write(chunk);
        }
    });
Console.WriteLine();
Chat Client Five Random Words
1 2 3 4 5 6 7 8 9101112
// One-shot (non-streaming) request through the IChatClient abstraction.
await using var environment = await Environment.PrepareAsync(TestModels.Chat);
IChatClient client = environment.Client;

var response = await client.GetResponseAsync(
    messages: [new MeaiChatMessage(MeaiChatRole.User, "Generate 5 random words.")],
    options: new ChatOptions
    {
        ModelId = TestModels.Chat,
    });
Chat Client Get Service Returns Chat Client Metadata
// Stream a completion line-by-line, then run a plain awaited generation.
await using var container = await Environment.PrepareAsync(TestModels.Chat);

var enumerable = container.Client.GenerateAsStreamAsync(TestModels.Chat, "answer 5 random words");
await foreach (var response in enumerable)
{
    Console.WriteLine($"> {response.Response}");
}

var lastResponse = await container.Client.GenerateAsync(TestModels.Chat, "answer 123");
Console.WriteLine(lastResponse.Response);
Get Completion With Options
1 2 3 4 5 6 7 8 9101112
// Generation with an explicit request object and deterministic options.
await using var container = await Environment.PrepareAsync(TestModels.Chat);

var response = await container.Client.GenerateAsync(new GenerateRequest
{
    Model = TestModels.Chat,
    Prompt = "answer me just \"123\"",
    // Temperature 0 keeps the output reproducible.
    Options = new ModelOptions
    {
        Temperature = 0,
    },
});
Console.WriteLine(response.Response);
// Feed raw HTML to a reader model and stream the markdown conversion.
// FIX: the HTML prompt below had all of its spaces stripped (e.g.
// "Whyistheskyblue?"); restored from the expected output shown in the
// trailing comment.
await using var container = await Environment.PrepareAsync(TestModels.Reader);

var enumerable = container.Client.GenerateAsStreamAsync(
    TestModels.Reader,
    """
    <html><body><h3>Why is the sky blue?</h3><p>The sky appears blue because of the way light from the sun is reflected by the atmosphere. The atmosphere is made up of gases, including nitrogen and oxygen, which scatter light in all directions. This scattering causes the sunlight to appear as a rainbow of colors, with red light scattered more than other colors.</p></body></html>
    """);
await foreach (var response in enumerable)
{
    Console.Write(response.Response);
}

// ### Why is the sky blue?
//
// The sky appears blue because of the way light from the sun is reflected by the atmosphere. The atmosphere is made up of gases, including nitrogen and oxygen, which scatter light in all directions. This scattering causes the sunlight to appear as a rainbow of colors, with red light scattered more than other colors.
// Manual tool-calling round trip: ask the model, execute each requested
// tool, feed the results back, and let the model produce the final answer.
await using var container = await Environment.PrepareAsync(TestModels.Chat);

var messages = new List<ChatMessage>
{
    "You are a helpful weather assistant. Use the provided tools for weather questions.".AsSystemMessage(),
    "What is the current temperature in Dubai, UAE in Celsius?".AsUserMessage(),
};
var model = TestModels.Chat;

try
{
    var service = new WeatherService();
    var tools = service.AsTools().AsOllamaTools();

    // Round 1: the model should respond with tool calls rather than text.
    var response = await container.Client.ChatAsync(
        model,
        messages,
        tools: tools,
        options: new ModelOptions
        {
            Temperature = 0,
            Seed = 1,
        });

    var assistantMessage = response.Message ?? throw new InvalidOperationException("Expected a response message.");
    messages.Add(assistantMessage.ToChatMessage());

    // Execute every requested tool call and append its JSON result.
    foreach (var call in assistantMessage.ToolCalls!)
    {
        var argumentsAsJson = call.Function?.Arguments == null
            ? string.Empty
            : call.Function.Arguments.AsJson();
        var json = await service.CallAsync(
            functionName: call.Function?.Name ?? string.Empty,
            argumentsAsJson: argumentsAsJson);
        messages.Add(json.AsToolMessage());
    }

    // Round 2: the model turns the tool output into a natural-language answer.
    response = await container.Client.ChatAsync(
        model,
        messages,
        tools: tools,
        options: new ModelOptions
        {
            Temperature = 0,
            Seed = 1,
        });
    messages.Add((response.Message ?? throw new InvalidOperationException("Expected a response message.")).ToChatMessage());
}
finally
{
    Console.WriteLine(Ollama.Chat.PrintMessages(messages));
}
Tools In Chat
1 2 3 4 5 6 7 8 91011121314151617181920212223
// Auto tool-calling chat with deterministic model options.
await using var container = await Environment.PrepareAsync(TestModels.Chat);

var chat = container.Client.Chat(
    model: TestModels.Chat,
    systemMessage: "You are a helpful weather assistant. Use the provided tools for weather questions.",
    autoCallTools: true);
chat.Options = new ModelOptions
{
    Temperature = 0,
    Seed = 1,
};

var service = new WeatherService();
chat.AddToolService(service.AsTools().AsOllamaTools(), service.AsCalls());

try
{
    _ = await chat.SendAsync("What is the current temperature in Dubai, UAE in Celsius?");
}
finally
{
    // Always print the transcript, even on failure.
    Console.WriteLine(chat.PrintMessages());
}