// Send a single user message and print the model's reply.
var response = await client.Chat.CreateChatCompletionAsync(
    model: "grok-3-mini",
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "What is the meaning of life?",
        },
    ]);
Console.WriteLine(response.Choices![0].Message?.Content);
Streaming
1 2 3 4 5 6 7 8 9101112
// Stream the completion and print each delta chunk as it arrives.
await foreach (var chunk in client.Chat.CreateChatCompletionAsStreamAsync(
    model: "grok-3-mini",
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "Tell me a short story.",
        },
    ]))
{
    Console.Write(chunk.Choices?[0].Delta?.Content);
}
using System.Text.Json;

// Declare one function tool whose parameters are described by a JSON Schema.
var tools = new List<ChatCompletionTool>
{
    new ChatCompletionTool
    {
        Type = ChatCompletionToolType.Function,
        Function = new FunctionDefinition
        {
            Name = "get_weather",
            Description = "Get the current weather for a location.",
            Parameters = JsonSerializer.Deserialize<JsonElement>(
                """{"type":"object","properties":{"location":{"type":"string","description":"The city name."}},"required":["location"]}"""),
        },
    },
};

// Ask a question that should trigger the tool, letting the model choose automatically.
var response = await client.Chat.CreateChatCompletionAsync(
    model: "grok-3-mini",
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "What is the weather in San Francisco?",
        },
    ],
    tools: tools,
    toolChoice: new OneOf<CreateChatCompletionRequestToolChoice?, ChatCompletionNamedToolChoice>(
        CreateChatCompletionRequestToolChoice.Auto));

var toolCall = response.Choices![0].Message!.ToolCalls![0];
Console.WriteLine($"{toolCall.Function.Name}({toolCall.Function.Arguments})");
Reasoning
1 2 3 4 5 6 7 8 910111213
// Request a high reasoning effort so the response carries a thinking trace
// alongside the final answer.
// FIX: in the collapsed one-line form, "// reasoning trace" commented out the
// second Console.WriteLine; the comments are restored as trailing comments.
var response = await client.Chat.CreateChatCompletionAsync(
    model: "grok-3-mini",
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "What is 15 * 37? Think step by step.",
        },
    ],
    reasoningEffort: CreateChatCompletionRequestReasoningEffort.High);

Console.WriteLine(response.Choices![0].Message?.ReasoningContent); // reasoning trace
Console.WriteLine(response.Choices![0].Message?.Content);          // final answer
// Constrain the model's output to a strict JSON Schema (structured output).
var response = await client.Chat.CreateChatCompletionAsync(
    model: "grok-3-mini",
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "Extract the capital of France.",
        },
    ],
    responseFormat: new ResponseFormat
    {
        Type = ResponseFormatType.JsonSchema,
        JsonSchema = new ResponseFormatJsonSchema
        {
            Name = "capital_response",
            Strict = true,
            Schema = new
            {
                type = "object",
                properties = new
                {
                    country = new { type = "string" },
                    capital = new { type = "string" },
                },
                required = new[] { "country", "capital" },
                additionalProperties = false,
            },
        },
    });
Console.WriteLine(response.Choices![0].Message?.Content); // {"country":"France","capital":"Paris"}
// Send a multi-part user message (text + image URL) to a vision model.
var response = await client.Chat.CreateChatCompletionAsync(
    model: "grok-2-vision",
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = new OneOf<string, IList<ChatCompletionContentPart>>(
                new List<ChatCompletionContentPart>
                {
                    new ChatCompletionContentPart
                    {
                        Type = ChatCompletionContentPartType.Text,
                        Text = "Describe this image in one sentence.",
                    },
                    new ChatCompletionContentPart
                    {
                        Type = ChatCompletionContentPartType.ImageUrl,
                        ImageUrl = new ChatCompletionContentPartImageUrl
                        {
                            Url = "https://example.com/image.png",
                        },
                    },
                }),
        },
    ]);
Console.WriteLine(response.Choices![0].Message?.Content);
Image Generation
12345
// Generate an image from a text prompt and print the resulting URL.
var response = await client.Images.CreateImageAsync(
    model: "grok-imagine-image",
    prompt: "A futuristic cityscape at sunset");
Console.WriteLine(response.Data![0].Url);
Image Editing
123456789
// Edit an existing image: supply a source image URL plus an instruction prompt.
var response = await client.Images.CreateImageEditAsync(
    model: "grok-2-image",
    prompt: "Add a red hat to the person in the image",
    image: new ImageInput
    {
        Url = "https://example.com/photo.png",
    });
Console.WriteLine(response.Data![0].Url);
Video Generation
1 2 3 4 5 6 7 8 9101112
// Submit a video request and poll until generation finishes (or times out).
var status = await client.GenerateAndWaitAsync(
    new CreateVideoRequest
    {
        Model = "grok-imagine-video",
        Prompt = "A gentle ocean wave rolling onto a sandy beach at sunset",
        Duration = 3,
        Resolution = CreateVideoRequestResolution.x480p,
    },
    pollingInterval: TimeSpan.FromSeconds(10),
    timeout: TimeSpan.FromMinutes(5));
Console.WriteLine(status.Video?.Url);
Text-to-Speech
123456
// Synthesize speech from text and save the MP3 bytes to disk.
byte[] audioBytes = await client.Audio.CreateSpeechAsync(
    model: "tts-1",
    input: "Hello from xAI!",
    voice: CreateSpeechRequestVoice.Eve);
File.WriteAllBytes("output.mp3", audioBytes);
// Minimal realtime-voice round trip: connect, configure the session, send a
// text turn, and stream back the transcript.
// FIX: in the collapsed one-line form, "// Configure session", "// Send a text
// message…" and "// Receive events" commented out the statements fused onto
// them; the comments are restored to their own lines.
using var voiceClient = new RealtimeVoiceClient(apiKey);
await voiceClient.ConnectAsync();

// Configure session
await voiceClient.SendEventAsync(RealtimeClientEvent.SessionUpdate(new RealtimeSessionConfig
{
    Voice = "Eve",
    Instructions = "You are a helpful assistant.",
    Modalities = ["text", "audio"],
    TurnDetection = new RealtimeTurnDetection
    {
        Type = "server_vad",
        Threshold = 0.85,
        SilenceDurationMs = 500,
    },
}));

// Send a text message and request response
await voiceClient.SendEventAsync(RealtimeClientEvent.UserMessage("Say hello!"));
await voiceClient.SendEventAsync(RealtimeClientEvent.CreateResponse(["text"]));

// Receive events
await foreach (var serverEvent in voiceClient.ReceiveUpdatesAsync(cancellationToken))
{
    if (serverEvent.IsAudioTranscriptDelta)
        Console.Write(serverEvent.Delta);
    else if (serverEvent.IsResponseDone)
        break;
}
Responses API
1 2 3 4 5 6 7 8 9101112
// Responses API lifecycle: create a server-stored response, retrieve it by ID,
// then delete it.
// FIX: in the collapsed one-line form each "// …" comment swallowed the code
// fused after it; comments restored to their own lines.
// Create a response (stored server-side)
var response = await client.Responses.CreateResponseAsync(
    model: "grok-3-mini",
    input: "What is 2+2? Answer with just the number.");
Console.WriteLine(response.Output);

// Retrieve a stored response
var retrieved = await client.Responses.GetResponseAsync(response.Id!);

// Delete a stored response
await client.Responses.DeleteResponseAsync(response.Id!);
// Deferred completions: submit now, poll for the result later — manually or
// via the submit-and-poll convenience helper.
// FIX: fused "// …" comments in the collapsed one-line form commented out the
// statements that followed them; restored to their own lines.
// Submit a deferred request (processed asynchronously)
var response = await client.Chat.CreateChatCompletionAsync(
    model: "grok-3-mini",
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "Explain what a quasar is in two sentences.",
        },
    ],
    deferred: true);

// Poll for the result
var result = await client.Chat.GetDeferredCompletionAsync(response.Id!);
Console.WriteLine(result.Choices![0].Message?.Content);

// Or use the convenience helper that submits + polls automatically:
var completed = await client.CreateDeferredAndWaitAsync(
    new CreateChatCompletionRequest
    {
        Model = "grok-3-mini",
        Messages =
        [
            new ChatCompletionMessage
            {
                Role = ChatCompletionMessageRole.User,
                Content = "Write a haiku about programming.",
            },
        ],
    },
    pollingInterval: TimeSpan.FromSeconds(5),
    timeout: TimeSpan.FromMinutes(2));
Console.WriteLine(completed.Choices![0].Message?.Content);
Microsoft.Extensions.AI
This SDK covers xAI-specific endpoints (images, video, realtime, etc.). For standard IChatClient/IEmbeddingGenerator support, use CustomProviders.XAi() from the tryAGI.OpenAI package:
12345678
using tryAGI.OpenAI;
using Microsoft.Extensions.AI;

// Build an IChatClient for xAI via the tryAGI.OpenAI custom-provider helper.
using var api = CustomProviders.XAi("API_KEY");
IChatClient chatClient = api;

var response = await chatClient.GetResponseAsync("Hello from Grok!");
Console.WriteLine(response.Text);
Chat Completion
Send a simple chat completion request.
1 2 3 4 5 6 7 8 9101112131415
// Basic chat completion: one user message in, one assistant message out.
// FIX: the fused "// Create a chat completion…" comment in the collapsed
// one-line form commented out the request itself; restored to its own line.
var client = new XaiClient(apiKey);
var modelId = GetModelId();

// Create a chat completion with a simple user message.
var response = await client.Chat.CreateChatCompletionAsync(
    model: modelId,
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "Say 'Hello, World!' and nothing else.",
        },
    ]);
Console.WriteLine(response.Choices![0].Message?.Content);
Chat Completion Streaming
Stream a chat completion response token by token.
1 2 3 4 5 6 7 8 9101112131415161718
// Streaming chat completion: collect chunks and echo them token by token.
// FIX: the fused "// Stream the response…" comment in the collapsed one-line
// form commented out the streaming loop; restored to its own line.
var client = new XaiClient(apiKey);
var modelId = GetModelId();

// Stream the response and print each chunk as it arrives.
var chunks = new List<CreateChatCompletionStreamResponse>();
await foreach (var chunk in client.Chat.CreateChatCompletionAsStreamAsync(
    model: modelId,
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "Tell me a short story.",
        },
    ]))
{
    chunks.Add(chunk);
    Console.Write(chunk.Choices?[0].Delta?.Content);
}
Tool Calling
Use function tools to let the model call external functions.
// Tool calling: define a JSON-Schema function tool and inspect the tool call
// the model requests.
// FIX: fused "// …" comments in the collapsed one-line form commented out the
// statements after them; restored to their own lines.
var client = new XaiClient(apiKey);
var modelId = GetModelId();

// Define a function tool with a JSON Schema for its parameters.
var tools = new List<ChatCompletionTool>
{
    new ChatCompletionTool
    {
        Type = ChatCompletionToolType.Function,
        Function = new FunctionDefinition
        {
            Name = "get_weather",
            Description = "Get the current weather for a location.",
            Parameters = JsonSerializer.Deserialize<JsonElement>(
                """{"type":"object","properties":{"location":{"type":"string","description":"The city name."}},"required":["location"]}"""),
        },
    },
};

// Send a message that should trigger the tool call.
var response = await client.Chat.CreateChatCompletionAsync(
    model: modelId,
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "What is the weather in San Francisco?",
        },
    ],
    tools: tools,
    toolChoice: new OneOf<CreateChatCompletionRequestToolChoice?, ChatCompletionNamedToolChoice>(
        CreateChatCompletionRequestToolChoice.Auto));

var choice = response.Choices![0];

// Inspect the tool call the model wants to make.
var toolCall = choice.Message!.ToolCalls![0];
Console.WriteLine($"{toolCall.Function.Name}({toolCall.Function.Arguments})");
Parallel Tool Calls
Call multiple tools in parallel within a single response.
// Parallel tool calls: register two tools and let the model invoke both in a
// single response.
// FIX: the source line contained a dangling fragment
//   "parallel tool calls should produce at least 2 tool calls");
// — the condition of a truncated assertion. It is reconstructed below from the
// message text; confirm against the original test source.
var client = new XaiClient(apiKey);
var modelId = GetModelId();

// Define multiple tools that the model can call simultaneously.
var tools = new List<ChatCompletionTool>
{
    new ChatCompletionTool
    {
        Type = ChatCompletionToolType.Function,
        Function = new FunctionDefinition
        {
            Name = "get_weather",
            Description = "Get the current weather for a location.",
            Parameters = JsonSerializer.Deserialize<JsonElement>(
                """{"type":"object","properties":{"location":{"type":"string","description":"The city name."}},"required":["location"]}"""),
        },
    },
    new ChatCompletionTool
    {
        Type = ChatCompletionToolType.Function,
        Function = new FunctionDefinition
        {
            Name = "get_time",
            Description = "Get the current time for a timezone.",
            Parameters = JsonSerializer.Deserialize<JsonElement>(
                """{"type":"object","properties":{"timezone":{"type":"string","description":"The IANA timezone name."}},"required":["timezone"]}"""),
        },
    },
};

// Enable `parallelToolCalls` so the model can invoke multiple tools at once.
var response = await client.Chat.CreateChatCompletionAsync(
    model: modelId,
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "What's the weather in Tokyo and what time is it in America/New_York?",
        },
    ],
    tools: tools,
    parallelToolCalls: true,
    toolChoice: new OneOf<CreateChatCompletionRequestToolChoice?, ChatCompletionNamedToolChoice>(
        CreateChatCompletionRequestToolChoice.Auto));

var choice = response.Choices![0];
Assert.IsTrue(
    choice.Message!.ToolCalls!.Count >= 2,
    "parallel tool calls should produce at least 2 tool calls");

var functionNames = choice.Message.ToolCalls.Select(tc => tc.Function.Name).ToList();
foreach (var tc in choice.Message.ToolCalls)
{
    Console.WriteLine($"{tc.Function.Name}({tc.Function.Arguments})");
}
Reasoning
Use reasoning effort to get step-by-step thinking from grok-3-mini.
1 2 3 4 5 6 7 8 910111213141516171819
// Reasoning: request high effort and verify a reasoning trace is returned.
// FIX: the source line contained a dangling fragment
//   "grok-3-mini with high reasoning effort should return reasoning content");
// — a truncated assertion. Reconstructed below from the message text; confirm
// against the original test source.
var client = new XaiClient(apiKey);

// Enable reasoning with high effort to get a thinking trace alongside the answer.
var response = await client.Chat.CreateChatCompletionAsync(
    model: "grok-3-mini",
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "What is 15 * 37? Think step by step.",
        },
    ],
    reasoningEffort: CreateChatCompletionRequestReasoningEffort.High);

var message = response.Choices![0].Message;
Assert.IsNotNull(
    message?.ReasoningContent,
    "grok-3-mini with high reasoning effort should return reasoning content");
Console.WriteLine($"Reasoning: {message.ReasoningContent}");
Console.WriteLine($"Answer: {message.Content}");
Seed Determinism
Use a fixed seed and temperature=0 for reproducible output.
// Seed determinism: the same prompt with a fixed seed and temperature=0 should
// yield identical output on repeated calls.
// FIX: the source line contained a dangling fragment
//   "same seed and temperature=0 should produce deterministic output");
// — a truncated assertion. Reconstructed below as an equality check of the two
// responses; confirm against the original test source.
var client = new XaiClient(apiKey);
var modelId = GetModelId();
const int seed = 42;
const string prompt = "What is the capital of Japan? Answer with just the city name.";

// Send the same request twice with the same seed and temperature=0.
var response1 = await client.Chat.CreateChatCompletionAsync(
    model: modelId,
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = prompt,
        },
    ],
    seed: seed,
    temperature: 0);
var response2 = await client.Chat.CreateChatCompletionAsync(
    model: modelId,
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = prompt,
        },
    ],
    seed: seed,
    temperature: 0);

var content1 = response1.Choices![0].Message?.Content;
var content2 = response2.Choices![0].Message?.Content;

// With the same seed and temperature=0, outputs should be identical.
Assert.AreEqual(
    content1,
    content2,
    "same seed and temperature=0 should produce deterministic output");
Console.WriteLine($"Response 1: {content1}");
Console.WriteLine($"Response 2: {content2}");
// Vision: send text plus an image URL as one multi-part user message.
// FIX: the fused "// Send both text…" comment in the collapsed one-line form
// commented out the request; restored to its own line.
var client = new XaiClient(apiKey);

// Send both text and an image URL as a multi-part content message.
var response = await client.Chat.CreateChatCompletionAsync(
    model: "grok-2-vision",
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = new OneOf<string, IList<ChatCompletionContentPart>>(
                new List<ChatCompletionContentPart>
                {
                    new ChatCompletionContentPart
                    {
                        Type = ChatCompletionContentPartType.Text,
                        Text = "Describe this image in one sentence.",
                    },
                    new ChatCompletionContentPart
                    {
                        Type = ChatCompletionContentPartType.ImageUrl,
                        ImageUrl = new ChatCompletionContentPartImageUrl
                        {
                            Url = "https://upload.wikimedia.org/wikipedia/commons/thumb/b/b0/NewTux.svg/150px-NewTux.svg.png",
                        },
                    },
                }),
        },
    ]);
Console.WriteLine(response.Choices![0].Message?.Content);
Image Generation
Generate an image from a text prompt.
12345678
// Image generation via the Grok Imagine model.
// FIX: the fused "// Generate an image…" comment in the collapsed one-line
// form commented out the request; restored to its own line.
var client = new XaiClient(apiKey);

// Generate an image using the Grok Imagine model.
var response = await client.Images.CreateImageAsync(
    model: "grok-imagine-image",
    prompt: "A simple red circle on a white background");
Console.WriteLine(response.Data![0].Url);
Image Editing
Edit an existing image using a text prompt.
1 2 3 4 5 6 7 8 9101112
// Image editing: source image URL + instruction prompt.
// FIX: the fused "// Edit an image…" comment in the collapsed one-line form
// commented out the request; restored to its own line.
var client = new XaiClient(apiKey);

// Edit an image by providing a source image URL and an instruction prompt.
var response = await client.Images.CreateImageEditAsync(
    model: "grok-2-image",
    prompt: "Add a red hat to the person in the image",
    image: new ImageInput
    {
        Url = "https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/PNG_transparency_demonstration_1.png/280px-PNG_transparency_demonstration_1.png",
    });
Console.WriteLine(response.Data![0].Url);
Video Generation
Generate a video from a text prompt using the polling helper.
1 2 3 4 5 6 7 8 9101112131415
// Video generation with the submit-and-poll helper.
// FIX: the fused "// Use the `GenerateAndWaitAsync` helper…" comment in the
// collapsed one-line form commented out the request; restored to its own line.
var client = new XaiClient(apiKey);

// Use the `GenerateAndWaitAsync` helper to submit a video request and poll until done.
var status = await client.GenerateAndWaitAsync(
    new CreateVideoRequest
    {
        Model = "grok-imagine-video",
        Prompt = "A gentle ocean wave rolling onto a sandy beach at sunset",
        Duration = 3,
        Resolution = CreateVideoRequestResolution.x480p,
    },
    pollingInterval: TimeSpan.FromSeconds(10),
    timeout: TimeSpan.FromMinutes(5));
Console.WriteLine(status.Video?.Url);
Text to Speech
Convert text to speech audio.
1 2 3 4 5 6 7 8 91011
// Text-to-speech: synthesize audio bytes and verify they are non-trivial.
// FIX: the source line contained a dangling fragment
//   "audio output should contain meaningful data");
// — a truncated assertion. Reconstructed below as a length check; confirm
// against the original test source.
var client = new XaiClient(apiKey);

// Generate speech audio from text. Available voices: Eve, Ara, Rex, Sal, Leo.
byte[] audioBytes = await client.Audio.CreateSpeechAsync(
    model: "tts-1",
    input: "Hello from xAI!",
    voice: CreateSpeechRequestVoice.Eve);
Assert.IsTrue(
    audioBytes.Length > 0,
    "audio output should contain meaningful data");
Console.WriteLine($"Generated {audioBytes.Length} bytes of audio.");
Responses API
Create, retrieve, and delete server-stored responses.
1 2 3 4 5 6 7 8 910111213141516171819
// Responses API: create a server-stored response, fetch it back, then delete it.
// FIX: fused "// …" comments in the collapsed one-line form commented out the
// statements after them; restored to their own lines.
var client = new XaiClient(apiKey);
var modelId = GetModelId();

// Create a response that is stored server-side for later retrieval.
var response = await client.Responses.CreateResponseAsync(
    model: modelId,
    input: "What is 2+2? Answer with just the number.");
Console.WriteLine($"Response: {response.Output}");

// Retrieve the stored response by ID.
var retrieved = await client.Responses.GetResponseAsync(response.Id!);
Console.WriteLine($"Retrieved: {retrieved.Id}");

// Delete the stored response when no longer needed.
var deleted = await client.Responses.DeleteResponseAsync(response.Id!);
Console.WriteLine($"Deleted: {deleted.Deleted}");
Deferred Completion
Submit a chat completion for asynchronous processing and poll for the result.
1 2 3 4 5 6 7 8 9101112131415161718192021
// Deferred completion: submit asynchronously, wait briefly, then poll for the
// result by request ID.
// FIX: fused "// …" comments in the collapsed one-line form commented out the
// statements after them; restored to their own lines.
var client = new XaiClient(apiKey);
var modelId = GetModelId();

// Submit a deferred request — it returns immediately with a request ID.
var response = await client.Chat.CreateChatCompletionAsync(
    model: modelId,
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "Explain what a quasar is in two sentences.",
        },
    ],
    deferred: true);

// Poll for the result (the request is processed asynchronously).
await Task.Delay(TimeSpan.FromSeconds(5));
var result = await client.Chat.GetDeferredCompletionAsync(response.Id!);
Console.WriteLine(result.Choices![0].Message?.Content);
Realtime Voice
Connect to the Realtime Voice Agent WebSocket API for bidirectional text/audio streaming.
// Realtime Voice over WebSocket: connect, configure the session, send one text
// turn, and consume server events until the response completes.
// FIX: fused "// …" comments in the collapsed one-line form commented out the
// statements after them; restored to their own lines.
var apiKey = Environment.GetEnvironmentVariable("XAI_API_KEY") is { Length: > 0 } apiKeyValue
    ? apiKeyValue
    : throw new AssertInconclusiveException("XAI_API_KEY environment variable is not found.");

// Create a WebSocket client and connect to the xAI Realtime API.
using var client = new XaiRealtimeClient(apiKey);
await client.ConnectAsync();

// Configure the session with voice, instructions, and turn detection.
await client.SendSessionUpdateAsync(new SessionUpdatePayload
{
    Session = new SessionConfig
    {
        Voice = SessionConfigVoice.Eve,
        Instructions = "You are a helpful assistant. Respond briefly.",
        Modalities = ["text", "audio"],
        TurnDetection = new TurnDetection
        {
            Type = "server_vad",
            Threshold = 0.85,
            SilenceDurationMs = 500,
        },
    },
});

// Send a text message and request a text response.
await client.SendConversationItemCreateAsync(new ConversationItemCreatePayload
{
    Item = new ConversationItem
    {
        Type = "message",
        Role = "user",
        Content = [new ContentPart { Type = "input_text", Text = "Say hello!" }],
    },
});
await client.SendResponseCreateAsync(new ResponseCreatePayload
{
    Response = new ResponseConfig
    {
        Modalities = ["text"],
    },
});

// Receive server events until the response is complete.
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));
var receivedSessionUpdated = false;
var receivedResponseDone = false;
string? transcriptText = null;
await foreach (var serverEvent in client.ReceiveUpdatesAsync(cts.Token))
{
    if (serverEvent.IsSessionUpdated)
    {
        receivedSessionUpdated = true;
    }
    else if (serverEvent.IsResponseOutputAudioTranscriptDelta)
    {
        transcriptText = (transcriptText ?? "") + serverEvent.ResponseOutputAudioTranscriptDelta?.Delta;
        Console.Write(serverEvent.ResponseOutputAudioTranscriptDelta?.Delta);
    }
    else if (serverEvent.IsResponseDone)
    {
        receivedResponseDone = true;
        break;
    }
    else if (serverEvent.IsError)
    {
        // NOTE(review): error events are silently ignored in the original —
        // consider logging or failing here.
    }
}
Multi-Turn Conversation
Continue a conversation across multiple turns with system instructions and history.
// Multi-turn conversation: system prompt plus prior user/assistant turns keep
// context across requests.
// FIX: the source line contained a dangling fragment
//   "56 / 4 = 14, and the model should reference the previous result");
// — a truncated assertion. Reconstructed below as a contains-check for "14";
// confirm against the original test source.
var client = new XaiClient(apiKey);
var modelId = GetModelId();

// Pass a system message and conversation history to maintain context across turns.
var response = await client.Chat.CreateChatCompletionAsync(
    model: modelId,
    messages:
    [
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.System,
            Content = "You are a helpful math tutor. Always show your work.",
        },
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "What is 7 * 8?",
        },
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.Assistant,
            Content = "7 * 8 = 56",
        },
        new ChatCompletionMessage
        {
            Role = ChatCompletionMessageRole.User,
            Content = "Now divide that result by 4.",
        },
    ]);

var content = response.Choices![0].Message?.Content;
Assert.IsTrue(
    content?.Contains("14") == true,
    "56 / 4 = 14, and the model should reference the previous result");
Console.WriteLine(content);
Document Search
Search uploaded document collections using hybrid search.
1 2 3 4 5 6 7 8 910111213141516171819
// Document search: hybrid (semantic + keyword) query against an uploaded
// collection identified by an environment variable.
// FIX: the fused "// Search across…" comment in the collapsed one-line form
// commented out the request; restored to its own line.
var client = new XaiClient(apiKey);
var collectionId = Environment.GetEnvironmentVariable("XAI_COLLECTION_ID") is { Length: > 0 } value
    ? value
    : throw new AssertInconclusiveException("XAI_COLLECTION_ID environment variable is not found.");

// Search across document collections using hybrid (semantic + keyword) mode.
var response = await client.Collections.SearchDocumentsAsync(
    query: "What is xAI?",
    collectionIds: [collectionId],
    mode: SearchDocumentsRequestMode.Hybrid,
    maxNumResults: 5);

foreach (var result in response.Results!)
{
    // Print the score and the first 80 characters of each matching chunk.
    Console.WriteLine($"Score: {result.Score:F3} — {result.Content?[..Math.Min(80, result.Content?.Length ?? 0)]}...");
}
File Upload
Upload a file for use with the Batch API.
1 2 3 4 5 6 7 8 91011
// File upload for the Batch API: raw UTF-8 bytes, a filename, and a purpose.
// FIX: the fused "// Upload a file…" comment in the collapsed one-line form
// commented out the statements after it; restored to its own line.
var client = new XaiClient(apiKey);

// Upload a file by providing its content, filename, and purpose.
var content = "Hello from xAI SDK integration test."u8.ToArray();
var file = await client.Files.UploadFileAsync(
    file: content,
    filename: "test-upload.txt",
    purpose: "batch");
Console.WriteLine($"Uploaded: {file.Id} ({file.Bytes} bytes)");
API Key Info
Retrieve information about the current API key.
1234567
// API key introspection: fetch and print the current key's metadata.
// FIX: the fused "// Check the current API key's metadata…" comment in the
// collapsed one-line form commented out the call; restored to its own line.
var client = new XaiClient(apiKey);

// Check the current API key's metadata — useful for diagnostics and validation.
var info = await client.Auth.GetApiKeyInfoAsync();
Console.WriteLine($"Key: {info.RedactedApiKey}");
Console.WriteLine($"User: {info.UserId}");