
xAI

The xAI provider connects Iris to Grok models. Grok is known for its real-time knowledge, reasoning capabilities, and a distinctive personality that tackles questions other models may avoid.

package main

import (
    "context"
    "fmt"
    "os"

    "github.com/petal-labs/iris/core"
    "github.com/petal-labs/iris/providers/xai"
)

func main() {
    provider := xai.New(os.Getenv("XAI_API_KEY"))
    client := core.NewClient(provider)

    resp, err := client.Chat("grok-3").
        User("Explain quantum computing in plain English.").
        GetResponse(context.Background())
    if err != nil {
        panic(err)
    }

    fmt.Println(resp.Output)
}
Store your xAI API key before running the example:
# Store in the encrypted keystore (recommended)
iris keys set xai
# Prompts for: Enter API key for xai: xai-...
import "github.com/petal-labs/iris/providers/xai"
// From an API key string
provider := xai.New("xai-...")
// From the XAI_API_KEY environment variable
provider, err := xai.NewFromEnv()
if err != nil {
log.Fatal("XAI_API_KEY not set:", err)
}
// From the Iris keystore (falls back to environment)
provider, err := xai.NewFromKeystore()

Option                   Description                  Default
WithBaseURL(url)         Override the API base URL    https://api.x.ai/v1
WithHTTPClient(client)   Use a custom *http.Client    Default client
WithHeader(key, value)   Add a custom HTTP header     None
WithTimeout(duration)    Set the request timeout      60 seconds
provider := xai.New("xai-...",
xai.WithTimeout(90 * time.Second),
)
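
A sketch combining several of the options above; the custom HTTP client and the header name here are placeholders rather than values the provider requires:

httpClient := &http.Client{Timeout: 2 * time.Minute}

provider := xai.New("xai-...",
    xai.WithBaseURL("https://api.x.ai/v1"),           // the default endpoint, shown for illustration
    xai.WithHTTPClient(httpClient),                   // custom transport and timeout settings
    xai.WithHeader("X-Request-Source", "my-service"), // hypothetical header name and value
)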

Feature            Supported   Notes
Chat               Yes         All Grok models
Streaming          Yes         Real-time token streaming
Tool calling       Yes         Function calling
Vision             Yes         Image analysis with grok-vision-beta
Reasoning          Yes         Deep thinking mode
Image generation   No          Not supported
Embeddings         No          Not supported

Model         Context   Best For
grok-3        128K      Complex reasoning, latest model
grok-3-fast   128K      Faster responses
grok-2        128K      Production workloads
grok-2-mini   128K      Cost-effective

Model              Context   Best For
grok-vision-beta   8K        Image analysis
resp, err := client.Chat("grok-3").
System("You are a helpful assistant with a witty personality.").
User("What's the best programming language and why is it Go?").
Temperature(0.7).
MaxTokens(500).
GetResponse(ctx)
if err != nil {
log.Fatal(err)
}
fmt.Println(resp.Output)

Stream the response token by token:

stream, err := client.Chat("grok-3").
    System("You are a helpful assistant.").
    User("Explain Go's concurrency model in detail.").
    GetStream(ctx)
if err != nil {
    log.Fatal(err)
}

for chunk := range stream.Ch {
    fmt.Print(chunk.Content)
}
fmt.Println()

if err := <-stream.Err; err != nil {
    log.Fatal(err)
}

Analyze images with Grok Vision:

imageData, err := os.ReadFile("photo.png")
if err != nil {
    log.Fatal(err)
}
base64Data := base64.StdEncoding.EncodeToString(imageData)

resp, err := client.Chat("grok-vision-beta").
    UserMultimodal().
    Text("What's happening in this image?").
    ImageBase64(base64Data, "image/png").
    Done().
    GetResponse(ctx)
if err != nil {
    log.Fatal(err)
}
fmt.Println(resp.Output)

Enable deep thinking for complex problems:

resp, err := client.Chat("grok-3").
System("You are a brilliant problem solver.").
User("Analyze the implications of quantum computing on cryptography.").
Thinking(true).
ThinkingBudget(5000).
GetResponse(ctx)
// Access reasoning process
if resp.Thinking != "" {
fmt.Println("=== Reasoning ===")
fmt.Println(resp.Thinking)
}
fmt.Println("=== Answer ===")
fmt.Println(resp.Output)

Define a tool and submit the tool call's result in a follow-up request:

weatherTool := core.Tool{
    Name:        "get_weather",
    Description: "Get current weather for a location",
    Parameters: map[string]interface{}{
        "type": "object",
        "properties": map[string]interface{}{
            "location": map[string]interface{}{
                "type":        "string",
                "description": "City name",
            },
        },
        "required": []string{"location"},
    },
}

resp, err := client.Chat("grok-3").
    User("What's the weather in San Francisco?").
    Tools(weatherTool).
    GetResponse(ctx)
if err != nil {
    log.Fatal(err)
}

if len(resp.ToolCalls) > 0 {
    call := resp.ToolCalls[0]
    result := getWeather(call.Arguments)

    finalResp, err := client.Chat("grok-3").
        User("What's the weather in San Francisco?").
        Tools(weatherTool).
        Assistant(resp.Output).
        ToolCall(call.ID, call.Name, call.Arguments).
        ToolResult(call.ID, result).
        GetResponse(ctx)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(finalResp.Output)
}

Grok excels at questions about current events:

resp, err := client.Chat("grok-3").
System("You have access to real-time information from X/Twitter.").
User("What are people saying about the latest tech news?").
GetResponse(ctx)

Carry context across turns by replaying the earlier messages:

resp1, _ := client.Chat("grok-3").
    System("You are a witty assistant.").
    User("Tell me a programming joke.").
    GetResponse(ctx)

resp2, _ := client.Chat("grok-3").
    System("You are a witty assistant.").
    User("Tell me a programming joke.").
    Assistant(resp1.Output).
    User("Now explain why it's funny.").
    GetResponse(ctx)
fmt.Println(resp2.Output)
resp, err := client.Chat("grok-3").User(prompt).GetResponse(ctx)
if err != nil {
var apiErr *core.APIError
if errors.As(err, &apiErr) {
switch apiErr.StatusCode {
case 401:
log.Fatal("Invalid API key")
case 429:
log.Printf("Rate limited. Retry after: %s", apiErr.RetryAfter)
case 500, 503:
log.Printf("xAI service error: %s", apiErr.Message)
}
}
}

Task                Recommended Model
General chat        grok-2-mini
Complex reasoning   grok-3
Fast responses      grok-3-fast
Image analysis      grok-vision-beta
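
As an illustration only (the helper and its task labels are hypothetical, not part of Iris), the recommendations above can be centralized in a small lookup:

// modelFor maps a task label to the model recommended in the table above.
// Hypothetical helper; the task names are arbitrary strings chosen here.
func modelFor(task string) string {
    switch task {
    case "general-chat":
        return "grok-2-mini"
    case "complex-reasoning":
        return "grok-3"
    case "fast-responses":
        return "grok-3-fast"
    case "image-analysis":
        return "grok-vision-beta"
    default:
        return "grok-3"
    }
}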

Configure automatic retries for transient failures:

client := core.NewClient(provider,
    core.WithRetryPolicy(&core.RetryPolicy{
        MaxRetries:        3,
        InitialInterval:   1 * time.Second,
        MaxInterval:       30 * time.Second,
        BackoffMultiplier: 2.0,
        RetryOn:           []int{429, 500, 503},
    }),
)

  • Uses Authorization: Bearer for authentication
  • Grok models have access to real-time X/Twitter data
  • The provider is safe for concurrent use after construction (see the sketch after this list)
  • Grok tends to have a more direct and humorous communication style
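
A minimal sketch of sharing one client across goroutines, assuming only the constructors and chat calls shown earlier; the prompts are placeholders:

// Assumes provider and client are constructed as in the examples above.
prompts := []string{
    "Summarize Go's error handling conventions.",
    "Explain goroutines in one paragraph.",
}

var wg sync.WaitGroup
for _, p := range prompts {
    wg.Add(1)
    go func(prompt string) {
        defer wg.Done()
        // A single client can serve many goroutines at once.
        resp, err := client.Chat("grok-3").
            User(prompt).
            GetResponse(context.Background())
        if err != nil {
            log.Println(err)
            return
        }
        fmt.Println(resp.Output)
    }(p)
}
wg.Wait()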

Tools Guide

Learn advanced tool calling. Tools →

Providers Overview

Compare all available providers. Providers →