The Perplexity provider connects Iris to search-augmented AI models. Perplexity's models combine LLM capabilities with real-time web search, which makes them well suited to questions that require up-to-date information.
```go
package main

import (
    "context"
    "fmt"
    "os"

    "github.com/petal-labs/iris/core"
    "github.com/petal-labs/iris/providers/perplexity"
)

func main() {
    provider := perplexity.New(os.Getenv("PERPLEXITY_API_KEY"))
    client := core.NewClient(provider)

    resp, err := client.Chat("sonar-pro").
        User("What are the latest Go 1.23 features?").
        GetResponse(context.Background())
    if err != nil {
        panic(err)
    }
    fmt.Println(resp.Output)

    // Access citations
    for _, citation := range resp.Citations {
        fmt.Printf("Source: %s\n", citation.URL)
    }
}
```

Provide your API key through the Iris keystore or an environment variable:

```bash
# Store in the encrypted keystore (recommended)
iris keys set perplexity
# Prompts for: Enter API key for perplexity: pplx-...
```

```bash
export PERPLEXITY_API_KEY=pplx-...
```

Create the provider from whichever source you use:

```go
import "github.com/petal-labs/iris/providers/perplexity"

// From an API key string
provider := perplexity.New("pplx-...")
```

```go
// From the PERPLEXITY_API_KEY environment variable
provider, err := perplexity.NewFromEnv()
if err != nil {
    log.Fatal("PERPLEXITY_API_KEY not set:", err)
}
```

```go
// From the Iris keystore
provider, err := perplexity.NewFromKeystore()
```

| Option | Description | Default |
|---|---|---|
| `WithBaseURL(url)` | Override the API base URL | `https://api.perplexity.ai` |
| `WithHTTPClient(client)` | Use a custom `*http.Client` | Default client |
| `WithHeader(key, value)` | Add a custom HTTP header | None |
| `WithTimeout(duration)` | Set the request timeout | 60 seconds |
```go
provider := perplexity.New("pplx-...",
    perplexity.WithTimeout(90 * time.Second),
)
```
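The options can be combined. As a rough sketch using the option functions listed in the table above (the custom header name and client settings here are illustrative, not required values; the base URL shown is just the documented default):

```go
import (
    "net/http"
    "time"

    "github.com/petal-labs/iris/providers/perplexity"
)

// Sketch: combining several provider options.
// "X-Request-Source" is a hypothetical header, not one Iris requires.
httpClient := &http.Client{Timeout: 120 * time.Second}

provider := perplexity.New("pplx-...",
    perplexity.WithBaseURL("https://api.perplexity.ai"),
    perplexity.WithHTTPClient(httpClient),
    perplexity.WithHeader("X-Request-Source", "my-app"),
)
```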
| Feature | Supported | Notes |
|---|---|---|
| Chat | ✓ | All Sonar models |
| Streaming | ✓ | Real-time token streaming |
| Tool calling | ✓ | Function calling |
| Reasoning | ✓ | Deep thinking mode |
| Citations | ✓ | Source references |
| Image generation | Not supported | |
| Embeddings | Not supported | |
| Vision | Not supported | |
| Model | Context | Best For |
|---|---|---|
| sonar-pro | 200K | Deep research, complex queries |
| sonar | 128K | General questions, quick answers |
| sonar-reasoning-pro | 128K | Complex reasoning with search |
| sonar-reasoning | 128K | Reasoning with search |
The following models are also available:

| Model | Context | Best For |
|---|---|---|
| llama-3.1-sonar-large-128k-online | 128K | Large context, online search |
| llama-3.1-sonar-small-128k-online | 128K | Fast, cost-effective |
Responses include source citations you can surface alongside the answer:

```go
resp, err := client.Chat("sonar-pro").
    User("What are the current interest rates set by the Federal Reserve?").
    GetResponse(ctx)
if err != nil {
    log.Fatal(err)
}

fmt.Println(resp.Output)

// Access source citations
if len(resp.Citations) > 0 {
    fmt.Println("\nSources:")
    for _, citation := range resp.Citations {
        fmt.Printf("- %s: %s\n", citation.Title, citation.URL)
    }
}
```

Stream tokens as they are generated:

```go
stream, err := client.Chat("sonar").
    User("What are the latest developments in AI regulation?").
    GetStream(ctx)
if err != nil {
    log.Fatal(err)
}

for chunk := range stream.Ch {
    fmt.Print(chunk.Content)
}
fmt.Println()

// Get final response with citations
final := <-stream.Final
for _, citation := range final.Citations {
    fmt.Printf("Source: %s\n", citation.URL)
}

if err := <-stream.Err; err != nil {
    log.Fatal(err)
}
```

For complex questions requiring deep analysis:
```go
resp, err := client.Chat("sonar-reasoning-pro").
    User("Analyze the economic implications of AI on the labor market in 2025.").
    Thinking(true).
    ThinkingBudget(5000).
    GetResponse(ctx)
if err != nil {
    log.Fatal(err)
}

if resp.Thinking != "" {
    fmt.Println("=== Analysis Process ===")
    fmt.Println(resp.Thinking)
}

fmt.Println("=== Conclusions ===")
fmt.Println(resp.Output)

// Citations from research
for _, c := range resp.Citations {
    fmt.Printf("Source: %s\n", c.URL)
}
```

Combine search with custom tools:
```go
calculatorTool := core.Tool{
    Name:        "calculate",
    Description: "Perform mathematical calculations",
    Parameters: map[string]interface{}{
        "type": "object",
        "properties": map[string]interface{}{
            "expression": map[string]interface{}{
                "type":        "string",
                "description": "Mathematical expression to evaluate",
            },
        },
        "required": []string{"expression"},
    },
}

resp, err := client.Chat("sonar-pro").
    User("What is Apple's current stock price and what would 100 shares cost?").
    Tools(calculatorTool).
    GetResponse(ctx)
if err != nil {
    log.Fatal(err)
}

// Perplexity finds the stock price, then calls the calculator
if len(resp.ToolCalls) > 0 {
    call := resp.ToolCalls[0]
    result := calculate(call.Arguments)

    finalResp, err := client.Chat("sonar-pro").
        User("What is Apple's current stock price and what would 100 shares cost?").
        Tools(calculatorTool).
        Assistant(resp.Output).
        ToolCall(call.ID, call.Name, call.Arguments).
        ToolResult(call.ID, result).
        GetResponse(ctx)
    if err != nil {
        log.Fatal(err)
    }

    fmt.Println(finalResp.Output)
}
```
Control the search domain:

```go
// Focus on academic sources
resp, err := client.Chat("sonar-pro").
    User("What does recent research say about the effects of intermittent fasting?").
    SearchFocus(core.SearchFocusAcademic).
    GetResponse(ctx)

// Focus on news sources
resp, err = client.Chat("sonar-pro").
    User("What happened in tech news today?").
    SearchFocus(core.SearchFocusNews).
    GetResponse(ctx)

// Focus on code/technical sources
resp, err = client.Chat("sonar-pro").
    User("How do I implement a trie in Go?").
    SearchFocus(core.SearchFocusTechnical).
    GetResponse(ctx)
```

For time-sensitive information:
```go
resp, err := client.Chat("sonar-pro").
    User("What are today's major stock market movements?").
    GetResponse(ctx)

// The model automatically uses current date in searches
```

Build on previous research:
```go
// Initial query
resp1, _ := client.Chat("sonar-pro").
    System("You are a research assistant.").
    User("What are the main AI safety concerns?").
    GetResponse(ctx)

// Follow-up with context
resp2, _ := client.Chat("sonar-pro").
    System("You are a research assistant.").
    User("What are the main AI safety concerns?").
    Assistant(resp1.Output).
    User("What solutions are being proposed for the alignment problem specifically?").
    GetResponse(ctx)
```

Get multiple perspectives:
```go
resp, err := client.Chat("sonar-pro").
    System("When answering, provide perspectives from multiple sources and note any disagreements.").
    User("Is coffee good or bad for health?").
    GetResponse(ctx)
if err != nil {
    log.Fatal(err)
}

// Response includes citations from different sources
for _, c := range resp.Citations {
    fmt.Printf("%s - %s\n", c.Title, c.URL)
}
```

Handle API errors by inspecting the status code:

```go
resp, err := client.Chat("sonar-pro").User(prompt).GetResponse(ctx)
if err != nil {
    var apiErr *core.APIError
    if errors.As(err, &apiErr) {
        switch apiErr.StatusCode {
        case 401:
            log.Fatal("Invalid API key")
        case 429:
            log.Printf("Rate limited. Retry after: %s", apiErr.RetryAfter)
        case 500, 503:
            log.Printf("Perplexity service error: %s", apiErr.Message)
        }
    }
}
```

Research assistant:

```go
resp, err := client.Chat("sonar-pro").
    System("You are a thorough research assistant. Always cite your sources.").
    User("What are the latest breakthroughs in nuclear fusion energy?").
    GetResponse(ctx)
```

News summarization:

```go
resp, err := client.Chat("sonar").
    System("Summarize the key points from recent news.").
    User("What happened in technology this week?").
    GetResponse(ctx)
```

Fact checking:

```go
resp, err := client.Chat("sonar-pro").
    System("Verify claims using current sources. Cite evidence.").
    User("Is it true that Go is the fastest growing programming language in 2024?").
    GetResponse(ctx)
```

Pick a model to match the task:

| Task | Recommended Model |
|---|---|
| Quick answers | sonar |
| Deep research | sonar-pro |
| Complex reasoning | sonar-reasoning-pro |
| Cost-effective | sonar |
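To keep that choice in one place in application code, a small helper along these lines could work; the task labels are hypothetical, only the model names come from the table above:

```go
// modelForTask maps a rough task category to a model from the table above.
// The category strings are illustrative, not an Iris or Perplexity concept.
func modelForTask(task string) string {
    switch task {
    case "quick-answer", "cost-effective":
        return "sonar"
    case "deep-research":
        return "sonar-pro"
    case "complex-reasoning":
        return "sonar-reasoning-pro"
    default:
        return "sonar"
    }
}
```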
Check citations before trusting factual answers:

```go
// Always process citations for factual queries
if len(resp.Citations) == 0 {
    log.Println("Warning: No sources cited for factual claim")
}

// Verify important information
for _, c := range resp.Citations {
    if strings.Contains(c.URL, "gov") || strings.Contains(c.URL, "edu") {
        fmt.Println("Authoritative source:", c.URL)
    }
}
```

Configure retries for transient failures:

```go
client := core.NewClient(provider,
    core.WithRetryPolicy(&core.RetryPolicy{
        MaxRetries:        3,
        InitialInterval:   1 * time.Second,
        MaxInterval:       30 * time.Second,
        BackoffMultiplier: 2.0,
        RetryOn:           []int{429, 500, 503},
    }),
)
```

Requests are authenticated with an `Authorization: Bearer` header.

Tools Guide
Combine search with tools. Tools →
Streaming Guide
Stream research responses. Streaming →
Providers Overview
Compare all available providers. Providers →