Perplexity

The Perplexity provider connects Iris to Perplexity's search-augmented Sonar models. These models combine LLM capabilities with real-time web search, which makes them well suited to questions that require up-to-date information.

package main

import (
    "context"
    "fmt"
    "os"

    "github.com/petal-labs/iris/core"
    "github.com/petal-labs/iris/providers/perplexity"
)

func main() {
    provider := perplexity.New(os.Getenv("PERPLEXITY_API_KEY"))
    client := core.NewClient(provider)

    resp, err := client.Chat("sonar-pro").
        User("What are the latest Go 1.23 features?").
        GetResponse(context.Background())
    if err != nil {
        panic(err)
    }

    fmt.Println(resp.Output)

    // Access citations
    for _, citation := range resp.Citations {
        fmt.Printf("Source: %s\n", citation.URL)
    }
}

Store your Perplexity API key in the encrypted Iris keystore (recommended), or provide it directly when constructing the provider:

# Store in the encrypted keystore (recommended)
iris keys set perplexity
# Prompts for: Enter API key for perplexity: pplx-...
import "github.com/petal-labs/iris/providers/perplexity"
// From an API key string
provider := perplexity.New("pplx-...")
// From the PERPLEXITY_API_KEY environment variable
provider, err := perplexity.NewFromEnv()
if err != nil {
log.Fatal("PERPLEXITY_API_KEY not set:", err)
}
// From the Iris keystore
provider, err := perplexity.NewFromKeystore()
Option | Description | Default
WithBaseURL(url) | Override the API base URL | https://api.perplexity.ai
WithHTTPClient(client) | Use a custom *http.Client | Default client
WithHeader(key, value) | Add a custom HTTP header | None
WithTimeout(duration) | Set the request timeout | 60 seconds
provider := perplexity.New("pplx-...",
    perplexity.WithTimeout(90*time.Second),
)
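
Options can be combined. A minimal sketch (the gateway URL and header are illustrative placeholders, not required settings):

// assumes "time" is imported
provider := perplexity.New("pplx-...",
    perplexity.WithBaseURL("https://llm-gateway.example.com"), // illustrative: route through an internal gateway
    perplexity.WithHeader("X-Request-Source", "my-service"),   // illustrative header name and value
    perplexity.WithTimeout(2*time.Minute),
)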
Feature | Supported | Notes
Chat | Yes | All Sonar models
Streaming | Yes | Real-time token streaming
Tool calling | Yes | Function calling
Reasoning | Yes | Deep thinking mode
Citations | Yes | Source references
Image generation | No | Not supported
Embeddings | No | Not supported
Vision | No | Not supported
Model | Context | Best For
sonar-pro | 200K | Deep research, complex queries
sonar | 128K | General questions, quick answers
sonar-reasoning-pro | 128K | Complex reasoning with search
sonar-reasoning | 128K | Reasoning with search

Model | Context | Best For
llama-3.1-sonar-large-128k-online | 128K | Large context, online search
llama-3.1-sonar-small-128k-online | 128K | Fast, cost-effective
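
The llama-3.1 Sonar model names are passed to Chat the same way as the current Sonar models, for example:

resp, err := client.Chat("llama-3.1-sonar-small-128k-online").
    User("Summarize today's top technology stories.").
    GetResponse(ctx)
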
resp, err := client.Chat("sonar-pro").
    User("What are the current interest rates set by the Federal Reserve?").
    GetResponse(ctx)
if err != nil {
    log.Fatal(err)
}

fmt.Println(resp.Output)

// Access source citations
if len(resp.Citations) > 0 {
    fmt.Println("\nSources:")
    for _, citation := range resp.Citations {
        fmt.Printf("- %s: %s\n", citation.Title, citation.URL)
    }
}
stream, err := client.Chat("sonar").
    User("What are the latest developments in AI regulation?").
    GetStream(ctx)
if err != nil {
    log.Fatal(err)
}

for chunk := range stream.Ch {
    fmt.Print(chunk.Content)
}
fmt.Println()

// Get final response with citations
final := <-stream.Final
for _, citation := range final.Citations {
    fmt.Printf("Source: %s\n", citation.URL)
}

if err := <-stream.Err; err != nil {
    log.Fatal(err)
}

For complex questions requiring deep analysis:

resp, err := client.Chat("sonar-reasoning-pro").
    User("Analyze the economic implications of AI on the labor market in 2025.").
    Thinking(true).
    ThinkingBudget(5000).
    GetResponse(ctx)
if err != nil {
    log.Fatal(err)
}

if resp.Thinking != "" {
    fmt.Println("=== Analysis Process ===")
    fmt.Println(resp.Thinking)
}

fmt.Println("=== Conclusions ===")
fmt.Println(resp.Output)

// Citations from research
for _, c := range resp.Citations {
    fmt.Printf("Source: %s\n", c.URL)
}

Combine search with custom tools:

calculatorTool := core.Tool{
    Name:        "calculate",
    Description: "Perform mathematical calculations",
    Parameters: map[string]interface{}{
        "type": "object",
        "properties": map[string]interface{}{
            "expression": map[string]interface{}{
                "type":        "string",
                "description": "Mathematical expression to evaluate",
            },
        },
        "required": []string{"expression"},
    },
}

resp, err := client.Chat("sonar-pro").
    User("What is Apple's current stock price and what would 100 shares cost?").
    Tools(calculatorTool).
    GetResponse(ctx)
if err != nil {
    log.Fatal(err)
}

// Perplexity finds the stock price, then calls the calculator tool
if len(resp.ToolCalls) > 0 {
    call := resp.ToolCalls[0]
    result := calculate(call.Arguments)

    finalResp, err := client.Chat("sonar-pro").
        User("What is Apple's current stock price and what would 100 shares cost?").
        Tools(calculatorTool).
        Assistant(resp.Output).
        ToolCall(call.ID, call.Name, call.Arguments).
        ToolResult(call.ID, result).
        GetResponse(ctx)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(finalResp.Output)
}
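
The calculate helper above is application code, not part of Iris. A minimal sketch, assuming the tool call's Arguments field is a JSON-encoded string and handling only a simple "a * b" expression:

// calculate is a hypothetical helper. A real implementation would use a
// proper expression parser; this only evaluates "a * b".
// assumes "encoding/json", "strconv", and "strings" are imported
func calculate(arguments string) string {
    var args struct {
        Expression string `json:"expression"`
    }
    if err := json.Unmarshal([]byte(arguments), &args); err != nil {
        return "error: " + err.Error()
    }

    parts := strings.Split(args.Expression, "*")
    if len(parts) != 2 {
        return "error: unsupported expression"
    }

    a, errA := strconv.ParseFloat(strings.TrimSpace(parts[0]), 64)
    b, errB := strconv.ParseFloat(strings.TrimSpace(parts[1]), 64)
    if errA != nil || errB != nil {
        return "error: invalid numbers"
    }

    return strconv.FormatFloat(a*b, 'f', 2, 64)
}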

Control the search domain:

// Focus on academic sources
resp, err := client.Chat("sonar-pro").
    User("What does recent research say about the effects of intermittent fasting?").
    SearchFocus(core.SearchFocusAcademic).
    GetResponse(ctx)

// Focus on news sources
resp, err = client.Chat("sonar-pro").
    User("What happened in tech news today?").
    SearchFocus(core.SearchFocusNews).
    GetResponse(ctx)

// Focus on code/technical sources
resp, err = client.Chat("sonar-pro").
    User("How do I implement a trie in Go?").
    SearchFocus(core.SearchFocusTechnical).
    GetResponse(ctx)

For time-sensitive information:

resp, err := client.Chat("sonar-pro").
    User("What are today's major stock market movements?").
    GetResponse(ctx)
// The model automatically uses the current date in searches

Build on previous research:

// Initial query
resp1, _ := client.Chat("sonar-pro").
    System("You are a research assistant.").
    User("What are the main AI safety concerns?").
    GetResponse(ctx)

// Follow-up with context
resp2, _ := client.Chat("sonar-pro").
    System("You are a research assistant.").
    User("What are the main AI safety concerns?").
    Assistant(resp1.Output).
    User("What solutions are being proposed for the alignment problem specifically?").
    GetResponse(ctx)

Get multiple perspectives:

resp, err := client.Chat("sonar-pro").
    System("When answering, provide perspectives from multiple sources and note any disagreements.").
    User("Is coffee good or bad for health?").
    GetResponse(ctx)
if err != nil {
    log.Fatal(err)
}

// The response includes citations from different sources
for _, c := range resp.Citations {
    fmt.Printf("%s - %s\n", c.Title, c.URL)
}
resp, err := client.Chat("sonar-pro").User(prompt).GetResponse(ctx)
if err != nil {
var apiErr *core.APIError
if errors.As(err, &apiErr) {
switch apiErr.StatusCode {
case 401:
log.Fatal("Invalid API key")
case 429:
log.Printf("Rate limited. Retry after: %s", apiErr.RetryAfter)
case 500, 503:
log.Printf("Perplexity service error: %s", apiErr.Message)
}
}
}

Research assistant:

resp, err := client.Chat("sonar-pro").
    System("You are a thorough research assistant. Always cite your sources.").
    User("What are the latest breakthroughs in nuclear fusion energy?").
    GetResponse(ctx)

News summarization:

resp, err := client.Chat("sonar").
    System("Summarize the key points from recent news.").
    User("What happened in technology this week?").
    GetResponse(ctx)

Fact checking:

resp, err := client.Chat("sonar-pro").
    System("Verify claims using current sources. Cite evidence.").
    User("Is it true that Go is the fastest growing programming language in 2024?").
    GetResponse(ctx)

Task | Recommended Model
Quick answers | sonar
Deep research | sonar-pro
Complex reasoning | sonar-reasoning-pro
Cost-effective | sonar
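
A small helper (hypothetical, not part of Iris) can encode this table when the task category is known up front:

// pickModel maps a task category to the recommended Sonar model above.
func pickModel(task string) string {
    switch task {
    case "deep-research":
        return "sonar-pro"
    case "complex-reasoning":
        return "sonar-reasoning-pro"
    default: // quick answers, cost-sensitive workloads
        return "sonar"
    }
}

// usage: resp, err := client.Chat(pickModel("deep-research")).User(prompt).GetResponse(ctx)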

// Always process citations for factual queries
if len(resp.Citations) == 0 {
    log.Println("Warning: No sources cited for factual claim")
}

// Verify important information
for _, c := range resp.Citations {
    if strings.Contains(c.URL, ".gov") || strings.Contains(c.URL, ".edu") {
        fmt.Println("Authoritative source:", c.URL)
    }
}

client := core.NewClient(provider,
    core.WithRetryPolicy(&core.RetryPolicy{
        MaxRetries:        3,
        InitialInterval:   1 * time.Second,
        MaxInterval:       30 * time.Second,
        BackoffMultiplier: 2.0,
        RetryOn:           []int{429, 500, 503},
    }),
)
  • Uses Authorization: Bearer for authentication
  • Perplexity models automatically search the web for current information
  • Citations are included in responses for fact verification
  • Best for questions requiring real-time or current information
  • The provider is safe for concurrent use after construction (see the sketch below)
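
A minimal sketch of concurrent use, sharing one client across goroutines (the questions here are placeholders):

// assumes "sync", "log", and "fmt" are imported, and ctx/client are constructed as shown above
var wg sync.WaitGroup
questions := []string{
    "What changed in the latest Go release?",
    "What are today's top technology headlines?",
}
for _, q := range questions {
    wg.Add(1)
    go func(q string) {
        defer wg.Done()
        resp, err := client.Chat("sonar").User(q).GetResponse(ctx)
        if err != nil {
            log.Println(err)
            return
        }
        fmt.Println(resp.Output)
    }(q)
}
wg.Wait()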

Tools Guide

Combine search with tools. Tools →

Providers Overview

Compare all available providers. Providers →