Go SDK for Tinfoil’s secure AI inference API
New to Go? Start here - Project Setup
# Create a new directory for your project
mkdir my-tinfoil-app
cd my-tinfoil-app
# Initialize a Go module
go mod init my-tinfoil-app
go get github.com/tinfoilsh/tinfoil-go
tinfoil-go currently relies on a feature in go-sev-guest that has not been upstreamed yet, so you must add the following line to your go.mod:
replace github.com/google/go-sev-guest => github.com/tinfoilsh/go-sev-guest v0.0.0-20250704193550-c725e6216008
// Before (OpenAI)
- import (
- "os"
- "github.com/openai/openai-go"
- "github.com/openai/openai-go/option"
- )
-
- client := openai.NewClient(
- option.WithAPIKey(os.Getenv("OPENAI_API_KEY")),
- )
// After (Tinfoil)
+ import (
+ "os"
+ "github.com/openai/openai-go"
+ "github.com/openai/openai-go/option"
+ "github.com/tinfoilsh/tinfoil-go"
+ )
+
+ client, err := tinfoil.NewClient(
+ option.WithAPIKey(os.Getenv("TINFOIL_API_KEY")),
+ )
tinfoil.NewClient() returns a standard OpenAI client with built-in security features.
// 1. Create a client
client, err := tinfoil.NewClient(
option.WithAPIKey(os.Getenv("TINFOIL_API_KEY")),
)
if err != nil {
log.Fatal(err)
}
// 2. Use client as you would openai.Client
// see https://pkg.go.dev/github.com/openai/openai-go for API documentation
DeepSeek R1 - deepseek-r1-0528
package main
import (
"context"
"fmt"
"log"
"os"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/tinfoilsh/tinfoil-go"
)
func main() {
	// Build a Tinfoil client; it exposes the standard OpenAI client
	// surface, so everything below is plain openai-go usage.
	client, err := tinfoil.NewClient(
		option.WithAPIKey(os.Getenv("TINFOIL_API_KEY")),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Ask DeepSeek R1 to work through a multi-step reasoning problem.
	params := openai.ChatCompletionNewParams{
		Messages: []openai.ChatCompletionMessageParamUnion{
			openai.SystemMessage("You are an expert at solving complex mathematical problems step by step."),
			openai.UserMessage("Solve this step by step: If a train travels 120 miles in 2 hours, and then increases its speed by 25% for the next 3 hours, how far does it travel in total?"),
		},
		Model:       "deepseek-r1-0528",
		Temperature: openai.Float(0.1), // low temperature keeps the derivation deterministic
	}
	completion, err := client.Chat.Completions.New(context.TODO(), params)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(completion.Choices[0].Message.Content)
}
Mistral Small 3.1 24B - mistral-small-3-1-24b
package main
import (
"context"
"fmt"
"log"
"os"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/tinfoilsh/tinfoil-go"
)
func main() {
	// Create the Tinfoil client used for this example.
	client, err := tinfoil.NewClient(
		option.WithAPIKey(os.Getenv("TINFOIL_API_KEY")),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Request a bilingual explanation from Mistral Small 3.1 24B.
	resp, err := client.Chat.Completions.New(context.TODO(), openai.ChatCompletionNewParams{
		Messages: []openai.ChatCompletionMessageParamUnion{
			openai.UserMessage("Explain the concept of machine learning in both English and French."),
		},
		Model:       "mistral-small-3-1-24b",
		Temperature: openai.Float(0.7),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Choices[0].Message.Content)
}
Llama 3.3 70B - llama3-3-70b
package main
import (
"context"
"fmt"
"log"
"os"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/tinfoilsh/tinfoil-go"
)
func main() {
	// Tinfoil client configured from the environment.
	client, err := tinfoil.NewClient(
		option.WithAPIKey(os.Getenv("TINFOIL_API_KEY")),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Conversational request for Llama 3.3 70B: a system prompt plus a
	// single user question.
	messages := []openai.ChatCompletionMessageParamUnion{
		openai.SystemMessage("You are a helpful assistant that provides detailed explanations."),
		openai.UserMessage("What are the key differences between renewable and non-renewable energy sources?"),
	}
	completion, err := client.Chat.Completions.New(context.TODO(), openai.ChatCompletionNewParams{
		Messages:    messages,
		Model:       "llama3-3-70b",
		Temperature: openai.Float(0.8),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(completion.Choices[0].Message.Content)
}
Qwen 2.5 72B - qwen2-5-72b
package main
import (
"context"
"fmt"
"log"
"os"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/tinfoilsh/tinfoil-go"
)
func main() {
	// Tinfoil client; API key comes from the TINFOIL_API_KEY env var.
	client, err := tinfoil.NewClient(
		option.WithAPIKey(os.Getenv("TINFOIL_API_KEY")),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Code-generation request for Qwen 2.5 72B.
	prompt := openai.UserMessage("Write a Go function to calculate the Fibonacci sequence up to n terms, then explain how it works.")
	completion, err := client.Chat.Completions.New(context.TODO(), openai.ChatCompletionNewParams{
		Messages:    []openai.ChatCompletionMessageParamUnion{prompt},
		Model:       "qwen2-5-72b",
		Temperature: openai.Float(0.3), // keep generated code fairly deterministic
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(completion.Choices[0].Message.Content)
}
Whisper Large V3 Turbo - whisper-large-v3-turbo
package main
import (
"context"
"fmt"
"log"
"os"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/tinfoilsh/tinfoil-go"
)
func main() {
	// Tinfoil client configured from the environment.
	client, err := tinfoil.NewClient(
		option.WithAPIKey(os.Getenv("TINFOIL_API_KEY")),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Open the recording; the file handle is streamed to the
	// transcription endpoint.
	recording, err := os.Open("meeting_recording.mp3")
	if err != nil {
		log.Fatal(err)
	}
	defer recording.Close()

	// Transcribe with Whisper Large V3 Turbo.
	result, err := client.Audio.Transcriptions.New(context.TODO(), openai.AudioTranscriptionNewParams{
		Model:    "whisper-large-v3-turbo",
		File:     openai.File(recording, "meeting_recording.mp3", "audio/mpeg"),
		Language: openai.String("en"),                                                      // optional: hint the spoken language for better accuracy
		Prompt:   openai.String("This is a business meeting discussing quarterly results"), // optional: domain context
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("Transcription:", result.Text)
}
Kokoro - kokoro
package main
import (
"context"
"fmt"
"io"
"log"
"os"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/tinfoilsh/tinfoil-go"
)
func main() {
	// Tinfoil client for the Kokoro text-to-speech model.
	client, err := tinfoil.NewClient(
		option.WithAPIKey(os.Getenv("TINFOIL_API_KEY")),
	)
	if err != nil {
		log.Fatalf("Error creating client: %v", err)
	}

	textToSpeak := "Welcome to Tinfoil's secure AI platform. Your data remains private and protected."

	// render sends one speech request and writes the resulting MP3
	// stream to outPath. Shared by both voice examples below.
	render := func(params openai.AudioSpeechNewParams, outPath string) {
		resp, err := client.Audio.Speech.New(context.Background(), params)
		if err != nil {
			log.Fatalf("Error creating speech: %v", err)
		}
		defer resp.Body.Close()

		out, err := os.Create(outPath)
		if err != nil {
			log.Fatalf("Error creating output file: %v", err)
		}
		defer out.Close()

		if _, err := io.Copy(out, resp.Body); err != nil {
			log.Fatalf("Error writing audio file: %v", err)
		}
	}

	// Single voice.
	render(openai.AudioSpeechNewParams{
		Model:          "kokoro",
		Voice:          "af_sky",
		Input:          textToSpeak,
		ResponseFormat: openai.AudioSpeechNewParamsResponseFormatMp3,
	}, "speech_single.mp3")

	// Combined voices ('+'-joined) for richer sound.
	render(openai.AudioSpeechNewParams{
		Model:          "kokoro",
		Voice:          "af_sky+af_bella",
		Input:          textToSpeak,
		ResponseFormat: openai.AudioSpeechNewParamsResponseFormatMp3,
	}, "speech_combined.mp3")

	fmt.Println("Speech files generated successfully!")
}
Nomic Embed Text - nomic-embed-text
package main
import (
"context"
"fmt"
"log"
"math"
"os"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
"github.com/tinfoilsh/tinfoil-go"
)
func main() {
	// Tinfoil client configured from the environment.
	client, err := tinfoil.NewClient(
		option.WithAPIKey(os.Getenv("TINFOIL_API_KEY")),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Documents to embed; the third is intentionally off-topic so the
	// similarity contrast between documents is visible.
	documents := []string{
		"Artificial intelligence is transforming modern technology.",
		"Machine learning enables computers to learn from data.",
		"The weather today is sunny and warm.",
		"Deep learning uses neural networks with multiple layers.",
	}

	// One embedding request per document.
	embeddings := make([][]float64, 0, len(documents))
	for _, doc := range documents {
		resp, err := client.Embeddings.New(context.TODO(), openai.EmbeddingNewParams{
			Model: "nomic-embed-text",
			Input: openai.EmbeddingNewParamsInputArrayString([]string{doc}),
		})
		if err != nil {
			log.Fatal(err)
		}
		embeddings = append(embeddings, resp.Data[0].Embedding)
	}

	// Compare the two AI-related documents.
	similarity := cosineSimilarity(embeddings[0], embeddings[1])
	fmt.Printf("Similarity between first two AI-related documents: %.3f\n", similarity)
	fmt.Printf("Embedding dimension: %d\n", len(embeddings[0]))
}
func cosineSimilarity(a, b []float64) float64 {
dotProduct := 0.0
normA := 0.0
normB := 0.0
for i := 0; i < len(a); i++ {
dotProduct += a[i] * b[i]
normA += a[i] * a[i]
normB += b[i] * b[i]
}
return dotProduct / (math.Sqrt(normA) * math.Sqrt(normB))
}
// For manual verification and direct HTTP access, use SecureClient directly
// NOTE(review): this snippet must live inside a function that returns error —
// the `return fmt.Errorf(...)` lines do not compile at package level.
secureClient := tinfoil.NewSecureClient("<enclave>.model.tinfoil.sh", "tinfoilsh/<config-repo>")
// Manual verification
// groundTruth holds whatever Verify() returns on success; it is not used
// further in this snippet.
groundTruth, err := secureClient.Verify()
if err != nil {
return fmt.Errorf("verification failed: %w", err)
}
// Get the raw HTTP client
// httpClient is shown for completeness; the Get call below goes through
// secureClient directly instead.
httpClient, err := secureClient.HTTPClient()
if err != nil {
return fmt.Errorf("failed to get HTTP client: %w", err)
}
// Make HTTP requests directly
// The second argument is a header map; replace "Bearer token" with a real key.
resp, err := secureClient.Get("/api/status", map[string]string{
"Authorization": "Bearer token",
})