Quick-Start — Install the SDK and try any of our models in seconds:

from tinfoil import TinfoilAI

client = TinfoilAI(
    api_key="<API_KEY>",  # from tinfoil.sh/dashboard
    enclave="<MODEL>-p.model.tinfoil.sh",  # -p suffix indicates production environment
    repo="tinfoilsh/confidential-<MODEL>-prod",  # required repository configuration
)

chat = client.chat.completions.create(
    model="<MODEL>",  # pick any model from the table below
    messages=[{"role": "user", "content": "Hello!"}]
)
print(chat.choices[0].message.content)

See the table below for available models and their capabilities.

Security Warning: Never share your API key, be careful not to commit it to version control, and never bundle it with front-end client code.

Available Models

| Model | Type | Short description |
| --- | --- | --- |
| DeepSeek R1 70B (`deepseek-r1-70b`) | Chat | High-performance reasoning model with exceptional benchmarks |
| Mistral Small 3.1 24B (`mistral-small-3-1-24b`) | Chat | Advanced multimodal model with extended context window |
| Llama 3.3 70B (`llama3-3-70b`) | Chat | Multilingual language model optimized for dialogue and reasoning |
| Llama Guard 3 1B (`llama-guard3-1b`) | Chat | Safety-focused model for content filtering and moderation |
| Whisper Large V3 Turbo (`whisper-large-v3-turbo`) | Audio | Fast, accurate speech-to-text transcription |
| Nomic Embed Text (`nomic-embed-text`) | Embedding | Open-source text embedding model that beats OpenAI Ada on benchmarks |

Code examples by model type

Below you can find minimal examples for each client library. Replace <YOUR_API_KEY> with your key. The enclave and repo parameters are required for all API calls.

Chat models

Compatible models: deepseek-r1-70b, mistral-small-3-1-24b, llama3-3-70b, llama-guard3-1b

Python

from tinfoil import TinfoilAI

client = TinfoilAI(
    enclave="<MODEL>-p.model.tinfoil.sh",  # or the enclave of your chosen model
    repo="tinfoilsh/confidential-<MODEL>-prod",
    api_key="<YOUR_API_KEY>",
)

chat_completion = client.chat.completions.create(
    messages=[{"role": "user", "content": "Hello!"}],
    model="<MODEL>",
)
print(chat_completion.choices[0].message.content)

Node

import { TinfoilAI } from "tinfoil";

const client = new TinfoilAI({
  enclave: "<MODEL>-p.model.tinfoil.sh",
  repo: "tinfoilsh/confidential-<MODEL>-prod",
  apiKey: "<YOUR_API_KEY>",
});

const completion = await client.chat.completions.create({
  messages: [{ role: "user", content: "Hello!" }],
  model: "<MODEL>",
});

console.log(completion.choices[0].message.content);

Swift

import TinfoilKit

let client = TinfoilAI(
    apiKey: "<YOUR_API_KEY>",
    enclave: "<MODEL>-p.model.tinfoil.sh",
    repo: "tinfoilsh/confidential-<MODEL>-prod"
)

let completion = try await client.chat.completions.create(
    messages: [ .user(content: "Hello!") ],
    model: "<MODEL>"
)

print(completion.choices[0].message.content)

Go

import (
    "context"
    "log"

    "github.com/openai/openai-go" // provides the request param types (openai.F, ChatCompletionNewParams)
    "github.com/tinfoilsh/tinfoil-go" // imported as tinfoil
)

client := tinfoil.NewSecureClient(
    "<MODEL>-p.model.tinfoil.sh",
    "tinfoilsh/confidential-<MODEL>-prod",
)

chat, err := client.Chat.Completions.New(
    context.TODO(),
    openai.ChatCompletionNewParams{
        Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
            openai.UserMessage("Hello!"),
        }),
        Model: openai.F("<MODEL>"),
    },
)
if err != nil {
    log.Fatal(err)
}

log.Println(chat.Choices[0].Message.Content)

Audio models

Compatible models: whisper-large-v3-turbo

CLI

# Quick transcription using the CLI
tinfoil audio -k <YOUR_API_KEY> -m whisper-large-v3-turbo -f /path/to/audio.mp3

Python

from tinfoil import TinfoilAI

client = TinfoilAI(
    enclave="whisper-large-v3-turbo-p.model.tinfoil.sh",
    repo="tinfoilsh/confidential-whisper-large-v3-turbo-prod",
    api_key="<YOUR_API_KEY>",
)

with open("audio.mp3", "rb") as audio_file:
    transcription = client.audio.transcriptions.create(
        model="whisper-large-v3-turbo",
        file=audio_file
    )
print(transcription.text)

Node

import { TinfoilAI } from "tinfoil";
import fs from 'fs';

const client = new TinfoilAI({
  enclave: "whisper-large-v3-turbo-p.model.tinfoil.sh",
  repo: "tinfoilsh/confidential-whisper-large-v3-turbo-prod",
  apiKey: "<YOUR_API_KEY>",
});

const audioFile = fs.createReadStream('audio.mp3');
const transcription = await client.audio.transcriptions.create({
  file: audioFile,
  model: "whisper-large-v3-turbo"
});

console.log(transcription.text);

Swift

import TinfoilKit
import Foundation

let client = TinfoilAI(
    apiKey: "<YOUR_API_KEY>",
    enclave: "whisper-large-v3-turbo-p.model.tinfoil.sh",
    repo: "tinfoilsh/confidential-whisper-large-v3-turbo-prod"
)

// Get audio file URL
let audioURL = URL(fileURLWithPath: "path/to/audio.mp3")
let audioData = try Data(contentsOf: audioURL)

let transcription = try await client.audio.transcriptions.create(
    model: "whisper-large-v3-turbo",
    file: audioData,
    fileName: "audio.mp3"
)

print(transcription.text)

Go

import (
    "context"
    "fmt"
    "log"
    "os"

    "github.com/openai/openai-go" // provides the request param types
    "github.com/tinfoilsh/tinfoil-go"
)

client := tinfoil.NewSecureClient(
    "whisper-large-v3-turbo-p.model.tinfoil.sh",
    "tinfoilsh/confidential-whisper-large-v3-turbo-prod",
)

file, err := os.Open("audio.mp3")
if err != nil {
    log.Fatal(err)
}
defer file.Close()

transcription, err := client.Audio.Transcriptions.Create(
    context.TODO(),
    file,
    "audio.mp3",
    openai.AudioTranscriptionCreateParams{
        Model: openai.F("whisper-large-v3-turbo"),
    },
)
if err != nil {
    log.Fatal(err)
}

fmt.Println(transcription.Text)

Embedding models

Compatible models: nomic-embed-text

Python

from tinfoil import TinfoilAI

client = TinfoilAI(
    enclave="nomic-embed-text-p.model.tinfoil.sh",
    repo="tinfoilsh/confidential-nomic-embed-text-prod",
    api_key="<YOUR_API_KEY>",
)

embedding = client.embeddings.create(
    model="nomic-embed-text",
    input="The food was delicious and the waiter..."
)
print(embedding.data[0].embedding)

Node

import { TinfoilAI } from "tinfoil";

const client = new TinfoilAI({
  enclave: "nomic-embed-text-p.model.tinfoil.sh",
  repo: "tinfoilsh/confidential-nomic-embed-text-prod",
  apiKey: "<YOUR_API_KEY>",
});

const embedding = await client.embeddings.create({
  input: "The food was delicious and the waiter...",
  model: "nomic-embed-text"
});

console.log(embedding.data[0].embedding);

Swift

import TinfoilKit

let client = TinfoilAI(
    apiKey: "<YOUR_API_KEY>",
    enclave: "nomic-embed-text-p.model.tinfoil.sh",
    repo: "tinfoilsh/confidential-nomic-embed-text-prod"
)

let embedding = try await client.embeddings.create(
    model: "nomic-embed-text",
    input: "The food was delicious and the waiter..."
)

print(embedding.data[0].embedding)

Go

import (
    "context"
    "fmt"
    "log"

    "github.com/openai/openai-go" // provides the request param types
    "github.com/tinfoilsh/tinfoil-go"
)

client := tinfoil.NewSecureClient(
    "nomic-embed-text-p.model.tinfoil.sh",
    "tinfoilsh/confidential-nomic-embed-text-prod",
)

embedding, err := client.Embeddings.Create(
    context.TODO(),
    openai.EmbeddingCreateParams{
        Input: []string{"The food was delicious and the waiter..."},
        Model: openai.F("nomic-embed-text"),
    },
)
if err != nil {
    log.Fatal(err)
}

fmt.Println(embedding.Data[0].Embedding)