diff --git a/README.md b/README.md index 296d50f..8c8c7bd 100644 --- a/README.md +++ b/README.md @@ -21,104 +21,11 @@ go get github.com/braintrustdata/braintrust-sdk-go export BRAINTRUST_API_KEY="your-api-key" # Get from https://www.braintrust.dev/app/settings ``` -## Quick Start - -Braintrust uses [OpenTelemetry](https://opentelemetry.io/) for distributed tracing. Set up a TracerProvider and initialize the client: - -```go -package main - -import ( - "context" - "log" - - "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/sdk/trace" - - "github.com/braintrustdata/braintrust-sdk-go" -) - -func main() { - tp := trace.NewTracerProvider() - defer tp.Shutdown(context.Background()) - otel.SetTracerProvider(tp) - - client, err := braintrust.New(tp, braintrust.WithProject("my-project")) - if err != nil { - log.Fatal(err) - } - _ = client // Your client is ready to use -} -``` - -## Usage - -### Evaluations - -Run systematic evaluations with custom test cases and scoring functions: - -```go -package main - -import ( - "context" - "log" - - "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/sdk/trace" - - "github.com/braintrustdata/braintrust-sdk-go" - "github.com/braintrustdata/braintrust-sdk-go/eval" -) - -func main() { - ctx := context.Background() - - // Set up OpenTelemetry tracer - tp := trace.NewTracerProvider() - defer tp.Shutdown(ctx) - otel.SetTracerProvider(tp) - - // Initialize Braintrust - client, err := braintrust.New(tp) - if err != nil { - log.Fatal(err) - } - - // Create an evaluator with your task's input and output types - evaluator := braintrust.NewEvaluator[string, string](client) - - // Run an evaluation - _, err = evaluator.Run(ctx, eval.Opts[string, string]{ - Experiment: "greeting-experiment", - Dataset: eval.NewDataset([]eval.Case[string, string]{ - {Input: "World", Expected: "Hello World"}, - {Input: "Alice", Expected: "Hello Alice"}, - }), - Task: eval.T(func(ctx context.Context, input string) (string, error) { - return "Hello " + input, 
nil - }), - Scorers: []eval.Scorer[string, string]{ - eval.NewScorer("exact_match", func(ctx context.Context, r eval.TaskResult[string, string]) (eval.Scores, error) { - score := 0.0 - if r.Expected == r.Output { - score = 1.0 - } - return eval.S(score), nil - }), - }, - }) - if err != nil { - log.Fatal(err) - } -} -``` - -### Tracing LLM Calls +## Instrumentation Trace LLM calls with **automatic** or **manual** instrumentation. -#### Automatic Instrumentation (Recommended) +### Automatic Instrumentation Use [Orchestrion](https://github.com/DataDog/orchestrion) to automatically inject tracing at compile time—no code changes required. @@ -152,63 +59,24 @@ import ( **3. Build with orchestrion:** ```bash +# Build with orchestrion orchestrion go build ./... + +# Or configure GOFLAGS to use orchestrion automatically +export GOFLAGS="'-toolexec=orchestrion toolexec'" +go build ./... ``` That's it! Your LLM client calls are now automatically traced. No middleware or wrapper code needed in your application. -#### Manual Instrumentation +### Manual Instrumentation -Alternatively, add tracing middleware explicitly to your clients: +If you prefer explicit control, you can add tracing middleware manually to your LLM clients. See the [Manual Instrumentation Guide](./trace/contrib/README.md) for detailed examples with OpenAI, Anthropic, Google Gemini, and other providers. 
-**OpenAI:** -```go -package main +## Evaluations -import ( - "context" - "log" +Run [evals](https://www.braintrust.dev/docs/guides/evals) with custom test cases and scoring functions: - "github.com/openai/openai-go" - "github.com/openai/openai-go/option" - "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/sdk/trace" - - "github.com/braintrustdata/braintrust-sdk-go" - traceopenai "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/openai" -) - -func main() { - // Set up OpenTelemetry tracer - tp := trace.NewTracerProvider() - defer tp.Shutdown(context.Background()) - otel.SetTracerProvider(tp) - - // Initialize Braintrust - _, err := braintrust.New(tp) - if err != nil { - log.Fatal(err) - } - - // Create OpenAI client with tracing middleware - client := openai.NewClient( - option.WithMiddleware(traceopenai.NewMiddleware()), - ) - - // Make API calls - they'll be automatically traced and logged to Braintrust - _, err = client.Chat.Completions.New(context.Background(), openai.ChatCompletionNewParams{ - Messages: []openai.ChatCompletionMessageParamUnion{ - openai.UserMessage("Hello!"), - }, - Model: openai.ChatModelGPT4oMini, - }) - if err != nil { - log.Fatal(err) - } -} -``` - -**Anthropic:** ```go package main @@ -216,41 +84,49 @@ import ( "context" "log" - "github.com/anthropics/anthropic-sdk-go" - "github.com/anthropics/anthropic-sdk-go/option" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/sdk/trace" "github.com/braintrustdata/braintrust-sdk-go" - traceanthropic "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/anthropic" + "github.com/braintrustdata/braintrust-sdk-go/eval" ) func main() { + ctx := context.Background() + // Set up OpenTelemetry tracer tp := trace.NewTracerProvider() - defer tp.Shutdown(context.Background()) + defer tp.Shutdown(ctx) otel.SetTracerProvider(tp) // Initialize Braintrust - _, err := braintrust.New(tp, - braintrust.WithProject("my-project"), - ) + client, err := braintrust.New(tp) if err != nil { 
log.Fatal(err) } - // Create Anthropic client with tracing middleware - client := anthropic.NewClient( - option.WithMiddleware(traceanthropic.NewMiddleware()), - ) + // Create an evaluator with your task's input and output types + evaluator := braintrust.NewEvaluator[string, string](client) - // Make API calls - they'll be automatically traced and logged to Braintrust - _, err = client.Messages.New(context.Background(), anthropic.MessageNewParams{ - Model: anthropic.ModelClaude3_7SonnetLatest, - Messages: []anthropic.MessageParam{ - anthropic.NewUserMessage(anthropic.NewTextBlock("Hello!")), + // Run an evaluation + _, err = evaluator.Run(ctx, eval.Opts[string, string]{ + Experiment: "greeting-experiment", + Dataset: eval.NewDataset([]eval.Case[string, string]{ + {Input: "World", Expected: "Hello World"}, + {Input: "Alice", Expected: "Hello Alice"}, + }), + Task: eval.T(func(ctx context.Context, input string) (string, error) { + return "Hello " + input, nil + }), + Scorers: []eval.Scorer[string, string]{ + eval.NewScorer("exact_match", func(ctx context.Context, r eval.TaskResult[string, string]) (eval.Scores, error) { + score := 0.0 + if r.Expected == r.Output { + score = 1.0 + } + return eval.S(score), nil + }), }, - MaxTokens: 1024, }) if err != nil { log.Fatal(err) @@ -258,7 +134,7 @@ func main() { } ``` -### API Client +## API Client Manage Braintrust resources programmatically: @@ -327,62 +203,6 @@ func main() { } ``` -**Google Gemini:** -```go -package main - -import ( - "context" - "log" - "os" - - "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/sdk/trace" - "google.golang.org/genai" - - "github.com/braintrustdata/braintrust-sdk-go" - tracegenai "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/genai" -) - -func main() { - // Set up OpenTelemetry tracer - tp := trace.NewTracerProvider() - defer tp.Shutdown(context.Background()) - otel.SetTracerProvider(tp) - - // Initialize Braintrust - _, err := braintrust.New(tp, - 
braintrust.WithProject("my-project"), - ) - if err != nil { - log.Fatal(err) - } - - // Create Gemini client with tracing - client, err := genai.NewClient(context.Background(), &genai.ClientConfig{ - HTTPClient: tracegenai.Client(), - APIKey: os.Getenv("GOOGLE_API_KEY"), - Backend: genai.BackendGeminiAPI, - }) - if err != nil { - log.Fatal(err) - } - - // Make API calls - they'll be automatically traced and logged to Braintrust - _, err = client.Models.GenerateContent(context.Background(), - "gemini-1.5-flash", - genai.Text("Hello!"), - nil, - ) - if err != nil { - log.Fatal(err) - } -} -``` - -**LangChainGo:** -The SDK provides comprehensive tracing for [LangChainGo](https://github.com/tmc/langchaingo) applications. See [`examples/langchaingo`](./examples/langchaingo/main.go) for examples. - ## Examples Complete working examples are available in [`examples/`](./examples/): diff --git a/trace/contrib/README.md b/trace/contrib/README.md new file mode 100644 index 0000000..87b8743 --- /dev/null +++ b/trace/contrib/README.md @@ -0,0 +1,115 @@ +# Manual Instrumentation + +This guide shows how to manually add tracing middleware to your LLM clients. For zero-code instrumentation, see [Automatic Instrumentation](../../README.md#automatic-instrumentation) in the main README. 
+ +## Prerequisites + +Set up OpenTelemetry and initialize Braintrust: + +```go +import ( + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/sdk/trace" + "github.com/braintrustdata/braintrust-sdk-go" +) + +func main() { + tp := trace.NewTracerProvider() + defer tp.Shutdown(context.Background()) + otel.SetTracerProvider(tp) + + _, err := braintrust.New(tp, braintrust.WithProject("my-project")) + if err != nil { + log.Fatal(err) + } + // Now add tracing middleware to your LLM clients below +} +``` + +## OpenAI (openai-go) + +```go +import ( + "github.com/openai/openai-go" + "github.com/openai/openai-go/option" + traceopenai "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/openai" +) + +client := openai.NewClient( + option.WithMiddleware(traceopenai.NewMiddleware()), +) + +_, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{ + Messages: []openai.ChatCompletionMessageParamUnion{ + openai.UserMessage("Hello!"), + }, + Model: openai.ChatModelGPT4oMini, +}) +``` + +## Anthropic + +```go +import ( + "github.com/anthropics/anthropic-sdk-go" + "github.com/anthropics/anthropic-sdk-go/option" + traceanthropic "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/anthropic" +) + +client := anthropic.NewClient( + option.WithMiddleware(traceanthropic.NewMiddleware()), +) + +_, err := client.Messages.New(ctx, anthropic.MessageNewParams{ + Model: anthropic.ModelClaude3_7SonnetLatest, + Messages: []anthropic.MessageParam{ + anthropic.NewUserMessage(anthropic.NewTextBlock("Hello!")), + }, + MaxTokens: 1024, +}) +``` + +## Google Gemini + +```go +import ( + "google.golang.org/genai" + tracegenai "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/genai" +) + +client, err := genai.NewClient(ctx, &genai.ClientConfig{ + HTTPClient: tracegenai.Client(), + APIKey: os.Getenv("GOOGLE_API_KEY"), + Backend: genai.BackendGeminiAPI, +}) + +_, err = client.Models.GenerateContent(ctx, + "gemini-1.5-flash", + genai.Text("Hello!"), + nil, +) +``` + 
+## sashabaranov/go-openai + +```go +import ( + "github.com/sashabaranov/go-openai" + traceopenai "github.com/braintrustdata/braintrust-sdk-go/trace/contrib/github.com/sashabaranov/go-openai" +) + +config := openai.DefaultConfig(os.Getenv("OPENAI_API_KEY")) +config.HTTPClient = traceopenai.Client() +client := openai.NewClientWithConfig(config) + +_, err := client.CreateChatCompletion(ctx, openai.ChatCompletionRequest{ + Model: openai.GPT4oMini, + Messages: []openai.ChatCompletionMessage{ + {Role: openai.ChatMessageRoleUser, Content: "Hello!"}, + }, +}) +``` + +## LangChainGo + +See [`examples/langchaingo`](../../examples/langchaingo/main.go) for LangChainGo integration with callback-based tracing.