Blue Guardrails

Send your traces from TypeScript

Instrument your TypeScript application with OpenTelemetry and send traces to Blue Guardrails for hallucination detection.

This guide shows you how to send OpenTelemetry traces from a TypeScript application to Blue Guardrails. The examples use OpenInference instrumentation to capture traces from different SDKs.

Prerequisites

  • A Blue Guardrails account with a workspace and credits
  • An API key (workspace-scoped or user-scoped)
  • Node.js 18+ installed

Install dependencies

Run the command that matches the SDK you want to trace (OpenAI, Anthropic, or the Claude Agent SDK) — you do not need all three:
npm install openai @arizeai/openinference-instrumentation-openai \
  @opentelemetry/api @opentelemetry/exporter-trace-otlp-proto \
  @opentelemetry/resources @opentelemetry/sdk-trace-base \
  @opentelemetry/sdk-trace-node dotenv
npm install @anthropic-ai/sdk @arizeai/openinference-instrumentation-anthropic \
  @opentelemetry/api @opentelemetry/exporter-trace-otlp-proto \
  @opentelemetry/resources @opentelemetry/sdk-trace-base \
  @opentelemetry/sdk-trace-node dotenv
npm install @anthropic-ai/claude-agent-sdk @arizeai/openinference-instrumentation-claude-agent-sdk \
  @opentelemetry/api @opentelemetry/exporter-trace-otlp-proto \
  @opentelemetry/resources @opentelemetry/sdk-node \
  @opentelemetry/sdk-trace-node dotenv

Full example

Create a file called trace_openai.ts and paste the following:

import "dotenv/config";

import { OpenAIInstrumentation } from "@arizeai/openinference-instrumentation-openai";
import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto";
import { resourceFromAttributes } from "@opentelemetry/resources";
import { NodeTracerProvider, SimpleSpanProcessor } from "@opentelemetry/sdk-trace-node";
import OpenAI from "openai";

function parseResourceAttributes(raw: string | undefined): Record<string, string> {
  // Parses the OTEL_RESOURCE_ATTRIBUTES wire format: a comma-separated list
  // of key=value pairs. Entries with no "=" or with an empty key are dropped,
  // and a value is allowed to contain "=" itself.
  const attrs: Record<string, string> = {};
  if (!raw) {
    return attrs;
  }

  for (const pair of raw.split(",")) {
    const eq = pair.indexOf("=");
    if (eq === -1) {
      continue; // no separator at all — malformed entry
    }
    const key = pair.slice(0, eq);
    if (!key) {
      continue; // "=value" with nothing before the separator
    }
    attrs[key.trim()] = pair.slice(eq + 1).trim();
  }

  return attrs;
}

function parseOtlpHeaders(raw: string | undefined): Record<string, string> {
  // Converts OTEL_EXPORTER_OTLP_TRACES_HEADERS ("name=value,name=value") into
  // a header map. Pairs missing a name or an "=" separator are ignored; a
  // value may legitimately contain "=" (e.g. base64-encoded tokens).
  if (!raw) {
    return {};
  }

  const entries: Array<[string, string]> = [];
  for (const pair of raw.split(",")) {
    const [name, ...rest] = pair.split("=");
    if (!name || rest.length === 0) {
      continue;
    }
    entries.push([name.trim(), rest.join("=").trim()]);
  }

  return Object.fromEntries(entries);
}

// Span destination. Defaults to a local dev collector; production points
// OTEL_EXPORTER_OTLP_TRACES_ENDPOINT at the Blue Guardrails traces URL.
const otlpEndpoint = process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT ?? "http://localhost:8000/v1/traces";
// Start from any headers supplied via the standard OTel env var, then layer
// the Blue Guardrails-specific headers on top.
const otlpHeaders = parseOtlpHeaders(process.env.OTEL_EXPORTER_OTLP_TRACES_HEADERS);
// Workspace routing header; the BLUE_GUARDRAILS_-prefixed variable takes
// precedence over the generic WORKSPACE_ID fallback.
const workspaceId = process.env.BLUE_GUARDRAILS_WORKSPACE_ID ?? process.env.WORKSPACE_ID;
if (workspaceId) {
  otlpHeaders["x-workspace-id"] = workspaceId;
}
// Bearer-token authentication for the traces endpoint.
const apiKey = process.env.BLUE_GUARDRAILS_API_KEY;
if (apiKey) {
  otlpHeaders.Authorization = `Bearer ${apiKey}`;
}

// Tracer provider that exports spans over OTLP/protobuf. The spread order
// means the hard-coded service.name overrides any service.name supplied in
// OTEL_RESOURCE_ATTRIBUTES. SimpleSpanProcessor exports spans one by one —
// acceptable for a short script; long-running services usually batch.
const provider = new NodeTracerProvider({
  resource: resourceFromAttributes({
    ...parseResourceAttributes(process.env.OTEL_RESOURCE_ATTRIBUTES),
    "service.name": "otel-traces-typescript",
  }),
  spanProcessors: [
    new SimpleSpanProcessor(new OTLPTraceExporter({ url: otlpEndpoint, headers: otlpHeaders })),
  ],
});

// Install the provider as the global tracer provider so the instrumentation
// below picks it up.
provider.register();

// Patch the OpenAI SDK so each API call is wrapped in a span. manuallyInstrument
// is used because the module is imported directly in this file (presumably the
// automatic module-load hook cannot intercept it — see OpenInference docs).
const instrumentation = new OpenAIInstrumentation();
instrumentation.manuallyInstrument(OpenAI);

// Credentials are read from the environment (OPENAI_API_KEY by SDK convention).
const client = new OpenAI();

try {
  // Note: the context message does not name the company or year the question
  // asks about, which gives downstream hallucination detection something to check.
  const response = await client.chat.completions.create({
    model: "gpt-5-mini",
    messages: [
      { role: "system", content: "Answer the question based on the context." },
      { role: "user", content: "Context: Revenue in Q1 was $85 Billion." },
      { role: "user", content: "What was Google's revenue in Q1 2025?" },
    ],
  });

  console.log(response.choices[0].message.content);
} finally {
  // Flush and shut down so pending spans are exported before the process exits.
  await provider.forceFlush();
  await provider.shutdown();
}

Create a file called trace_anthropic.ts and paste the following:

import "dotenv/config";

import Anthropic from "@anthropic-ai/sdk";
import { AnthropicInstrumentation } from "@arizeai/openinference-instrumentation-anthropic";
import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto";
import { resourceFromAttributes } from "@opentelemetry/resources";
import { NodeTracerProvider, SimpleSpanProcessor } from "@opentelemetry/sdk-trace-node";

function parseKeyValuePairs(raw: string | undefined): Record<string, string> {
  // Parses a comma-separated "key=value" list — the syntax OpenTelemetry uses
  // for OTEL_EXPORTER_OTLP_TRACES_HEADERS and OTEL_RESOURCE_ATTRIBUTES.
  // Entries with no key or no "=" are skipped; values may contain "=".
  const parsed: Record<string, string> = {};
  if (!raw) {
    return parsed;
  }
  for (const entry of raw.split(",")) {
    const sep = entry.indexOf("=");
    if (sep === -1) {
      continue; // no separator — malformed entry
    }
    const name = entry.slice(0, sep);
    if (!name) {
      continue; // "=value" with an empty key
    }
    parsed[name.trim()] = entry.slice(sep + 1).trim();
  }
  return parsed;
}

// Span destination: local dev collector by default, Blue Guardrails in production.
const otlpEndpoint = process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT ?? "http://localhost:8000/v1/traces";
// Headers from the standard OTel env var, extended below with the
// Blue Guardrails workspace and auth headers.
const otlpHeaders = parseKeyValuePairs(process.env.OTEL_EXPORTER_OTLP_TRACES_HEADERS);
// BLUE_GUARDRAILS_WORKSPACE_ID wins over the generic WORKSPACE_ID fallback.
const workspaceId = process.env.BLUE_GUARDRAILS_WORKSPACE_ID ?? process.env.WORKSPACE_ID;
if (workspaceId) {
  otlpHeaders["x-workspace-id"] = workspaceId;
}
// Bearer-token authentication for the traces endpoint.
const apiKey = process.env.BLUE_GUARDRAILS_API_KEY;
if (apiKey) {
  otlpHeaders.Authorization = `Bearer ${apiKey}`;
}

// Tracer provider exporting over OTLP/protobuf. The spread order means the
// hard-coded service.name overrides any value from OTEL_RESOURCE_ATTRIBUTES.
// SimpleSpanProcessor exports spans individually — fine for a one-shot script.
const provider = new NodeTracerProvider({
  resource: resourceFromAttributes({
    ...parseKeyValuePairs(process.env.OTEL_RESOURCE_ATTRIBUTES),
    "service.name": "otel-traces-typescript",
  }),
  spanProcessors: [
    new SimpleSpanProcessor(new OTLPTraceExporter({ url: otlpEndpoint, headers: otlpHeaders })),
  ],
});

// Also register globally so any other OTel-aware code uses the same provider.
provider.register();

// Patch the Anthropic SDK, binding the instrumentation explicitly to this
// provider. manuallyInstrument is required because the module is imported
// directly in this file.
const instrumentation = new AnthropicInstrumentation({ tracerProvider: provider });
instrumentation.manuallyInstrument(Anthropic);

// Credentials are read from the environment (ANTHROPIC_API_KEY by SDK convention).
const client = new Anthropic();

try {
  // Note: the context message does not name the company or year the question
  // asks about, which gives downstream hallucination detection something to check.
  const response = await client.messages.create({
    max_tokens: 1000,
    model: "claude-haiku-4-5",
    system: "Answer the question based on the context.",
    messages: [
      { role: "user", content: "Context: Revenue in Q1 was $85 Billion." },
      { role: "user", content: "What was Google's revenue in Q1 2025?" },
    ],
  });

  // Content blocks are a union type; only print the text variant.
  if (response.content[0].type === "text") {
    console.log(response.content[0].text);
  }
} finally {
  // Flush and shut down so pending spans are exported before the process exits.
  await provider.forceFlush();
  await provider.shutdown();
}

Create a file called trace_claude_agent.ts and paste the following:

import "dotenv/config";

import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto";
import { resourceFromAttributes } from "@opentelemetry/resources";
import { NodeSDK } from "@opentelemetry/sdk-node";
import { SimpleSpanProcessor } from "@opentelemetry/sdk-trace-node";
import { ClaudeAgentSDKInstrumentation } from "@arizeai/openinference-instrumentation-claude-agent-sdk";
import * as ClaudeAgentSDKModule from "@anthropic-ai/claude-agent-sdk";

function parseKeyValuePairs(raw: string | undefined): Record<string, string> {
  // Turns a comma-separated "key=value" string (the OpenTelemetry env-var
  // syntax) into an object. Malformed pieces — no "=" or an empty key — are
  // skipped, and "=" may appear inside values.
  return (raw ?? "").split(",").reduce<Record<string, string>>((acc, piece) => {
    const [head, ...tail] = piece.split("=");
    if (head && tail.length > 0) {
      acc[head.trim()] = tail.join("=").trim();
    }
    return acc;
  }, {});
}

// Span destination: local dev collector by default, Blue Guardrails in production.
const otlpEndpoint = process.env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT ?? "http://localhost:8000/v1/traces";
// Headers from the standard OTel env var, extended below with the
// Blue Guardrails workspace and auth headers.
const otlpHeaders = parseKeyValuePairs(process.env.OTEL_EXPORTER_OTLP_TRACES_HEADERS);
// BLUE_GUARDRAILS_WORKSPACE_ID wins over the generic WORKSPACE_ID fallback.
const workspaceId = process.env.BLUE_GUARDRAILS_WORKSPACE_ID ?? process.env.WORKSPACE_ID;
if (workspaceId) {
  otlpHeaders["x-workspace-id"] = workspaceId;
}
// Bearer-token authentication for the traces endpoint.
const apiKey = process.env.BLUE_GUARDRAILS_API_KEY;
if (apiKey) {
  otlpHeaders.Authorization = `Bearer ${apiKey}`;
}

// Create mutable copy — ESM namespace objects are frozen, so the
// instrumentation cannot patch the namespace import in place.
const ClaudeAgentSDK = { ...ClaudeAgentSDKModule };

// Patch the copied SDK object; calls must go through this copy (not the
// original namespace import) to be traced.
const instrumentation = new ClaudeAgentSDKInstrumentation();
instrumentation.manuallyInstrument(ClaudeAgentSDK);

// NodeSDK bundles provider setup, exporter wiring, and instrumentation
// registration. The spread order means the hard-coded service.name overrides
// any value from OTEL_RESOURCE_ATTRIBUTES.
const sdk = new NodeSDK({
  resource: resourceFromAttributes({
    ...parseKeyValuePairs(process.env.OTEL_RESOURCE_ATTRIBUTES),
    "service.name": "otel-traces-typescript",
  }),
  spanProcessors: [
    new SimpleSpanProcessor(new OTLPTraceExporter({ url: otlpEndpoint, headers: otlpHeaders })),
  ],
  instrumentations: [instrumentation],
});

sdk.start();

// Take query from the patched copy so the traced wrapper is invoked.
const { query } = ClaudeAgentSDK;

try {
  // query() returns an async iterable of agent messages; the agent is allowed
  // read/edit/glob tools and auto-accepts file edits.
  for await (const message of query({
    prompt: "Read sample_code.py and convert all numpy-style docstrings to Google-style docstrings. Edit the file in place.",
    options: {
      allowedTools: ["Read", "Edit", "Glob"],
      permissionMode: "acceptEdits",
    },
  })) {
    if (message.type === "assistant" && message.message?.content) {
      // Content blocks are a union: text blocks have `text`, tool-use blocks
      // have `name`.
      for (const block of message.message.content) {
        if ("text" in block) {
          console.log(block.text);
        } else if ("name" in block) {
          console.log(`Tool: ${block.name}`);
        }
      }
    }
  }
} finally {
  // Shut down the SDK so remaining spans are flushed before the process exits.
  await sdk.shutdown();
}

How it works

The examples do three things:

  1. Configure a tracer provider. An OTLP exporter sends spans to the Blue Guardrails traces endpoint. Authentication headers (Authorization and x-workspace-id) are read from environment variables.

  2. Instrument the SDK. OpenInference instrumentation wraps every SDK call in an OpenTelemetry span. The span captures the request messages, model name, token usage, and the response.

  3. Make a traced call. The traced call runs and prints the response. On exit, the provider is flushed and shut down to ensure all spans are exported before the process ends.

Set environment variables

Create a .env file or export the variables directly:

export BLUE_GUARDRAILS_API_KEY="your-api-key"
export BLUE_GUARDRAILS_WORKSPACE_ID="your-workspace-id"
export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="https://api.blueguardrails.com/v1/traces"

The examples also need the model provider's own credentials: set OPENAI_API_KEY for the OpenAI example, and ANTHROPIC_API_KEY for the Anthropic and Claude Agent examples.

Run the example

npx tsx trace_openai.ts
npx tsx trace_anthropic.ts
npx tsx trace_claude_agent.ts

After the script completes, the trace appears in your Blue Guardrails workspace. Blue Guardrails processes the trace and runs hallucination detection on the assistant's response.

On this page