Type Alias WrapAISDKConfig<T>

WrapAISDKConfig: {
    name: undefined | string;
    attachments?: undefined | Attachments;
    child_execution_order?: undefined | number;
    client?: undefined | Client;
    dotted_order?: undefined | string;
    end_time?: undefined | string | number;
    execution_order?: undefined | number;
    extra?: undefined | KVMap;
    id?: undefined | string;
    metadata?: undefined | KVMap;
    on_end?: undefined | (runTree: RunTree) => void;
    parent_run_id?: undefined | string;
    processChildLLMRunInputs?:
        | undefined
        | (inputs: LanguageModelV2CallOptions) => Record<string, unknown>;
    processChildLLMRunOutputs?:
        | undefined
        | (
            outputs: "fullStream" extends keyof Awaited<ReturnType<T>>
                ? AggregatedDoStreamOutput
                : {
                    content: LanguageModelV2Content[];
                    finishReason: LanguageModelV2FinishReason;
                    usage: LanguageModelV2Usage;
                    warnings: LanguageModelV2CallWarning[];
                    providerMetadata?: SharedV2ProviderMetadata;
                    request?: { body?: unknown };
                    response?: LanguageModelV2ResponseMetadata & {
                        body?: unknown;
                        headers?: SharedV2Headers;
                    };
                },
        ) => Record<string, unknown>;
    processInputs?:
        | undefined
        | (inputs: Parameters<T>[0]) => Record<string, unknown>;
    processOutputs?:
        | undefined
        | (outputs: Awaited<ReturnType<T>>) => Record<string, unknown>;
    project_name?: undefined | string;
    reference_example_id?: undefined | string;
    replicas?: undefined | Replica[];
    start_time?: undefined | string | number;
    tags?: undefined | string[];
    trace_id?: undefined | string;
    tracingEnabled?: undefined | boolean;
}

Type Parameters

  • T extends (...args: any[]) => any = (...args: any[]) => any

Type declaration

  • name: undefined | string
  • Optional attachments?: undefined | Attachments
  • Optional child_execution_order?: undefined | number
  • Optional client?: undefined | Client
  • Optional dotted_order?: undefined | string
  • Optional end_time?: undefined | string | number
  • Optional execution_order?: undefined | number
  • Optional extra?: undefined | KVMap
  • Optional id?: undefined | string
  • Optional metadata?: undefined | KVMap
  • Optional on_end?: undefined | (runTree: RunTree) => void
  • Optional parent_run_id?: undefined | string
  • Optional processChildLLMRunInputs?: undefined | (inputs: LanguageModelV2CallOptions) => Record<string, unknown>

    Apply transformations to AI SDK child LLM run inputs before logging. This function should NOT mutate the inputs. Receives both "raw" and LangSmith-suggested "formatted" inputs, and should combine them into a single LangSmith-formatted input.

    import {
      wrapAISDK,
      createLangSmithProviderOptions,
    } from "langsmith/experimental/vercel";
    import * as ai from "ai";
    import { openai } from "@ai-sdk/openai";

    const { generateText } = wrapAISDK(ai);

    const lsConfig = createLangSmithProviderOptions<typeof ai.generateText>({
      processChildLLMRunInputs: (inputs) => {
        const { prompt } = inputs;
        return {
          messages: prompt.map((message) => ({
            ...message,
            content: "REDACTED CHILD INPUTS",
          })),
        };
      },
    });
    const { text } = await generateText({
      model: openai("gpt-5-nano"),
      prompt: "What is the capital of France?",
      providerOptions: {
        langsmith: lsConfig,
      },
    });
  • Optional processChildLLMRunOutputs?:
        | undefined
        | (
            outputs: "fullStream" extends keyof Awaited<ReturnType<T>>
                ? AggregatedDoStreamOutput
                : {
                    content: LanguageModelV2Content[];
                    finishReason: LanguageModelV2FinishReason;
                    usage: LanguageModelV2Usage;
                    warnings: LanguageModelV2CallWarning[];
                    providerMetadata?: SharedV2ProviderMetadata;
                    request?: { body?: unknown };
                    response?: LanguageModelV2ResponseMetadata & {
                        body?: unknown;
                        headers?: SharedV2Headers;
                    };
                },
        ) => Record<string, unknown>

    Apply transformations to AI SDK child LLM run outputs before logging. This function should NOT mutate the outputs. Receives both "raw" and LangSmith-suggested "formatted" outputs, and should combine them into a single LangSmith-formatted output.

    import {
      wrapAISDK,
      createLangSmithProviderOptions,
    } from "langsmith/experimental/vercel";
    import * as ai from "ai";
    import { openai } from "@ai-sdk/openai";

    const { generateText } = wrapAISDK(ai);

    const lsConfig = createLangSmithProviderOptions<typeof ai.generateText>({
      processChildLLMRunOutputs: (outputs) => {
        return {
          providerMetadata: outputs.providerMetadata,
          content: "REDACTED CHILD OUTPUTS",
          role: "assistant",
        };
      },
    });
    const { text } = await generateText({
      model: openai("gpt-5-nano"),
      prompt: "What is the capital of France?",
      providerOptions: {
        langsmith: lsConfig,
      },
    });
  • Optional processInputs?: undefined | (inputs: Parameters<T>[0]) => Record<string, unknown>

    Apply transformations to AI SDK inputs before logging. This function should NOT mutate the inputs. Receives both "raw" and LangSmith-suggested "formatted" inputs, and should combine them into a single LangSmith-formatted input.

    import {
      wrapAISDK,
      createLangSmithProviderOptions,
    } from "langsmith/experimental/vercel";
    import * as ai from "ai";
    import { openai } from "@ai-sdk/openai";

    const { generateText } = wrapAISDK(ai);

    const lsConfig = createLangSmithProviderOptions<typeof ai.generateText>({
      processInputs: (inputs) => {
        const { messages } = inputs;
        return {
          messages: messages?.map((message) => ({
            providerMetadata: message.providerOptions,
            role: "assistant",
            content: "REDACTED",
          })),
          prompt: "REDACTED",
        };
      },
    });
    const { text } = await generateText({
      model: openai("gpt-5-nano"),
      prompt: "What is the capital of France?",
      providerOptions: {
        langsmith: lsConfig,
      },
    });

    This function is not inherited by nested LLM runs or tool calls. Pass processChildLLMRunInputs to override child LLM run input processing or wrap your tool's execute method in a separate traceable for tool calls.

  • Optional processOutputs?: undefined | (outputs: Awaited<ReturnType<T>>) => Record<string, unknown>

    Apply transformations to AI SDK outputs before logging. This function should NOT mutate the outputs. Receives both "raw" and LangSmith-suggested "formatted" outputs, and should combine them into a single LangSmith-formatted output.

    import {
      wrapAISDK,
      createLangSmithProviderOptions,
    } from "langsmith/experimental/vercel";
    import * as ai from "ai";
    import { openai } from "@ai-sdk/openai";

    const { generateText } = wrapAISDK(ai);

    const lsConfig = createLangSmithProviderOptions<typeof ai.generateText>({
      processOutputs: (outputs) => {
        return {
          providerMetadata: outputs.providerMetadata,
          role: "assistant",
          content: "REDACTED",
        };
      },
    });
    const { text } = await generateText({
      model: openai("gpt-5-nano"),
      prompt: "What is the capital of France?",
      providerOptions: {
        langsmith: lsConfig,
      },
    });

    This function is not inherited by nested LLM runs or tool calls. Pass processChildLLMRunOutputs to override child LLM run output processing or wrap your tool's execute method in a separate traceable for tool calls.

  • Optional project_name?: undefined | string
  • Optional reference_example_id?: undefined | string
  • Optional replicas?: undefined | Replica[]
  • Optional start_time?: undefined | string | number
  • Optional tags?: undefined | string[]
  • Optional trace_id?: undefined | string
  • Optional tracingEnabled?: undefined | boolean