Interface LmChatOpenAiNodeParameters

interface LmChatOpenAiNodeParameters {
    builtInTools?: {
        codeInterpreter?: boolean;
        fileSearch?: {
            filters?: string;
            maxResults?: number;
            vectorStoreIds: string;
        };
        webSearch?: {
            allowedDomains?: string;
            city?: string;
            country?: string;
            region?: string;
            searchContextSize?: "low" | "high" | "medium";
        };
    };
    model?: string | { mode: "id" | "list"; value: string };
    options?: {
        baseURL?: string;
        conversationId?: string;
        frequencyPenalty?: number;
        maxRetries?: number;
        maxTokens?: number;
        metadata?: string;
        presencePenalty?: number;
        promptCacheKey?: string;
        promptConfig?: {
            promptOptions: {
                promptId?: string;
                variables?: string;
                version?: string;
            };
        };
        reasoningEffort?: "low" | "high" | "medium";
        responseFormat?: "text" | "json_object";
        safetyIdentifier?: string;
        serviceTier?: "auto" | "default" | "priority" | "flex";
        temperature?: number;
        textFormat?: {
            textOptions: {
                description?: string;
                name?: string;
                requiredNotice?: string;
                schema?: string;
                strict?: boolean;
                type?: "text" | "json_object" | "json_schema";
                verbosity?: "low" | "high" | "medium";
            };
        };
        timeout?: number;
        topLogprobs?: number;
        topP?: number;
    };
    responsesApiEnabled?: boolean;
}
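For orientation, here is a minimal sketch of a value that satisfies this interface; all field values are illustrative placeholders, not node defaults.

// Minimal illustrative parameter object; values are placeholders.
const params: LmChatOpenAiNodeParameters = {
    model: "gpt-4o-mini",
    options: {
        temperature: 0.7,
        maxTokens: 1024,
    },
    responsesApiEnabled: true,
};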

Properties

readonly builtInTools?: {
    codeInterpreter?: boolean;
    fileSearch?: {
        filters?: string;
        maxResults?: number;
        vectorStoreIds: string;
    };
    webSearch?: {
        allowedDomains?: string;
        city?: string;
        country?: string;
        region?: string;
        searchContextSize?: "low" | "high" | "medium";
    };
}

Default: {}
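A sketch of a builtInTools value; the vector store ID and domain are placeholders. Note that vectorStoreIds is the only required key once fileSearch is supplied.

// Illustrative only; IDs and domains are placeholders.
const builtInTools: LmChatOpenAiNodeParameters["builtInTools"] = {
    codeInterpreter: true,
    fileSearch: {
        vectorStoreIds: "vs_abc123", // required when fileSearch is set
        maxResults: 5,
    },
    webSearch: {
        allowedDomains: "example.com",
        searchContextSize: "medium",
    },
};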

readonly model?: string | { mode: "id" | "list"; value: string }

The model which will generate the completion. Learn more. Default: "gpt-4o-mini"

Type options (the loadOptions routing used to populate the model list):

{
    "loadOptions": {
        "routing": {
            "request": {
                "method": "GET",
                "url": "={{ $parameter.options?.baseURL?.split("/").slice(-1).pop() || $credentials?.url?.split("/").slice(-1).pop() || "v1" }}/models"
            },
            "output": {
                "postReceive": [
                    {"type": "rootProperty", "properties": {"property": "data"}},
                    {"type": "filter", "properties": {"pass": "={{\n\t\t\t\t\t\t\t\t\t\t\t\t($parameter.options?.baseURL && !$parameter.options?.baseURL?.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t\t\t\t($credentials?.url && !$credentials.url.startsWith('https://api.openai.com/')) ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('ft:') ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('o1') ||\n\t\t\t\t\t\t\t\t\t\t\t\t$responseItem.id.startsWith('o3') ||\n\t\t\t\t\t\t\t\t\t\t\t\t($responseItem.id.startsWith('gpt-') && !$responseItem.id.includes('instruct'))\n\t\t\t\t\t\t\t\t\t\t\t}}"}},
                    {"type": "setKeyValue", "properties": {"name": "={{$responseItem.id}}", "value": "={{$responseItem.id}}"}},
                    {"type": "sort", "properties": {"key": "name"}}
                ]
            }
        }
    }
}
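The model field accepts either a bare model ID string or a resource-locator object; both forms below are illustrative.

// Bare string form
const modelById: LmChatOpenAiNodeParameters["model"] = "gpt-4o-mini";

// Resource-locator form, e.g. when the model was chosen from the loaded list
const modelFromList: LmChatOpenAiNodeParameters["model"] = {
    mode: "list",
    value: "gpt-4o-mini",
};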

readonly options?: {
    baseURL?: string;
    conversationId?: string;
    frequencyPenalty?: number;
    maxRetries?: number;
    maxTokens?: number;
    metadata?: string;
    presencePenalty?: number;
    promptCacheKey?: string;
    promptConfig?: {
        promptOptions: {
            promptId?: string;
            variables?: string;
            version?: string;
        };
    };
    reasoningEffort?: "low" | "high" | "medium";
    responseFormat?: "text" | "json_object";
    safetyIdentifier?: string;
    serviceTier?: "auto" | "default" | "priority" | "flex";
    temperature?: number;
    textFormat?: {
        textOptions: {
            description?: string;
            name?: string;
            requiredNotice?: string;
            schema?: string;
            strict?: boolean;
            type?: "text" | "json_object" | "json_schema";
            verbosity?: "low" | "high" | "medium";
        };
    };
    timeout?: number;
    topLogprobs?: number;
    topP?: number;
}

Additional options to add. Default: {}
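A sketch of an options value focusing on the nested shapes. The IDs and JSON strings are placeholders, and treating variables and schema as JSON passed in string form is an assumption based on their string type, not something stated by the node documentation. Note that promptConfig and textFormat each wrap a required inner object (promptOptions / textOptions).

// Illustrative only; IDs and JSON strings are placeholders.
const options: LmChatOpenAiNodeParameters["options"] = {
    baseURL: "https://api.openai.com/v1",
    temperature: 0.2,
    reasoningEffort: "medium",
    promptConfig: {
        promptOptions: {
            promptId: "pmpt_123",         // placeholder ID
            variables: '{"topic":"n8n"}', // assumed: JSON supplied as a string
        },
    },
    textFormat: {
        textOptions: {
            type: "json_schema",
            name: "my_schema",
            schema: '{"type":"object"}',  // assumed: JSON Schema supplied as a string
            strict: true,
        },
    },
};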

readonly responsesApiEnabled?: boolean

Whether to use the Responses API to generate the response. Learn more. Default: true