/**
 * Parameters for the AWS Bedrock Chat Model node.
 *
 * All fields are optional because they are populated from user-supplied
 * node configuration, which may be partial.
 */
interface LmChatAwsBedrockNodeParameters {
// ID of the foundation model that generates the completion
// (loaded dynamically; depends on `modelSource` — see the options
// metadata accompanying this declaration).
model?: string;
// How the model is resolved: directly by on-demand model ID, or via an
// inference profile.
modelSource?: "onDemand" | "inferenceProfile";
// Additional sampling options forwarded to the model invocation.
options?: {
// Upper bound on the number of tokens to generate in the completion.
maxTokensToSample?: number;
// Sampling temperature; presumably higher values yield more random
// output — NOTE(review): exact range/semantics are model-defined, confirm.
temperature?: number;
};
}
// NOTE(review): this is a byte-identical duplicate of the interface
// declared immediately above. TypeScript declaration merging makes the
// repetition legal (identical members merge silently), but it adds no
// information — likely a generator artifact. Consider removing one copy.
interface LmChatAwsBedrockNodeParameters {
// ID of the foundation model that generates the completion.
model?: string;
// How the model is resolved: on-demand model ID or inference profile.
modelSource?: "onDemand" | "inferenceProfile";
// Additional sampling options forwarded to the model invocation.
options?: {
// Upper bound on the number of tokens to generate.
maxTokensToSample?: number;
// Sampling temperature (model-defined semantics).
temperature?: number;
};
}
The model that will generate the completion. (A "Learn more" link was present in the original rich-text source; its URL was lost in extraction.) Type options: {"loadOptionsDependsOn":["modelSource"],"loadOptions":{"routing":{"request":{"method":"GET","url":"/foundation-models?&byOutputModality=TEXT&byInferenceType=ON_DEMAND"},"output":{"postReceive":[{"type":"rootProperty","properties":{"property":"modelSummaries"}},{"type":"setKeyValue","properties":{"name":"={{$responseItem.modelName}}","description":"={{$responseItem.modelArn}}","value":"={{$responseItem.modelId}}"}},{"type":"sort","properties":{"key":"name"}}]}}}}