<?xml version="1.0"?>
<doc>
    <assembly>
        <name>AWS.Tools.BedrockRuntime</name>
    </assembly>
    <members>
        <member name="T:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeCmdlet">
            <summary>
            Retrieve information about an asynchronous invocation.
            </summary>
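            <example>
            <para>
            A minimal usage sketch, assuming the cmdlet name Get-BDRRAsyncInvoke that corresponds
            to this class; the ARN shown is a placeholder.
            </para>
            <code>
            # Retrieve the status and details of a single asynchronous invocation.
            Get-BDRRAsyncInvoke -InvocationArn 'arn:aws:bedrock:us-east-1:111122223333:async-invoke/abc123example'
            </code>
            </example>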
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeCmdlet.InvocationArn">
            <summary>
            <para>
            <para>The invocation's ARN.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeCmdlet.Select">
            <summary>
            Use the -Select parameter to control the cmdlet output. The default value is '*'.
            Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.BedrockRuntime.Model.GetAsyncInvokeResponse).
            Specifying the name of a property of type Amazon.BedrockRuntime.Model.GetAsyncInvokeResponse will result in that property being returned.
            Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
            </summary>
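            <example>
            <para>
            A brief sketch of the -Select behavior described above; the Status property name is
            assumed to exist on the GetAsyncInvoke response, and $arn is a placeholder variable.
            </para>
            <code>
            # Return only one property of the service response.
            Get-BDRRAsyncInvoke -InvocationArn $arn -Select 'Status'

            # Return the value that was passed to -InvocationArn instead of the service response.
            Get-BDRRAsyncInvoke -InvocationArn $arn -Select '^InvocationArn'
            </code>
            </example>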
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeCmdlet.PassThru">
            <summary>
            Changes the cmdlet behavior to return the value passed to the InvocationArn parameter.
            The -PassThru parameter is deprecated, use -Select '^InvocationArn' instead. This parameter will be removed in a future version.
            </summary>
        </member>
        <member name="T:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeListCmdlet">
            <summary>
            Lists asynchronous invocations.
            </summary>
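            <example>
            <para>
            A minimal usage sketch, assuming the cmdlet name Get-BDRRAsyncInvokeList; the status
            and sort values shown are illustrative.
            </para>
            <code>
            # List up to 10 in-progress invocations, newest first.
            Get-BDRRAsyncInvokeList -StatusEqual 'InProgress' -SortBy 'SubmissionTime' -SortOrder 'Descending' -MaxResult 10
            </code>
            </example>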
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeListCmdlet.SortBy">
            <summary>
            <para>
            <para>How to sort the response.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeListCmdlet.SortOrder">
            <summary>
            <para>
            <para>The sorting order for the response.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeListCmdlet.StatusEqual">
            <summary>
            <para>
            <para>Filter invocations by status.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeListCmdlet.SubmitTimeAfter">
            <summary>
            <para>
            <para>Include invocations submitted after this time.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeListCmdlet.SubmitTimeBefore">
            <summary>
            <para>
            <para>Include invocations submitted before this time.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeListCmdlet.MaxResult">
            <summary>
            <para>
            <para>The maximum number of invocations to return in one page of results.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeListCmdlet.NextToken">
            <summary>
            <para>
            <para>Specify the pagination token from a previous request to retrieve the next page of
            results.</para>
            </para>
            </summary>
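            <example>
            <para>
            A pagination sketch, assuming the cmdlet name Get-BDRRAsyncInvokeList and that the
            full service response exposes a NextToken property, as is typical for paginated
            responses.
            </para>
            <code>
            # Page through all results by passing each response's NextToken back in.
            $token = $null
            do {
                $page = Get-BDRRAsyncInvokeList -MaxResult 25 -NextToken $token -Select '*'
                $page.AsyncInvokeSummaries
                $token = $page.NextToken
            } while ($token)
            </code>
            </example>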
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.GetBDRRAsyncInvokeListCmdlet.Select">
            <summary>
            Use the -Select parameter to control the cmdlet output. The default value is 'AsyncInvokeSummaries'.
            Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.BedrockRuntime.Model.ListAsyncInvokesResponse).
            Specifying the name of a property of type Amazon.BedrockRuntime.Model.ListAsyncInvokesResponse will result in that property being returned.
            Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
            </summary>
        </member>
        <member name="T:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet">
            <summary>
            Sends messages to the specified Amazon Bedrock model. <c>Converse</c> provides a consistent
            interface that works with all models that support messages. This allows you to write
            code once and use it with different models. If a model has unique inference parameters,
            you can also pass those unique parameters to the model.
             
              
            <para>
            Amazon Bedrock doesn't store any text, images, or documents that you provide as content.
            The data is only used to generate the response.
            </para><para>
            You can submit a prompt by including it in the <c>messages</c> field, specifying the
            <c>modelId</c> of a foundation model or inference profile to run inference on it,
            and including any other fields that are relevant to your use case.
            </para><para>
            You can also submit a prompt from Prompt management by specifying the ARN of the prompt
            version and including a map of variables to values in the <c>promptVariables</c> field.
            You can append more messages to the prompt by using the <c>messages</c> field. If
            you use a prompt from Prompt management, you can't include the following fields in
            the request: <c>additionalModelRequestFields</c>, <c>inferenceConfig</c>, <c>system</c>,
            or <c>toolConfig</c>. Instead, these fields must be defined through Prompt management.
            For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management-use.html">Use
            a prompt from Prompt management</a>.
            </para><para>
            For information about the Converse API, see <i>Use the Converse API</i> in the <i>Amazon
            Bedrock User Guide</i>. To use a guardrail, see <i>Use a guardrail with the Converse
            API</i> in the <i>Amazon Bedrock User Guide</i>. To use a tool with a model, see <i>Tool
            use (Function calling)</i> in the <i>Amazon Bedrock User Guide</i>.</para><para>
            For example code, see <i>Converse API examples</i> in the <i>Amazon Bedrock User Guide</i>.
             
            </para><para>
            This operation requires permission for the <c>bedrock:InvokeModel</c> action.
            </para><important><para>
            To deny all inference access to resources that you specify in the modelId field, you
            need to deny access to the <c>bedrock:InvokeModel</c> and <c>bedrock:InvokeModelWithResponseStream</c>
            actions. Doing this also denies access to the resource through the base inference
            actions (<a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_InvokeModel.html">InvokeModel</a>
            and <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_InvokeModelWithResponseStream.html">InvokeModelWithResponseStream</a>).
            For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/security_iam_id-based-policy-examples.html#security_iam_id-based-policy-examples-deny-inference">Deny
            access for inference on specific models</a>.
            </para></important><para>
            For troubleshooting some of the common errors you might encounter when using the <c>Converse</c>
            API, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/troubleshooting-api-error-codes.html">Troubleshooting
            Amazon Bedrock API Error Codes</a> in the Amazon Bedrock User Guide.
            </para>
            </summary>
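            <example>
            <para>
            A minimal sketch of sending one user message, assuming the cmdlet name
            Invoke-BDRRConverse; the model ID is a placeholder, the message is built from the
            SDK's Message and ContentBlock types via standard PowerShell hashtable conversion,
            and the output path assumes the usual Converse response shape (Output.Message.Content).
            </para>
            <code>
            # Build one user message containing a single text content block.
            $message = [Amazon.BedrockRuntime.Model.Message]@{
                Role    = 'user'
                Content = @([Amazon.BedrockRuntime.Model.ContentBlock]@{ Text = 'Write a haiku about the ocean.' })
            }

            # Send the message; with the default -Select '*', the whole ConverseResponse is returned.
            $response = Invoke-BDRRConverse -ModelId 'anthropic.claude-3-haiku-20240307-v1:0' -Message $message -InferenceConfig_MaxToken 256
            $response.Output.Message.Content.Text
            </code>
            </example>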
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.AdditionalModelRequestField">
            <summary>
            <para>
            <para>Additional inference parameters that the model supports, beyond the base set of inference
            parameters that <c>Converse</c> and <c>ConverseStream</c> support in the <c>inferenceConfig</c>
            field. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Model
            parameters</a>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.AdditionalModelResponseFieldPath">
            <summary>
            <para>
            <para>Additional model parameter field paths to return in the response. <c>Converse</c>
            and <c>ConverseStream</c> return the requested fields as a JSON Pointer object in
            the <c>additionalModelResponseFields</c> field. The following is example JSON for
            <c>additionalModelResponseFieldPaths</c>.</para><para><c>[ "/stop_sequence" ]</c></para><para>For information about the JSON Pointer syntax, see the <a href="https://datatracker.ietf.org/doc/html/rfc6901">Internet
            Engineering Task Force (IETF)</a> documentation.</para><para><c>Converse</c> and <c>ConverseStream</c> reject an empty JSON Pointer or incorrectly
            structured JSON Pointer with a <c>400</c> error code. If the JSON Pointer is valid
            but the requested field is not in the model response, it is ignored by <c>Converse</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.ToolChoice_Any">
            <summary>
            <para>
            <para>The model must request at least one tool (no text is generated).</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.ToolChoice_Auto">
            <summary>
            <para>
            <para>(Default). The model automatically decides whether to call a tool or to generate
            text instead.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.GuardrailConfig_GuardrailIdentifier">
            <summary>
            <para>
            <para>The identifier for the guardrail.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.GuardrailConfig_GuardrailVersion">
            <summary>
            <para>
            <para>The version of the guardrail.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.PerformanceConfig_Latency">
            <summary>
            <para>
            <para>To use a latency-optimized version of the model, set to <c>optimized</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.InferenceConfig_MaxToken">
            <summary>
            <para>
            <para>The maximum number of tokens to allow in the generated response. The default value
            is the maximum allowed value for the model that you are using. For more information,
            see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Inference
            parameters for foundation models</a>. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.Message">
            <summary>
            <para>
            <para>The messages that you want to send to the model.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.ModelId">
            <summary>
            <para>
            <para>Specifies the model or throughput with which to run inference, or the prompt resource
            to use in inference. The value depends on the resource that you use:</para><ul><li><para>If you use a base model, specify the model ID or its ARN. For a list of model IDs
            for base models, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns">Amazon
            Bedrock base model IDs (on-demand throughput)</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use an inference profile, specify the inference profile ID or its ARN. For
            a list of inference profile IDs, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference-support.html">Supported
            Regions and models for cross-region inference</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use a provisioned model, specify the ARN of the Provisioned Throughput. For
            more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prov-thru-use.html">Run
            inference using a Provisioned Throughput</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use a custom model, first purchase Provisioned Throughput for it. Then specify
            the ARN of the resulting provisioned model. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-use.html">Use
            a custom model in Amazon Bedrock</a> in the Amazon Bedrock User Guide.</para></li><li><para>To include a prompt that was defined in <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management.html">Prompt
            management</a>, specify the ARN of the prompt version to use.</para></li></ul><para>The Converse API doesn't support <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-import-model.html">imported
            models</a>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.Tool_Name">
            <summary>
            <para>
            <para>The name of the tool that the model must request. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.PromptVariable">
            <summary>
            <para>
            <para>Contains a map of variables in a prompt from Prompt management to objects containing
            the values to fill in for them when running model invocation. This field is ignored
            if you don't specify a prompt resource in the <c>modelId</c> field.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.RequestMetadata">
            <summary>
            <para>
            <para>Key-value pairs that you can use to filter invocation logs.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.InferenceConfig_StopSequence">
            <summary>
            <para>
            <para>A list of stop sequences. A stop sequence is a sequence of characters that causes
            the model to stop generating the response. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.System">
            <summary>
            <para>
            <para>A prompt that provides instructions or context to the model about the task it should
            perform, or the persona it should adopt during the conversation.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.InferenceConfig_Temperature">
            <summary>
            <para>
            <para>The likelihood of the model selecting higher-probability options while generating
            a response. A lower value makes the model more likely to choose higher-probability
            options, while a higher value makes the model more likely to choose lower-probability
            options.</para><para>The default value is the default value for the model that you are using. For more
            information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Inference
            parameters for foundation models</a>. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.ToolConfig_Tool">
            <summary>
            <para>
            <para>An array of tools that you want to pass to a model.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.InferenceConfig_TopP">
            <summary>
            <para>
            <para>The percentage of most-likely candidates that the model considers for the next token.
            For example, if you choose a value of 0.8 for <c>topP</c>, the model selects from
            the top 80% of the probability distribution of tokens that could be next in the sequence.</para><para>The default value is the default value for the model that you are using. For more
            information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Inference
            parameters for foundation models</a>. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.GuardrailConfig_Trace">
            <summary>
            <para>
            <para>The trace behavior for the guardrail.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.Select">
            <summary>
            Use the -Select parameter to control the cmdlet output. The default value is '*'.
            Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.BedrockRuntime.Model.ConverseResponse).
            Specifying the name of a property of type Amazon.BedrockRuntime.Model.ConverseResponse will result in that property being returned.
            Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.PassThru">
            <summary>
            Changes the cmdlet behavior to return the value passed to the ModelId parameter.
            The -PassThru parameter is deprecated, use -Select '^ModelId' instead. This parameter will be removed in a future version.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseCmdlet.Force">
            <summary>
            This parameter overrides confirmation prompts to force
            the cmdlet to continue its operation. This parameter should always
            be used with caution.
            </summary>
        </member>
        <member name="T:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet">
            <summary>
            Sends messages to the specified Amazon Bedrock model and returns the response in a
            stream. <c>ConverseStream</c> provides a consistent API that works with all Amazon
            Bedrock models that support messages. This allows you to write code once and use it
            with different models. Should a model have unique inference parameters, you can also
            pass those unique parameters to the model.
             
              
            <para>
            To find out if a model supports streaming, call <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_GetFoundationModel.html">GetFoundationModel</a>
            and check the <c>responseStreamingSupported</c> field in the response.
            </para><note><para>
            The CLI doesn't support streaming operations in Amazon Bedrock, including <c>ConverseStream</c>.
            </para></note><para>
            Amazon Bedrock doesn't store any text, images, or documents that you provide as content.
            The data is only used to generate the response.
            </para><para>
            You can submit a prompt by including it in the <c>messages</c> field, specifying the
            <c>modelId</c> of a foundation model or inference profile to run inference on it,
            and including any other fields that are relevant to your use case.
            </para><para>
            You can also submit a prompt from Prompt management by specifying the ARN of the prompt
            version and including a map of variables to values in the <c>promptVariables</c> field.
            You can append more messages to the prompt by using the <c>messages</c> field. If
            you use a prompt from Prompt management, you can't include the following fields in
            the request: <c>additionalModelRequestFields</c>, <c>inferenceConfig</c>, <c>system</c>,
            or <c>toolConfig</c>. Instead, these fields must be defined through Prompt management.
            For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management-use.html">Use
            a prompt from Prompt management</a>.
            </para><para>
            For information about the Converse API, see <i>Use the Converse API</i> in the <i>Amazon
            Bedrock User Guide</i>. To use a guardrail, see <i>Use a guardrail with the Converse
            API</i> in the <i>Amazon Bedrock User Guide</i>. To use a tool with a model, see <i>Tool
            use (Function calling)</i> in the <i>Amazon Bedrock User Guide</i>.</para><para>
            For example code, see <i>Conversation streaming example</i> in the <i>Amazon Bedrock
            User Guide</i>.
            </para><para>
            This operation requires permission for the <c>bedrock:InvokeModelWithResponseStream</c>
            action.
            </para><important><para>
            To deny all inference access to resources that you specify in the modelId field, you
            need to deny access to the <c>bedrock:InvokeModel</c> and <c>bedrock:InvokeModelWithResponseStream</c>
            actions. Doing this also denies access to the resource through the base inference
            actions (<a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_InvokeModel.html">InvokeModel</a>
            and <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_InvokeModelWithResponseStream.html">InvokeModelWithResponseStream</a>).
            For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/security_iam_id-based-policy-examples.html#security_iam_id-based-policy-examples-deny-inference">Deny
            access for inference on specific models</a>.
            </para></important><para>
            For troubleshooting some of the common errors you might encounter when using the <c>ConverseStream</c>
            API, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/troubleshooting-api-error-codes.html">Troubleshooting
            Amazon Bedrock API Error Codes</a> in the Amazon Bedrock User Guide.
            </para>
            </summary>
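            <example>
            <para>
            A minimal sketch, assuming the cmdlet name Invoke-BDRRConverseStream; the model ID is
            a placeholder. With the default -Select 'Stream', the cmdlet returns the response
            event stream; how you consume individual events depends on the SDK version.
            </para>
            <code>
            $message = [Amazon.BedrockRuntime.Model.Message]@{
                Role    = 'user'
                Content = @([Amazon.BedrockRuntime.Model.ContentBlock]@{ Text = 'Summarize the plot of Hamlet.' })
            }

            # Capture the streaming output object; events (message start, content block deltas,
            # message stop) arrive as the model generates the response.
            $stream = Invoke-BDRRConverseStream -ModelId 'anthropic.claude-3-haiku-20240307-v1:0' -Message $message
            </code>
            </example>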
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.AdditionalModelRequestField">
            <summary>
            <para>
            <para>Additional inference parameters that the model supports, beyond the base set of inference
            parameters that <c>Converse</c> and <c>ConverseStream</c> support in the <c>inferenceConfig</c>
            field. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Model
            parameters</a>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.AdditionalModelResponseFieldPath">
            <summary>
            <para>
            <para>Additional model parameter field paths to return in the response. <c>Converse</c>
            and <c>ConverseStream</c> return the requested fields as a JSON Pointer object in
            the <c>additionalModelResponseFields</c> field. The following is example JSON for
            <c>additionalModelResponseFieldPaths</c>.</para><para><c>[ "/stop_sequence" ]</c></para><para>For information about the JSON Pointer syntax, see the <a href="https://datatracker.ietf.org/doc/html/rfc6901">Internet
            Engineering Task Force (IETF)</a> documentation.</para><para><c>Converse</c> and <c>ConverseStream</c> reject an empty JSON Pointer or incorrectly
            structured JSON Pointer with a <c>400</c> error code. If the JSON Pointer is valid
            but the requested field is not in the model response, it is ignored by <c>Converse</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.ToolChoice_Any">
            <summary>
            <para>
            <para>The model must request at least one tool (no text is generated).</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.ToolChoice_Auto">
            <summary>
            <para>
            <para>(Default). The model automatically decides whether to call a tool or to generate
            text instead.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.GuardrailConfig_GuardrailIdentifier">
            <summary>
            <para>
            <para>The identifier for the guardrail.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.GuardrailConfig_GuardrailVersion">
            <summary>
            <para>
            <para>The version of the guardrail.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.PerformanceConfig_Latency">
            <summary>
            <para>
            <para>To use a latency-optimized version of the model, set to <c>optimized</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.InferenceConfig_MaxToken">
            <summary>
            <para>
            <para>The maximum number of tokens to allow in the generated response. The default value
            is the maximum allowed value for the model that you are using. For more information,
            see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Inference
            parameters for foundation models</a>. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.Message">
            <summary>
            <para>
            <para>The messages that you want to send to the model.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.ModelId">
            <summary>
            <para>
            <para>Specifies the model or throughput with which to run inference, or the prompt resource
            to use in inference. The value depends on the resource that you use:</para><ul><li><para>If you use a base model, specify the model ID or its ARN. For a list of model IDs
            for base models, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns">Amazon
            Bedrock base model IDs (on-demand throughput)</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use an inference profile, specify the inference profile ID or its ARN. For
            a list of inference profile IDs, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference-support.html">Supported
            Regions and models for cross-region inference</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use a provisioned model, specify the ARN of the Provisioned Throughput. For
            more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prov-thru-use.html">Run
            inference using a Provisioned Throughput</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use a custom model, first purchase Provisioned Throughput for it. Then specify
            the ARN of the resulting provisioned model. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-use.html">Use
            a custom model in Amazon Bedrock</a> in the Amazon Bedrock User Guide.</para></li><li><para>To include a prompt that was defined in <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management.html">Prompt
            management</a>, specify the ARN of the prompt version to use.</para></li></ul><para>The Converse API doesn't support <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-import-model.html">imported
            models</a>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.Tool_Name">
            <summary>
            <para>
            <para>The name of the tool that the model must request. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.PromptVariable">
            <summary>
            <para>
            <para>Contains a map of variables in a prompt from Prompt management to objects containing
            the values to fill in for them when running model invocation. This field is ignored
            if you don't specify a prompt resource in the <c>modelId</c> field.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.RequestMetadata">
            <summary>
            <para>
            <para>Key-value pairs that you can use to filter invocation logs.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.InferenceConfig_StopSequence">
            <summary>
            <para>
            <para>A list of stop sequences. A stop sequence is a sequence of characters that causes
            the model to stop generating the response. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.GuardrailConfig_StreamProcessingMode">
            <summary>
            <para>
            <para>The processing mode. For more information, see <i>Configure streaming response behavior</i>
            in the <i>Amazon Bedrock User Guide</i>. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.System">
            <summary>
            <para>
            <para>A prompt that provides instructions or context to the model about the task it should
            perform, or the persona it should adopt during the conversation.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.InferenceConfig_Temperature">
            <summary>
            <para>
            <para>The likelihood of the model selecting higher-probability options while generating
            a response. A lower value makes the model more likely to choose higher-probability
            options, while a higher value makes the model more likely to choose lower-probability
            options.</para><para>The default value is the default value for the model that you are using. For more
            information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Inference
            parameters for foundation models</a>. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.ToolConfig_Tool">
            <summary>
            <para>
            <para>An array of tools that you want to pass to a model.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.InferenceConfig_TopP">
            <summary>
            <para>
            <para>The percentage of most-likely candidates that the model considers for the next token.
            For example, if you choose a value of 0.8 for <c>topP</c>, the model selects from
            the top 80% of the probability distribution of tokens that could be next in the sequence.</para><para>The default value is the default value for the model that you are using. For more
            information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Inference
            parameters for foundation models</a>. </para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.GuardrailConfig_Trace">
            <summary>
            <para>
            <para>The trace behavior for the guardrail.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.Select">
            <summary>
            Use the -Select parameter to control the cmdlet output. The default value is 'Stream'.
            Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.BedrockRuntime.Model.ConverseStreamResponse).
            Specifying the name of a property of type Amazon.BedrockRuntime.Model.ConverseStreamResponse will result in that property being returned.
            Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.PassThru">
            <summary>
            Changes the cmdlet behavior to return the value passed to the ModelId parameter.
            The -PassThru parameter is deprecated, use -Select '^ModelId' instead. This parameter will be removed in a future version.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRConverseStreamCmdlet.Force">
            <summary>
            This parameter overrides confirmation prompts to force
            the cmdlet to continue its operation. This parameter should always
            be used with caution.
            </summary>
        </member>
        <member name="T:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRGuardrailCmdlet">
            <summary>
            Applies a guardrail to content that you provide.
             
              
            <para>
            For troubleshooting some of the common errors you might encounter when using the <c>ApplyGuardrail</c>
            API, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/troubleshooting-api-error-codes.html">Troubleshooting
            Amazon Bedrock API Error Codes</a> in the Amazon Bedrock User Guide.
            </para>
            </summary>
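            <example>
            <para>
            A minimal sketch, assuming the cmdlet name Invoke-BDRRGuardrail; the guardrail
            identifier and version are placeholders, and the content is built from the SDK's
            GuardrailContentBlock and GuardrailTextBlock types.
            </para>
            <code>
            # Wrap the text to evaluate in a guardrail content block.
            $content = [Amazon.BedrockRuntime.Model.GuardrailContentBlock]@{
                Text = [Amazon.BedrockRuntime.Model.GuardrailTextBlock]@{ Text = 'Tell me about your refund policy.' }
            }

            # Apply guardrail version 1 to user input.
            Invoke-BDRRGuardrail -GuardrailIdentifier 'gr-abc123example' -GuardrailVersion '1' -Source 'INPUT' -Content $content
            </code>
            </example>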
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRGuardrailCmdlet.Content">
            <summary>
            <para>
            <para>The content details used in the request to apply the guardrail.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRGuardrailCmdlet.GuardrailIdentifier">
            <summary>
            <para>
            <para>The guardrail identifier used in the request to apply the guardrail.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRGuardrailCmdlet.GuardrailVersion">
            <summary>
            <para>
            <para>The guardrail version used in the request to apply the guardrail.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRGuardrailCmdlet.Source">
            <summary>
            <para>
            <para>The source of data used in the request to apply the guardrail.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRGuardrailCmdlet.Select">
            <summary>
            Use the -Select parameter to control the cmdlet output. The default value is '*'.
            Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.BedrockRuntime.Model.ApplyGuardrailResponse).
            Specifying the name of a property of type Amazon.BedrockRuntime.Model.ApplyGuardrailResponse will result in that property being returned.
            Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRGuardrailCmdlet.PassThru">
            <summary>
            Changes the cmdlet behavior to return the value passed to the GuardrailIdentifier parameter.
            The -PassThru parameter is deprecated, use -Select '^GuardrailIdentifier' instead. This parameter will be removed in a future version.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRGuardrailCmdlet.Force">
            <summary>
            This parameter overrides confirmation prompts to force
            the cmdlet to continue its operation. This parameter should always
            be used with caution.
            </summary>
        </member>
        <member name="T:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet">
            <summary>
            Invokes the specified Amazon Bedrock model to run inference using the prompt and inference
            parameters provided in the request body. You use model inference to generate text,
            images, and embeddings.
             
              
            <para>
            For example code, see <i>Invoke model code examples</i> in the <i>Amazon Bedrock User
            Guide</i>.
            </para><para>
            This operation requires permission for the <c>bedrock:InvokeModel</c> action.
            </para><important><para>
            To deny all inference access to resources that you specify in the modelId field, you
            need to deny access to the <c>bedrock:InvokeModel</c> and <c>bedrock:InvokeModelWithResponseStream</c>
            actions. Doing this also denies access to the resource through the Converse API actions
            (<a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_Converse.html">Converse</a>
            and <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_ConverseStream.html">ConverseStream</a>).
            For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/security_iam_id-based-policy-examples.html#security_iam_id-based-policy-examples-deny-inference">Deny
            access for inference on specific models</a>.
            </para></important><para>
            For troubleshooting some of the common errors you might encounter when using the <c>InvokeModel</c>
            API, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/troubleshooting-api-error-codes.html">Troubleshooting
            Amazon Bedrock API Error Codes</a> in the Amazon Bedrock User Guide.
            </para>
            </summary>
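            <example>
            <para>
            A minimal sketch, assuming the cmdlet name Invoke-BDRRModel; the model ID is a
            placeholder, and the request body uses the Anthropic Claude Messages format purely as
            an example of a model-specific body (see the Inference parameters link above for the
            format your model expects).
            </para>
            <code>
            # Build a model-specific JSON request body.
            $body = @{
                anthropic_version = 'bedrock-2023-05-31'
                max_tokens        = 256
                messages          = @(@{ role = 'user'; content = 'Write a haiku about the ocean.' })
            } | ConvertTo-Json -Depth 5

            # Invoke the model; the cmdlet converts the string body to bytes for the service.
            $response = Invoke-BDRRModel -ModelId 'anthropic.claude-3-haiku-20240307-v1:0' -Body $body -ContentType 'application/json'

            # The response body is returned as a stream; read it back as JSON text.
            [System.IO.StreamReader]::new($response.Body).ReadToEnd() | ConvertFrom-Json
            </code>
            </example>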
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.Accept">
            <summary>
            <para>
            <para>The desired MIME type of the inference body in the response. The default value is
            <c>application/json</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.Body">
            <summary>
            <para>
            <para>The prompt and inference parameters in the format specified in the <c>contentType</c>
            in the header. You must provide the body in JSON format. To see the format and content
            of the request and response bodies for different models, refer to <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Inference
            parameters</a>. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/api-methods-run.html">Run
            inference</a> in the Bedrock User Guide.</para>
            </para>
            <para>The cmdlet will automatically convert the supplied parameter of type string, string[], System.IO.FileInfo or System.IO.Stream to byte[] before supplying it to the service.</para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.ContentType">
            <summary>
            <para>
            <para>The MIME type of the input data in the request. You must specify <c>application/json</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.GuardrailIdentifier">
            <summary>
            <para>
            <para>The unique identifier of the guardrail that you want to use. If you don't provide
            a value, no guardrail is applied to the invocation.</para><para>An error is thrown in the following situations.</para><ul><li><para>You don't provide a guardrail identifier but you specify the <c>amazon-bedrock-guardrailConfig</c>
            field in the request body.</para></li><li><para>You enable the guardrail but the <c>contentType</c> isn't <c>application/json</c>.</para></li><li><para>You provide a guardrail identifier, but <c>guardrailVersion</c> isn't specified.</para></li></ul>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.GuardrailVersion">
            <summary>
            <para>
            <para>The version number for the guardrail. The value can also be <c>DRAFT</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.ModelId">
            <summary>
            <para>
            <para>The unique identifier of the model to invoke to run inference.</para><para>The <c>modelId</c> to provide depends on the type of model or throughput that you
            use:</para><ul><li><para>If you use a base model, specify the model ID or its ARN. For a list of model IDs
            for base models, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns">Amazon
            Bedrock base model IDs (on-demand throughput)</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use an inference profile, specify the inference profile ID or its ARN. For
            a list of inference profile IDs, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference-support.html">Supported
            Regions and models for cross-region inference</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use a provisioned model, specify the ARN of the Provisioned Throughput. For
            more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prov-thru-use.html">Run
            inference using a Provisioned Throughput</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use a custom model, first purchase Provisioned Throughput for it. Then specify
            the ARN of the resulting provisioned model. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-use.html">Use
            a custom model in Amazon Bedrock</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use an <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-import-model.html">imported
            model</a>, specify the ARN of the imported model. You can get the model ARN from a
            successful call to <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_CreateModelImportJob.html">CreateModelImportJob</a>
            or from the Imported models page in the Amazon Bedrock console.</para></li></ul>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.PerformanceConfigLatency">
            <summary>
            <para>
            <para>Model performance settings for the request.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.Trace">
            <summary>
            <para>
            <para>Specifies whether to enable or disable the Bedrock trace. If enabled, you can see
            the full Bedrock trace.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.Select">
            <summary>
            Use the -Select parameter to control the cmdlet output. The default value is '*'.
            Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.BedrockRuntime.Model.InvokeModelResponse).
            Specifying the name of a property of type Amazon.BedrockRuntime.Model.InvokeModelResponse will result in that property being returned.
            Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.PassThru">
            <summary>
            Changes the cmdlet behavior to return the value passed to the ModelId parameter.
            The -PassThru parameter is deprecated, use -Select '^ModelId' instead. This parameter will be removed in a future version.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelCmdlet.Force">
            <summary>
            This parameter overrides confirmation prompts to force
            the cmdlet to continue its operation. This parameter should always
            be used with caution.
            </summary>
        </member>
        <member name="T:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet">
            <summary>
            Invokes the specified Amazon Bedrock model to run inference using the prompt and inference
            parameters provided in the request body. The response is returned in a stream.
             
              
            <para>
            To see if a model supports streaming, call <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_GetFoundationModel.html">GetFoundationModel</a>
            and check the <c>responseStreamingSupported</c> field in the response.
            </para><note><para>
            The CLI doesn't support streaming operations in Amazon Bedrock, including <c>InvokeModelWithResponseStream</c>.
            </para></note><para>
            For example code, see <i>Invoke model with streaming code example</i> in the <i>Amazon
            Bedrock User Guide</i>.
            </para><para>
            This operation requires permissions to perform the <c>bedrock:InvokeModelWithResponseStream</c>
            action.
            </para><important><para>
            To deny all inference access to resources that you specify in the modelId field, you
            need to deny access to the <c>bedrock:InvokeModel</c> and <c>bedrock:InvokeModelWithResponseStream</c>
            actions. Doing this also denies access to the resource through the Converse API actions
            (<a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_Converse.html">Converse</a>
            and <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_ConverseStream.html">ConverseStream</a>).
            For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/security_iam_id-based-policy-examples.html#security_iam_id-based-policy-examples-deny-inference">Deny
            access for inference on specific models</a>.
            </para></important><para>
            For troubleshooting some of the common errors you might encounter when using the <c>InvokeModelWithResponseStream</c>
            API, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/troubleshooting-api-error-codes.html">Troubleshooting
            Amazon Bedrock API Error Codes</a> in the Amazon Bedrock User Guide.
            </para>
            </summary>
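            <example>
            <para>
            A minimal sketch, assuming the cmdlet name Invoke-BDRRModelWithResponseStream; the
            model ID is a placeholder and the body follows the same model-specific format used
            with InvokeModel. The response stream is consumed event by event; the exact event API
            depends on the SDK version.
            </para>
            <code>
            $body = @{
                anthropic_version = 'bedrock-2023-05-31'
                max_tokens        = 256
                messages          = @(@{ role = 'user'; content = 'Write a haiku about the ocean.' })
            } | ConvertTo-Json -Depth 5

            # Capture the streaming response; chunks arrive as the model generates output.
            $response = Invoke-BDRRModelWithResponseStream -ModelId 'anthropic.claude-3-haiku-20240307-v1:0' -Body $body -ContentType 'application/json'
            </code>
            </example>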
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.Accept">
            <summary>
            <para>
            <para>The desired MIME type of the inference body in the response. The default value is
            <c>application/json</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.Body">
            <summary>
            <para>
            <para>The prompt and inference parameters in the format specified in the <c>contentType</c>
            in the header. You must provide the body in JSON format. To see the format and content
            of the request and response bodies for different models, refer to <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Inference
            parameters</a>. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/api-methods-run.html">Run
            inference</a> in the Bedrock User Guide.</para>
            </para>
            <para>The cmdlet will automatically convert the supplied parameter of type string, string[], System.IO.FileInfo or System.IO.Stream to byte[] before supplying it to the service.</para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.ContentType">
            <summary>
            <para>
            <para>The MIME type of the input data in the request. You must specify <c>application/json</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.GuardrailIdentifier">
            <summary>
            <para>
            <para>The unique identifier of the guardrail that you want to use. If you don't provide
            a value, no guardrail is applied to the invocation.</para><para>An error is thrown in the following situations.</para><ul><li><para>You don't provide a guardrail identifier but you specify the <c>amazon-bedrock-guardrailConfig</c>
            field in the request body.</para></li><li><para>You enable the guardrail but the <c>contentType</c> isn't <c>application/json</c>.</para></li><li><para>You provide a guardrail identifier, but <c>guardrailVersion</c> isn't specified.</para></li></ul>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.GuardrailVersion">
            <summary>
            <para>
            <para>The version number for the guardrail. The value can also be <c>DRAFT</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.ModelId">
            <summary>
            <para>
            <para>The unique identifier of the model to invoke to run inference.</para><para>The <c>modelId</c> to provide depends on the type of model or throughput that you
            use:</para><ul><li><para>If you use a base model, specify the model ID or its ARN. For a list of model IDs
            for base models, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns">Amazon
            Bedrock base model IDs (on-demand throughput)</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use an inference profile, specify the inference profile ID or its ARN. For
            a list of inference profile IDs, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference-support.html">Supported
            Regions and models for cross-region inference</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use a provisioned model, specify the ARN of the Provisioned Throughput. For
            more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prov-thru-use.html">Run
            inference using a Provisioned Throughput</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use a custom model, first purchase Provisioned Throughput for it. Then specify
            the ARN of the resulting provisioned model. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-use.html">Use
            a custom model in Amazon Bedrock</a> in the Amazon Bedrock User Guide.</para></li><li><para>If you use an <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-import-model.html">imported
            model</a>, specify the ARN of the imported model. You can get the model ARN from a
            successful call to <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_CreateModelImportJob.html">CreateModelImportJob</a>
            or from the Imported models page in the Amazon Bedrock console.</para></li></ul>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.PerformanceConfigLatency">
            <summary>
            <para>
            <para>Model performance settings for the request.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.Trace">
            <summary>
            <para>
            <para>Specifies whether to enable or disable the Bedrock trace. If enabled, you can see
            the full Bedrock trace.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.Select">
            <summary>
            Use the -Select parameter to control the cmdlet output. The default value is '*'.
            Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.BedrockRuntime.Model.InvokeModelWithResponseStreamResponse).
            Specifying the name of a property of type Amazon.BedrockRuntime.Model.InvokeModelWithResponseStreamResponse will result in that property being returned.
            Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.PassThru">
            <summary>
            Changes the cmdlet behavior to return the value passed to the ModelId parameter.
            The -PassThru parameter is deprecated, use -Select '^ModelId' instead. This parameter will be removed in a future version.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.InvokeBDRRModelWithResponseStreamCmdlet.Force">
            <summary>
            This parameter overrides confirmation prompts to force
            the cmdlet to continue its operation. This parameter should always
            be used with caution.
            </summary>
        </member>
        <member name="T:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet">
            <summary>
            Starts an asynchronous invocation.
             
              
            <para>
            This operation requires permission for the <c>bedrock:InvokeModel</c> action.
            </para><important><para>
            To deny all inference access to resources that you specify in the modelId field, you
            need to deny access to the <c>bedrock:InvokeModel</c> and <c>bedrock:InvokeModelWithResponseStream</c>
            actions. Doing this also denies access to the resource through the Converse API actions
            (<a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_Converse.html">Converse</a>
            and <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_ConverseStream.html">ConverseStream</a>).
            For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/security_iam_id-based-policy-examples.html#security_iam_id-based-policy-examples-deny-inference">Deny
            access for inference on specific models</a>.
            </para></important>
            </summary>
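            <example>
            <para>
            A minimal sketch, assuming the cmdlet name Start-BDRRAsyncInvoke; the model ID,
            bucket, and ModelInput keys are placeholders, since ModelInput is a free-form,
            model-specific document (a hashtable is assumed to be accepted here). With the
            default -Select 'InvocationArn', the cmdlet returns the new invocation's ARN.
            </para>
            <code>
            # Start an asynchronous invocation and write the output to Amazon S3.
            $arn = Start-BDRRAsyncInvoke -ModelId 'amazon.example-async-model-v1:0' `
                -ModelInput @{ inputText = 'Describe a sunrise over the mountains.' } `
                -S3OutputDataConfig_S3Uri 's3://amzn-s3-demo-bucket/async-output/'

            # Check on the invocation later.
            Get-BDRRAsyncInvoke -InvocationArn $arn
            </code>
            </example>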
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet.S3OutputDataConfig_BucketOwner">
            <summary>
            <para>
            <para>If the bucket belongs to another AWS account, specify that account's ID.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet.ClientRequestToken">
            <summary>
            <para>
            <para>Specify an idempotency token to ensure that requests are not duplicated.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet.S3OutputDataConfig_KmsKeyId">
            <summary>
            <para>
            <para>A KMS encryption key ID.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet.ModelId">
            <summary>
            <para>
            <para>The model to invoke.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet.ModelInput">
            <summary>
            <para>
            <para>Input to send to the model.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet.S3OutputDataConfig_S3Uri">
            <summary>
            <para>
            <para>An object URI starting with <c>s3://</c>.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet.Tag">
            <summary>
            <para>
            <para>Tags to apply to the invocation.</para>
            </para>
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet.Select">
            <summary>
            Use the -Select parameter to control the cmdlet output. The default value is 'InvocationArn'.
            Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.BedrockRuntime.Model.StartAsyncInvokeResponse).
            Specifying the name of a property of type Amazon.BedrockRuntime.Model.StartAsyncInvokeResponse will result in that property being returned.
            Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet.PassThru">
            <summary>
            Changes the cmdlet behavior to return the value passed to the ModelId parameter.
            The -PassThru parameter is deprecated, use -Select '^ModelId' instead. This parameter will be removed in a future version.
            </summary>
        </member>
        <member name="P:Amazon.PowerShell.Cmdlets.BDRR.StartBDRRAsyncInvokeCmdlet.Force">
            <summary>
            This parameter overrides confirmation prompts to force
            the cmdlet to continue its operation. This parameter should always
            be used with caution.
            </summary>
        </member>
    </members>
</doc>