Public/New-ChatCompletions.ps1

function New-ChatCompletions {
    <#
    .EXTERNALHELP
        code365scripts.openai-help.xml
    .SYNOPSIS
        Get a chat completion result from OpenAI (or a compatible service) for the specified prompt.
    .DESCRIPTION
        Send a prompt, together with an optional system prompt, to the OpenAI chat completions API (or a compatible endpoint such as Azure OpenAI, Ollama, Kimi or Zhipu) and return the completion result as a string. This cmdlet returns the result directly; it does not start an interactive chat conversation.
    .PARAMETER api_key
        The API key used to access the OpenAI service. If not specified, the key is read from the environment variable OPENAI_API_KEY. You can also use "token", "access_token", "accesstoken", "key", or "apikey" as the alias.
    .PARAMETER model
        The model to use for this request. You can also set it in the environment variable OPENAI_API_MODEL. If you are using the Azure OpenAI Service, this should be the deployment name you created in the portal.
    .PARAMETER endpoint
        The endpoint to use for this request. You can also set it in the environment variable OPENAI_API_ENDPOINT. Some shortcut values are supported, such as "ollama", "local", "kimi", or "zhipu".
    .PARAMETER system
        The system prompt. This is a string you can use to define the role you want the assistant to play, for example, "You are a chatbot, please answer the user's question according to the user's language."
        If you provide a file path to this parameter, the file content is used as the system prompt.
        You can also specify a URL, and its content will be used as the system prompt.
        You can read the prompt from the prompt library (https://github.com/code365opensource/promptlibrary) by using "lib:xxxxx" as the value, for example, "lib:fitness".
    .PARAMETER prompt
        If you want to get result immediately, you can use this parameter to define the prompt. It will not start the chat conversation.
        If you provide a file path to this parameter, we will read the file as the prompt.
        You can also specify a url to this parameter, we will read the url as the prompt.
        You can read the prompt from a library (https://github.com/code365opensource/promptlibrary), by use "lib:xxxxx" as the prompt, for example, "lib:fitness".
    .PARAMETER config
        Dynamic settings for the API call, so you can pass any model-specific options. Provide a hashtable or custom object, for example @{temperature=1;max_tokens=1024}. You can also use "settings" as the alias.
    .PARAMETER outFile
        If you want to save the result to a file, you can use this parameter to set the file path. You can also use "out" as the alias.
    .PARAMETER context
        If you want to pass dynamic values to the prompt, use this parameter. It can be any custom PowerShell object or hashtable. Define variables in the system prompt or user prompt by using the {{your_variable_name}} syntax, then pass the data through this parameter, for example @{your_variable_name="your value"}. If there are multiple variables, use @{variable1="value1";variable2="value2"}. See the examples below. You can also use "variables" as the alias.
    .PARAMETER headers
        If you want to pass custom headers to the API call, use this parameter with a hashtable, for example @{header1="value1";header2="value2"}.
    .PARAMETER json
        Request the response in JSON format.
    .PARAMETER functions
        This is a super powerful feature to support OpenAI function calling. Specify the function name(s) and they will be called automatically when the assistant needs them. You can find all the available function definitions here (https://raw.githubusercontent.com/chenxizhang/openai-powershell/master/code365scripts.openai/Private/functions.json).
    .PARAMETER environment
        If you have multiple environments to use, specify the environment name here and define the environment in the profile.json file. You can also use "profile" or "env" as the alias.
    .PARAMETER env_config
        The profile.json file path, the default value is "$env:USERPROFILE/.openai-powershell/profile.json".
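    .PARAMETER passthru
        By default, if you specify the outFile parameter, the result is only written to the file. Use this switch to also return the result to the pipeline.
    .EXAMPLE
        New-ChatCompletions -prompt "Translate 'hello world' to French"
        A minimal call (assuming the OPENAI_API_KEY environment variable is set). The default system prompt is used, the model falls back to OPENAI_API_MODEL or "gpt-3.5-turbo", and the completion text is returned as a string.
    .EXAMPLE
        "What is the capital of {{country}}?" | gpt -context @{country="France"}
        Uses the gpt alias and pipeline input. The {{country}} placeholder in the prompt is filled from the context parameter.
    .EXAMPLE
        New-ChatCompletions -prompt "List three colors as a JSON array" -json -config @{temperature=0.2} -outFile colors.json -passthru
        Requests a JSON response with a lower temperature, writes the result to colors.json and, because of -passthru, also returns it to the pipeline.
    .EXAMPLE
        New-ChatCompletions -prompt "hello" -environment work
        Resolves api_key, model, endpoint and other settings from the profile named "work" in the profile.json file. The profile name "work" is only an illustration; use the names you defined in your own profile.json.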
    .OUTPUTS
        System.String, the completion result.
    .LINK
        https://github.com/chenxizhang/openai-powershell
    #>


    [CmdletBinding()]
    [Alias("gpt")]
    param(
        [Alias("token", "access_token", "accesstoken", "key", "apikey")]
        [string]$api_key,
        [Alias("engine", "deployment")]
        [string]$model,
        [string]$endpoint,
        [string]$system = "You are a chatbot, please answer the user's question according to the user's language.",
        [Parameter(ValueFromPipeline = $true, Position = 0, Mandatory = $true)]
        [string]$prompt,
        [Alias("settings")]
        [PSCustomObject]$config, 
        [Alias("out")]   
        [string]$outFile,
        [switch]$json,
        [Alias("variables")]
        [PSCustomObject]$context,
        [PSCustomObject]$headers,
        [string[]]$functions,
        [switch]$passthru,
        [Alias("profile", "env")]
        [string]$environment,
        [string]$env_config = "$env:USERPROFILE/.openai-powershell/profile.json"
    )
    BEGIN {

        Write-Verbose ($resources.verbose_parameters_received -f ($PSBoundParameters | Out-String))
        Write-Verbose ($resources.verbose_environment_received -f (Get-ChildItem Env:OPENAI_API_* | Out-String))
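        # The profile.json referenced by the environment/env_config parameters is expected to look roughly
        # like the sketch below. The field names are inferred from the parsing that follows; all fields
        # except "name" are optional, and the values (and header names) shown here are placeholders.
        # The "aad" object is splatted to Get-MsalToken, so use that cmdlet's parameter names there.
        # {
        #   "profiles": [
        #     {
        #       "name": "work",
        #       "api_key": "sk-xxxx",
        #       "model": "gpt-4",
        #       "endpoint": "https://api.openai.com/v1/chat/completions",
        #       "config": { "temperature": 0.7 },
        #       "headers": { "x-request-id": "{{guid}}" },
        #       "functions": [ "..." ],
        #       "auth": { "type": "aad", "aad": { "ClientId": "...", "TenantId": "...", "Scopes": [ "..." ] } }
        #     }
        #   ]
        # }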
        if ($environment) {
            if ($env_config -match "\.json$" -and (Test-Path $env_config -PathType Leaf)) {
                $env_config = Get-Content $env_config -Raw -Encoding UTF8 
            }

            $parsed_env_config = ($env_config | ConvertFrom-Json | ConvertTo-Hashtable).profiles | Where-Object { $_.name -eq $environment } | Select-Object -First 1

            if ($parsed_env_config) {
                if ($parsed_env_config.api_key -and (!$api_key)) { $api_key = $parsed_env_config.api_key }
                if ($parsed_env_config.model -and (!$model)) { $model = $parsed_env_config.model }
                if ($parsed_env_config.endpoint -and (!$endpoint)) { $endpoint = $parsed_env_config.endpoint }
                if ($parsed_env_config.config) { 
                    if ($config) {
                        Merge-Hashtable -table1 $config -table2 $parsed_env_config.config
                    }
                    else {
                        $config = $parsed_env_config.config
                    }
                }
                if ($parsed_env_config.headers) {

                    # for each header value, replace {{model}} with the model name and {{guid}} with a new GUID
                    $keys = @($parsed_env_config.headers.Keys)
                    $keys | ForEach-Object {
                        $parsed_env_config.headers[$_] = $parsed_env_config.headers[$_] -replace "{{model}}", $model
                        $parsed_env_config.headers[$_] = $parsed_env_config.headers[$_] -replace "{{guid}}", [guid]::NewGuid().ToString()
                    }

                    if ($headers) {
                        Merge-Hashtable -table1 $headers -table2 $parsed_env_config.headers
                    }
                    else {
                        $headers = $parsed_env_config.headers
                    }
                }

                if ($parsed_env_config.auth -and ($parsed_env_config.auth.type -eq "aad") -and $parsed_env_config.auth.aad) {

                    Confirm-DependencyModule -ModuleName "MSAL.ps"

                    $aad = $parsed_env_config.auth.aad
                    $accesstoken = (Get-MsalToken @aad).AccessToken
                    $api_key = $accesstoken
                }

                # if the profile defines functions, merge them with the functions provided by the user
                if ($parsed_env_config.functions) {
                    if ($functions) {
                        $functions += $parsed_env_config.functions
                    }
                    else {
                        $functions = $parsed_env_config.functions
                    }
                }
            }
        }

        $api_key = ($api_key, [System.Environment]::GetEnvironmentVariable("OPENAI_API_KEY") | Where-Object { $_.Length -gt 0 } | Select-Object -First 1)
        $model = ($model, [System.Environment]::GetEnvironmentVariable("OPENAI_API_MODEL"), "gpt-3.5-turbo" | Where-Object { $_.Length -gt 0 } | Select-Object -First 1)
        $endpoint = ($endpoint, [System.Environment]::GetEnvironmentVariable("OPENAI_API_ENDPOINT"), "https://api.openai.com/v1/chat/completions" | Where-Object { $_.Length -gt 0 } | Select-Object -First 1)

        $endpoint = switch ($endpoint) {
            { $_ -in ("ollama", "local") } { "http://localhost:11434/v1/chat/completions" }
            "kimi" { "https://api.moonshot.cn/v1/chat/completions" }
            "zhipu" { "https://open.bigmodel.cn/api/paas/v4/chat/completions" }
            default { $endpoint }
        }

        # if a local model is used and no api_key is specified, use a placeholder key
        if ($endpoint -eq "http://localhost:11434/v1/chat/completions" -and !$api_key) {
            $api_key = "local"
        }

        Write-Verbose ($resources.verbose_parameters_parsed -f $api_key, $model, $endpoint)

        $hasError = $false

        if (!$api_key) {
            Write-Error $resources.error_missing_api_key
            $hasError = $true
        }

        if (!$model) {
            Write-Error $resources.error_missing_engine
            $hasError = $true
        }

        if (!$endpoint) {
            Write-Error $resources.error_missing_endpoint
            $hasError = $true
        }

        if ($hasError) {
            return
        }

        # if endpoint contains ".openai.azure.com", then people wants to use azure openai service, try to concat the endpoint with the model
        if ($endpoint.EndsWith("openai.azure.com/")) {
            $version = Get-AzureAPIVersion
            $endpoint += "openai/deployments/$model/chat/completions?api-version=$version"
        }
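        # the resulting endpoint looks like, for example:
        #   https://myresource.openai.azure.com/openai/deployments/<model>/chat/completions?api-version=2024-02-01
        # (the resource name and api-version shown here are illustrative; the actual version comes from Get-AzureAPIVersion)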

        # check whether the Azure OpenAI service is used; Databricks (dbrx) endpoints are handled below and use basic authorization instead of a bearer token
        $azure = $endpoint.Contains("openai.azure.com")

        $header = if ($azure) { 
            # if the api_key is a JWT (for example an AAD access token), use it as a bearer token in the Authorization header
            if ($api_key -match "^ey[a-zA-Z0-9-_]+\.[a-zA-Z0-9-_]+\.[a-zA-Z0-9-_]+$") {
                @{"Authorization" = "Bearer $api_key" }
            }
            else {
                @{"api-key" = "$api_key" } 
            }
        }
        else { 
            # the dbrx instruct endpoint uses the basic authorization method
            
            @{"Authorization" = "$(if($endpoint.Contains("databricks-dbrx-instruct")){"Basic"}else{"Bearer"}) $api_key" } 
        }

        # if the user provided custom headers, merge them into the default headers
        if ($headers) {
            Merge-Hashtable -table1 $header -table2 $headers
        }

        # if the user provided functions, load their definitions from the functions file and set tools and tool_choice through the config parameter
        if ($functions) {
            $tools = @(Get-PredefinedFunctions -names $functions)
            if ($tools.Count -gt 0) {
                if ($null -eq $config) {
                    $config = @{}
                }
                $config["tools"] = $tools
                $config["tool_choice"] = "auto"
            }
        }

        
    }

    PROCESS {
        $telemetries = @{
            type = switch ($endpoint) {
                { $_ -match "openai.azure.com" } { "azure" }
                { $_ -match "localhost" } { "local" }
                { $_ -match "databricks-dbrx" } { "dbrx" }
                { $_ -match "api.openai.com" } { "openai" }
                { $_ -match "platform.moonshot.cn" } { "kimi" }
                { $_ -match "open.bigmodel.cn" } { "zhipu" }
                default { $endpoint }
            }
        }

        # resolve the prompt: it may be inline text, a file path, a URL, or a lib: reference from the prompt library
        $parsedprompt = Get-PromptContent -prompt $prompt -context $context
        $prompt = $parsedprompt.content
        $telemetries.Add("promptType", $parsedprompt.type)
        $telemetries.Add("promptLib", $parsedprompt.lib)

        # resolve the system prompt in the same way
        $parsedsystem = Get-PromptContent -prompt $system -context $context
        $system = $parsedsystem.content

        $telemetries.Add("systemPromptType", $parsedsystem.type)
        $telemetries.Add("systemPromptLib", $parsedsystem.lib)

        # collect the telemetry data
        Submit-Telemetry -cmdletName $MyInvocation.MyCommand.Name -innovationName $MyInvocation.InvocationName -props $telemetries



        # user provides the prompt directly, so enter the completion mode (return the result directly)
        Write-Verbose ($resources.verbose_prompt_mode -f $prompt)
        $messages = @(
            @{
                role    = "system"
                content = $system
            },
            @{
                role    = "user"
                content = $prompt
            }
        ) 

        $body = @{model = "$model"; messages = $messages }

        $params = @{
            Uri     = $endpoint
            Method  = "POST"
            Headers = $header
        }

        if ($json) {
            $body.Add("response_format" , @{type = "json_object" } )
        }

        if ($config) {
            Merge-Hashtable -table1 $body -table2 $config
        }

        $params.Body = ($body | ConvertTo-Json -Depth 10)

        Write-Verbose ($resources.verbose_prepare_params -f ($params | ConvertTo-Json -Depth 10))

        $response = Invoke-UniWebRequest $params

        # if the response contains tool_calls, execute the tools locally, append the results to the messages, and send the request again

        while ($response.choices -and $response.choices[0].message.tool_calls) {
            # add the assistant message
            $this_message = $response.choices[0].message
            $body.messages += $this_message
            $tool_calls = $this_message.tool_calls
                
            foreach ($tool in $tool_calls) {
                Write-Verbose "$($resources.function_call): $($tool.function.name)"
                $function_args = $tool.function.arguments | ConvertFrom-Json
                $tool_response = Invoke-Expression ("{0} {1}" -f $tool.function.name, (
                        $function_args.PSObject.Properties | ForEach-Object {
                            # quote the argument value so values containing spaces are passed correctly
                            "-{0} '{1}'" -f $_.Name, ("$($_.Value)" -replace "'", "''")
                        }
                    ) -join " ")

                $body.messages += @{
                    role         = "tool"
                    name         = $tool.function.name
                    tool_call_id = $tool.id
                    content      = $tool_response
                }
            }

            $params.Body = ($body | ConvertTo-Json -Depth 10)
            $response = Invoke-UniWebRequest $params
        }

        Write-Verbose ($resources.verbose_response_utf8 -f ($response | ConvertTo-Json -Depth 10))

        $result = $response.choices[0].message.content
        Write-Verbose ($resources.verbose_response_plain_text -f $result)

        # if the user specified outFile, write the response to the file
        if ($outFile) {
            Write-Verbose ($resources.verbose_outfile_specified -f $outFile)
            $result | Out-File -FilePath $outFile -Encoding utf8

            # with -passthru, also return the result to the pipeline even though it was written to a file
            if ($passthru) {
                Write-Output $result
            }
        }
        else {
            # no outFile specified, return the result to the pipeline directly
            Write-Output $result
        }
    }
}
