# Update-AZTableEntity.psm1
$FunctionScriptName = "Update-AZTableEntity" Write-Verbose "Import-Start| [$($FunctionScriptName)]" <#* Changelog # Version 2.0.0 | PV 1.0.0 | 2023-04-03 + Added BATCH processing o cleanup # Version 1.X.X | PV 1.0.0 | 2020-05-29 + Initial release / Updates See global changelog.md for changes that impact out-of-script resources! #> #* Dependencies # Import-Function -Scope "Support" -Function "Get-SAStokenValidity" -Version "1.X" function Update-AZTableEntity { <# .SYNOPSIS Updates AZ Table entitys or creates .DESCRIPTION Updates AZ Table entitys or creates Default: Create if not exist. Merge if exist. set "RemoveExistingData" switch to override existing data Updates up to 100 entries in BATCH jobs .PARAMETER Config Config param Expects following values: StorageAccount / TableName / Sastoken Optional values: MaxBatch Directly set parameters overwrite this config .PARAMETER StorageAccount Storageaccount to use Overrides value from config param .PARAMETER TableName TableName to use Overrides value from config param .PARAMETER sastoken sastoken to use Overrides value from config param .PARAMETER Entity Entity to update Single Rowentity = Single update Multiple Rows in list = Batch .PARAMETER pipe Same as "Entity" Multiple are possible .PARAMETER PartitionKey PartitionKey to use DOES NOT Overrides value from Entity param Mandatory if BATCH job .PARAMETER RowKey RowKey to use DOES NOT Overrides value from Entity param .PARAMETER MaxBatch Entries to be used per batch job Betwwen 1 - 100 - Default: 100 .PARAMETER RemoveExistingData [switch] true => Deletes values for entity that aren´t listed in entity param .PARAMETER Batch [switch] true => Batchprocessing - Requires "Partitionkey" value .PARAMETER skipSAScheck ignore SAStoken check recommended if a lot of tables with the same token shall be used .EXAMPLE Update-AZTableEntity -Config $Config -Entity $Entity $Entity | Update-AZTableEntity -Config $Config -sastoken $sastoken .NOTES AUTHOR: Ken Dobrunz // Ken.Dobrunz@skaylink.com | Ken@Dobrunz.Tech LASTEDIT: 03.04.2023 - Version: 2.0 #> [cmdletbinding()] Param( #* Active data # Tableconfig - Config or other 3 needed [Parameter()]$Config, [Parameter()][Alias('Storage')]$StorageAccount, [Parameter()][Alias('Table')]$TableName, [Parameter()][Alias('sas')]$sastoken, [Parameter()]$MaxBatch, # Entity [parameter()]$Entity, [parameter(ValueFromPipeline = $True)]$pipe, [Parameter()][Alias('PKey')]$PartitionKey, # overrides if in entity / Mandatory if batch [Parameter()][Alias('RKey')]$RowKey, # overrides if in entity [Parameter()][Alias('Put')][switch]$RemoveExistingData, [Parameter()][switch]$batch, [Parameter()][switch]$skipSAScheck ) Begin { $SelfIdentifier = "UpdateTable" #* Check input / config $TableName = if ($TableName) { $TableName }elseif ($Config.Table) { $Config.Table }else { Write-Error "[$($SelfIdentifier)] No TableName provided" } $StorageAccount = if ($StorageAccount) { $StorageAccount }elseif ($Config.StorageAccount) { $Config.StorageAccount }else { Write-Error "[$($SelfIdentifier)] No StorageAccount provided" } $sastoken = if ($sastoken) { $sastoken }elseif ($Config.sastoken) { $Config.sastoken }else { Write-Error "[$($SelfIdentifier)] No sastoken provided" } $MaxBatch = if ($MaxBatch) { $MaxBatch }elseif ($Config.MaxBatch) { $Config.MaxBatch }else { 100 } #* Validity checks # SAS token if (!$skipSAScheck) { if (!(Get-SAStokenValidity -ReadOnlyObject -Table -sastoken $sastoken)) { throw "[$($SelfIdentifier)] Sastoken not valid" } } else { Write-Verbose "[$($SelfIdentifier)] Skipped sas check 
due to skip flag" } #* Batch Check if ($batch) { $Mode = "Batch" if ($null -eq $PartitionKey) { Write-Error "[$($SelfIdentifier)] No PartitionKey present for BATCH job" } #? Needs to be set & unique for batch #todo: autodetect and run multiple jobs from input # MaxBatch if ($MaxBatch -ge 1 -and $MaxBatch -le 100) { Write-Verbose "[$($SelfIdentifier)] MaxBatch set to [$MaxBatch]" } else { Write-Verbose "[$($SelfIdentifier)] MaxBatch invalid - Setting to default value [100]" $MaxBatch = 100 } } else { $Mode = "Single" } #* Set Method $Method = if ($RemoveExistingData) { "PUT" }else { "MERGE" } #* Preparations $table_uri = "https://$($StorageAccount).table.core.windows.net/$($TableName)" if ($mode -eq "Single") { $TableAPIHeader = @{ "x-ms-version" = "2019-07-07" Accept = "application/json;odata=minimalmetadata" } } else { #Batch Variables $BatchURI = "https://$($StorageAccount).table.core.windows.net/" + '$batch' + $sastoken #? Does not contain table name $TableAPIHeader = @{ "x-ms-version" = "2013-08-15" Accept = "application/json;odata=minimalmetadata" #"Content-Type" = "multipart/mixed; boundary=batch_XXXXXXXX" #? Will be set in "process" Connection = "Keep-Alive" DataServiceVersion = '3.0' "Accept-Charset" = "UTF-8" "MaxDataServiceVersion" = "3.0;NetFx" } } $functionverbosecount = 0 } Process { #* Select Single or Batch processing if ($Mode -eq "Single") { #? SINGLE job #* Set variables if ($Entity) { $pipe = $Entity } $RowKey = if ($pipe.RowKey) { $pipe.RowKey }elseif ($RowKey) { $RowKey }else { Write-Error "[$($SelfIdentifier)] No RowKey provided" } $PartitionKey = if ($pipe.PartitionKey) { $pipe.PartitionKey }elseif ($PartitionKey) { $PartitionKey }else { Write-Error "[$($SelfIdentifier)] No PartitionKey provided" } if (!$PartitionKey -or !$RowKey) { Write-Error "[$($SelfIdentifier)] No P/Rkey!" } #* Set Table $filter = "(PartitionKey='$($PartitionKey)',RowKey='$($Rowkey)')" Write-Debug "[$($SelfIdentifier)] [$($Method)] @ [$($table_uri)] - [$($filter)] " $body = ($pipe | ConvertTo-Json) #* Commit Invoke-RestMethod -Method $Method -Uri ($table_uri + $filter + $sastoken) -Headers $TableAPIHeader -Body $body -ContentType application/json $functionverbosecount++ } else { #? BATCH job #* Set required jobs $NeededRuns = [math]::ceiling(($Entity.count) / $MaxBatch) #? 
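            #? Worked example (assumed numbers): 250 rows with MaxBatch = 100 => Ceiling(250 / 100) = 3 batch jobs,
            #? covering index ranges 0..99, 100..199 and 200..249 through the $CurStack window advanced below.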
            $CurRun = 1; $CurStack = 0
            Write-Verbose "[$($SelfIdentifier)] [$NeededRuns] batch jobs required for [$($Entity.count)] rows"
            while ($CurRun -le $NeededRuns) {
                Write-Verbose "[$($SelfIdentifier)] Batch [$CurRun]/[$NeededRuns]"

                #* Batch variables
                # Generic
                $BatchGUID = (New-Guid).Guid
                $ChangeSetGUID = (New-Guid).Guid
                $TableAPIHeader.'Content-Type' = "multipart/mixed; boundary=batch_$BatchGUID"
                # BatchBody
                $BatchBody_Start = @("--batch_$BatchGUID", "Content-Type: multipart/mixed; boundary=changeset_$ChangeSetGUID", "")
                $BatchBody_End = @("--changeset_$ChangeSetGUID--", "--batch_$BatchGUID--")

                #* Create current batch
                $BatchBody_Changes = @()
                ($Entity[$CurStack..($CurStack + ($MaxBatch - 1))]) | ForEach-Object {
                    $loopsink = @{ object = $_ }
                    $loopsink.uri = $table_uri + "(PartitionKey='$($PartitionKey)',RowKey='$($loopsink.object.RowKey)')"
                    $BatchBody_Changes += @(
                        "--changeset_$ChangeSetGUID"
                        "Content-Type: application/http"
                        "Accept: application/json;odata=minimalmetadata"
                        "Content-Transfer-Encoding: binary"
                        ""
                        "$Method $($loopsink.uri) HTTP/1.1"
                        "Content-Type: application/json"
                        "Accept: application/json;odata=minimalmetadata"
                        "Prefer: return-no-content"
                        "DataServiceVersion: 3.0;"
                        ""
                        "$($($loopsink.object) | ConvertTo-Json -Compress)"
                        ""
                    )
                }

                #* Commit data
                $batchbody_final = ($BatchBody_Start + $BatchBody_Changes + $BatchBody_End) -join "`n"
                Try { [void](Invoke-WebRequest -Method POST -Headers $TableAPIHeader -Uri $BatchURI -Body $batchbody_final) }
                catch { Write-Error "[$($SelfIdentifier)] Batch [$CurRun]/[$NeededRuns] failed" }

                #* Advance counters for the next loop
                $CurRun++
                $CurStack = $CurStack + $MaxBatch
            }
        }
    }
    End {
        if ($functionverbosecount -gt 1) { Write-Verbose "[$($SelfIdentifier)] Updated $($functionverbosecount) entries" }
    }
}

#v2.0.1
Export-ModuleMember -Function *
Write-Verbose "Import-END| [$($FunctionScriptName)]"
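<# Usage sketch (illustrative only - storage account, table, SAS token and entity values below are placeholders):

    # Single entity: merged into the existing row, or created if it does not exist yet
    $Entity = [PSCustomObject]@{ PartitionKey = "Devices"; RowKey = "device-001"; Status = "online" }
    Update-AZTableEntity -StorageAccount "contosostorage" -TableName "DeviceState" -sastoken "?sv=...&sig=..." -Entity $Entity

    # Batch: up to MaxBatch (1-100) rows per request, all written under one PartitionKey
    Update-AZTableEntity -StorageAccount "contosostorage" -TableName "DeviceState" -sastoken "?sv=...&sig=..." `
        -Entity $Rows -Batch -PartitionKey "Devices"

    # Replace instead of merge: properties not present in the entity are removed from the stored row
    Update-AZTableEntity -Config $Config -Entity $Entity -RemoveExistingData
#>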