
Commit

[Optimization engine] Improves ingestion resiliency in face of 0 bytes chunks (#1048)

Co-authored-by: Helder Pinto <[email protected]>
helderpinto and Helder Pinto authored Oct 11, 2024
1 parent 7680d0c commit ae6f90b
Showing 4 changed files with 52 additions and 35 deletions.
8 changes: 8 additions & 0 deletions docs/_resources/changelog.md
@@ -81,6 +81,14 @@ Legend:
 >
 > 1. Cost Management export modules for subscriptions and resource groups.
 
+🔍 Optimization engine
+{: .fs-5 .fw-500 .mt-4 mb-0 }
+
+> 🛠️ Fixed:
+>
+> 1. Exports ingestion issues in cases where exports come with empty lines ([#998](https://github.com/microsoft/finops-toolkit/issues/998))
+> 1. Missing columns in EA savings plans exports ([#1026](https://github.com/microsoft/finops-toolkit/issues/1026))
+
 <br><a name="latest"></a>
 
 ## 🪛 v0.6 Update 1
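For context on the empty-lines fix ([#998](https://github.com/microsoft/finops-toolkit/issues/998)): blank rows in an export CSV reach the parser and produce empty records. A minimal, hypothetical guard (illustrative only, not the runbook's exact code) filters them out before the content is chunked:

```powershell
# Hypothetical sketch: drop blank lines from a downloaded export before the
# CSV content is chunked and parsed. $csvFilePath is an illustrative variable.
$allLines  = Get-Content -Path $csvFilePath
$dataLines = $allLines | Where-Object { -not [string]::IsNullOrWhiteSpace($_) }
Write-Output "Kept $($dataLines.Count) of $($allLines.Count) lines after dropping empty ones"
```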
2 changes: 1 addition & 1 deletion src/optimization-engine/azuredeploy-nested.bicep
@@ -1075,7 +1075,7 @@ var runbooks = [
   }
   {
     name: csvIngestRunbookName
-    version: '1.6.1.0'
+    version: '1.6.2.0'
     description: 'Ingests CSV blobs as custom logs to Log Analytics'
     type: 'PowerShell'
     scriptUri: uri(templateLocation, 'runbooks/data-collection/${csvIngestRunbookName}.ps1')
75 changes: 42 additions & 33 deletions src/optimization-engine/runbooks/data-collection/Ingest-OptimizationCSVExportsToLogAnalytics.ps1
@@ -224,7 +224,7 @@ Write-Output "Found $($unprocessedBlobs.Count) new blobs to process..."
 
 foreach ($blob in $unprocessedBlobs) {
     $newProcessedTime = $blob.LastModified.UtcDateTime.ToString("yyyy'-'MM'-'dd'T'HH':'mm':'ss'.'fff'Z'")
-    Write-Output "About to process $($blob.Name)..."
+    Write-Output "About to process $($blob.Name) ($($blob.Length) bytes)..."
     $blobFilePath = "$env:TEMP\$($blob.Name)"
     Get-AzStorageBlobContent -CloudBlob $blob.ICloudBlob -Context $saCtx -Force -Destination $blobFilePath | Out-Null
 
@@ -255,44 +255,53 @@ foreach ($blob in $unprocessedBlobs) {
         $csvObject = $chunkLines | ConvertFrom-Csv
         $jsonObject = ConvertTo-Json -InputObject $csvObject
 
-        $res = Post-OMSData -workspaceId $workspaceId -sharedKey $sharedKey -body ([System.Text.Encoding]::UTF8.GetBytes($jsonObject)) -logType $logname -TimeStampField "Timestamp" -AzureEnvironment $cloudEnvironment
-        if ($res -ge 200 -and $res -lt 300)
+        if ($null -ne $jsonObject)
         {
-            Write-Output "Succesfully uploaded $lineCounter $LogAnalyticsSuffix rows to Log Analytics"
-            if ($r.Peek() -lt 0) {
-                $lastProcessedLine = -1
-            }
-            else {
-                $lastProcessedLine = $linesProcessed - 1
-            }
+            $res = Post-OMSData -workspaceId $workspaceId -sharedKey $sharedKey -body ([System.Text.Encoding]::UTF8.GetBytes($jsonObject)) -logType $logname -TimeStampField "Timestamp" -AzureEnvironment $cloudEnvironment
 
-            $updatedLastProcessedLine = $lastProcessedLine
-            $updatedLastProcessedDateTime = $lastProcessedDateTime
-            if ($r.Peek() -lt 0) {
-                $updatedLastProcessedDateTime = $newProcessedTime
+            if ($res -ge 200 -and $res -lt 300)
+            {
+                Write-Output "Succesfully uploaded $lineCounter $LogAnalyticsSuffix rows to Log Analytics"
+            }
+            else
+            {
+                Write-Warning "Failed to upload $lineCounter $LogAnalyticsSuffix rows. Error code: $res"
+                $r.Dispose()
+                Remove-Item -Path $blobFilePath -Force
+                throw
             }
-            $lastProcessedDateTime = $updatedLastProcessedDateTime
-            Write-Output "Updating last processed time / line to $($updatedLastProcessedDateTime) / $updatedLastProcessedLine"
-            $sqlStatement = "UPDATE [$LogAnalyticsIngestControlTable] SET LastProcessedLine = $updatedLastProcessedLine, LastProcessedDateTime = '$updatedLastProcessedDateTime' WHERE StorageContainerName = '$storageAccountSinkContainer'"
-            $dbToken = Get-AzAccessToken -ResourceUrl "https://$azureSqlDomain/"
-            $Conn = New-Object System.Data.SqlClient.SqlConnection("Server=tcp:$sqlserver,1433;Database=$sqldatabase;Encrypt=True;Connection Timeout=$SqlTimeout;")
-            $Conn.AccessToken = $dbToken.Token
-            $Conn.Open()
-            $Cmd=new-object system.Data.SqlClient.SqlCommand
-            $Cmd.Connection = $Conn
-            $Cmd.CommandText = $sqlStatement
-            $Cmd.CommandTimeout = $SqlTimeout
-            $Cmd.ExecuteReader()
-            $Conn.Close()
-            $Conn.Dispose()
         }
-        else
+        else
         {
-            Write-Warning "Failed to upload $lineCounter $LogAnalyticsSuffix rows. Error code: $res"
-            $r.Dispose()
-            Remove-Item -Path $blobFilePath -Force
-            throw
+            Write-Warning "Skipped uploading $lineCounter $LogAnalyticsSuffix rows. Null JSON object."
         }
 
+        if ($r.Peek() -lt 0) {
+            $lastProcessedLine = -1
+        }
+        else {
+            $lastProcessedLine = $linesProcessed - 1
+        }
+
+        $updatedLastProcessedLine = $lastProcessedLine
+        $updatedLastProcessedDateTime = $lastProcessedDateTime
+        if ($r.Peek() -lt 0) {
+            $updatedLastProcessedDateTime = $newProcessedTime
+        }
+        $lastProcessedDateTime = $updatedLastProcessedDateTime
+        Write-Output "Updating last processed time / line to $($updatedLastProcessedDateTime) / $updatedLastProcessedLine"
+        $sqlStatement = "UPDATE [$LogAnalyticsIngestControlTable] SET LastProcessedLine = $updatedLastProcessedLine, LastProcessedDateTime = '$updatedLastProcessedDateTime' WHERE StorageContainerName = '$storageAccountSinkContainer'"
+        $dbToken = Get-AzAccessToken -ResourceUrl "https://$azureSqlDomain/"
+        $Conn = New-Object System.Data.SqlClient.SqlConnection("Server=tcp:$sqlserver,1433;Database=$sqldatabase;Encrypt=True;Connection Timeout=$SqlTimeout;")
+        $Conn.AccessToken = $dbToken.Token
+        $Conn.Open()
+        $Cmd=new-object system.Data.SqlClient.SqlCommand
+        $Cmd.Connection = $Conn
+        $Cmd.CommandText = $sqlStatement
+        $Cmd.CommandTimeout = $SqlTimeout
+        $Cmd.ExecuteReader()
+        $Conn.Close()
+        $Conn.Dispose()
 
         $chunkLines = @()
         $chunkLines += $header
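The gist of the fix above: when a chunk contains no data rows (for example, the tail of an export padded into a 0-byte chunk, or a chunk whose lines were all empty), `ConvertFrom-Csv` yields nothing, the JSON payload ends up null, and the runbook must skip the Log Analytics POST instead of failing — while still advancing the checkpoint so the blob is not reprocessed forever. A minimal repro sketch under that assumption (column names are illustrative; the guard sits before `ConvertTo-Json` here because null handling of `-InputObject` varies across PowerShell versions, whereas the commit checks the resulting `$jsonObject`):

```powershell
# Hypothetical repro (not part of the diff): a chunk holding only the CSV
# header produces no objects, so there is no JSON payload to upload.
$header     = 'Date,Cost,ResourceId'   # illustrative columns
$chunkLines = @($header)               # a chunk with zero data rows

$csvObject  = $chunkLines | ConvertFrom-Csv   # header only -> $null
$jsonObject = $null
if ($null -ne $csvObject) {
    $jsonObject = ConvertTo-Json -InputObject $csvObject
}

if ($null -ne $jsonObject) {
    Write-Output "Would upload $($chunkLines.Count - 1) rows to Log Analytics"
}
else {
    # Mirrors the new guard: skip the POST, but let the caller still update
    # the last-processed checkpoint, as the diff now does outside the branch.
    Write-Warning "Skipped uploading rows. Null JSON object."
}
```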
2 changes: 1 addition & 1 deletion src/optimization-engine/upgrade-manifest.json
@@ -304,7 +304,7 @@
 {
   "runbook": {
     "name": "runbooks/data-collection/Ingest-OptimizationCSVExportsToLogAnalytics.ps1",
-    "version": "1.6.1.0"
+    "version": "1.6.2.0"
   },
   "source": "dataCollection"
 },
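Both version bumps (in the Bicep template and in this manifest) matter because an upgrade path can compare versions to decide which runbooks to re-import. A hypothetical sketch of that comparison (illustrative only, not the toolkit's actual upgrade code):

```powershell
# Hypothetical sketch: PowerShell's [version] type compares four-part
# versions numerically, so '1.6.2.0' correctly sorts after '1.6.1.0'.
$deployed = [version]'1.6.1.0'   # version currently imported in Automation
$manifest = [version]'1.6.2.0'   # version declared in upgrade-manifest.json

if ($manifest -gt $deployed) {
    Write-Output "Runbook outdated ($deployed -> $manifest); re-import required."
}
```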
