From ae6f90b0d1ae391a71722b05f4668eec3fbba67d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?H=C3=A9lder=20Pinto?=
Date: Fri, 11 Oct 2024 16:05:04 +0100
Subject: [PATCH] [Optimization engine] Improves ingestion resiliency in face
 of 0 bytes chunks (#1048)

Co-authored-by: Helder Pinto
---
 docs/_resources/changelog.md                  |  8 ++
 .../azuredeploy-nested.bicep                  |  2 +-
 ...t-OptimizationCSVExportsToLogAnalytics.ps1 | 75 +++++++++++--------
 src/optimization-engine/upgrade-manifest.json |  2 +-
 4 files changed, 52 insertions(+), 35 deletions(-)

diff --git a/docs/_resources/changelog.md b/docs/_resources/changelog.md
index 1a379a139..1eff588ce 100644
--- a/docs/_resources/changelog.md
+++ b/docs/_resources/changelog.md
@@ -81,6 +81,14 @@ Legend:
 >
 > 1. Cost Management export modules for subscriptions and resource groups.
 
+🔍 Optimization engine
+{: .fs-5 .fw-500 .mt-4 mb-0 }
+
+> 🛠️ Fixed:
+>
+> 1. Exports ingestion issues in cases where exports come with empty lines ([#998](https://github.com/microsoft/finops-toolkit/issues/998))
+> 1. Missing columns in EA savings plans exports ([#1026](https://github.com/microsoft/finops-toolkit/issues/1026))
+
 <br>
 
 ## 🪛 v0.6 Update 1
diff --git a/src/optimization-engine/azuredeploy-nested.bicep b/src/optimization-engine/azuredeploy-nested.bicep
index 1491eef0d..8267b138f 100644
--- a/src/optimization-engine/azuredeploy-nested.bicep
+++ b/src/optimization-engine/azuredeploy-nested.bicep
@@ -1075,7 +1075,7 @@ var runbooks = [
   }
   {
     name: csvIngestRunbookName
-    version: '1.6.1.0'
+    version: '1.6.2.0'
     description: 'Ingests CSV blobs as custom logs to Log Analytics'
     type: 'PowerShell'
     scriptUri: uri(templateLocation, 'runbooks/data-collection/${csvIngestRunbookName}.ps1')
diff --git a/src/optimization-engine/runbooks/data-collection/Ingest-OptimizationCSVExportsToLogAnalytics.ps1 b/src/optimization-engine/runbooks/data-collection/Ingest-OptimizationCSVExportsToLogAnalytics.ps1
index dc6a6206f..a162e9931 100644
--- a/src/optimization-engine/runbooks/data-collection/Ingest-OptimizationCSVExportsToLogAnalytics.ps1
+++ b/src/optimization-engine/runbooks/data-collection/Ingest-OptimizationCSVExportsToLogAnalytics.ps1
@@ -224,7 +224,7 @@ Write-Output "Found $($unprocessedBlobs.Count) new blobs to process..."
 foreach ($blob in $unprocessedBlobs) {
     $newProcessedTime = $blob.LastModified.UtcDateTime.ToString("yyyy'-'MM'-'dd'T'HH':'mm':'ss'.'fff'Z'")
 
-    Write-Output "About to process $($blob.Name)..."
+    Write-Output "About to process $($blob.Name) ($($blob.Length) bytes)..."
 
     $blobFilePath = "$env:TEMP\$($blob.Name)"
     Get-AzStorageBlobContent -CloudBlob $blob.ICloudBlob -Context $saCtx -Force -Destination $blobFilePath | Out-Null
@@ -255,44 +255,53 @@ foreach ($blob in $unprocessedBlobs) {
         $csvObject = $chunkLines | ConvertFrom-Csv
         $jsonObject = ConvertTo-Json -InputObject $csvObject
 
-        $res = Post-OMSData -workspaceId $workspaceId -sharedKey $sharedKey -body ([System.Text.Encoding]::UTF8.GetBytes($jsonObject)) -logType $logname -TimeStampField "Timestamp" -AzureEnvironment $cloudEnvironment
-        if ($res -ge 200 -and $res -lt 300)
+        if ($null -ne $jsonObject)
         {
-            Write-Output "Succesfully uploaded $lineCounter $LogAnalyticsSuffix rows to Log Analytics"
-            if ($r.Peek() -lt 0) {
-                $lastProcessedLine = -1
-            }
-            else {
-                $lastProcessedLine = $linesProcessed - 1
-            }
+            $res = Post-OMSData -workspaceId $workspaceId -sharedKey $sharedKey -body ([System.Text.Encoding]::UTF8.GetBytes($jsonObject)) -logType $logname -TimeStampField "Timestamp" -AzureEnvironment $cloudEnvironment
 
-            $updatedLastProcessedLine = $lastProcessedLine
-            $updatedLastProcessedDateTime = $lastProcessedDateTime
-            if ($r.Peek() -lt 0) {
-                $updatedLastProcessedDateTime = $newProcessedTime
+            if ($res -ge 200 -and $res -lt 300)
+            {
+                Write-Output "Successfully uploaded $lineCounter $LogAnalyticsSuffix rows to Log Analytics"
+            }
+            else
+            {
+                Write-Warning "Failed to upload $lineCounter $LogAnalyticsSuffix rows. Error code: $res"
Error code: $res" + $r.Dispose() + Remove-Item -Path $blobFilePath -Force + throw } - $lastProcessedDateTime = $updatedLastProcessedDateTime - Write-Output "Updating last processed time / line to $($updatedLastProcessedDateTime) / $updatedLastProcessedLine" - $sqlStatement = "UPDATE [$LogAnalyticsIngestControlTable] SET LastProcessedLine = $updatedLastProcessedLine, LastProcessedDateTime = '$updatedLastProcessedDateTime' WHERE StorageContainerName = '$storageAccountSinkContainer'" - $dbToken = Get-AzAccessToken -ResourceUrl "https://$azureSqlDomain/" - $Conn = New-Object System.Data.SqlClient.SqlConnection("Server=tcp:$sqlserver,1433;Database=$sqldatabase;Encrypt=True;Connection Timeout=$SqlTimeout;") - $Conn.AccessToken = $dbToken.Token - $Conn.Open() - $Cmd=new-object system.Data.SqlClient.SqlCommand - $Cmd.Connection = $Conn - $Cmd.CommandText = $sqlStatement - $Cmd.CommandTimeout = $SqlTimeout - $Cmd.ExecuteReader() - $Conn.Close() - $Conn.Dispose() } - else + else { - Write-Warning "Failed to upload $lineCounter $LogAnalyticsSuffix rows. Error code: $res" - $r.Dispose() - Remove-Item -Path $blobFilePath -Force - throw + Write-Warning "Skipped uploading $lineCounter $LogAnalyticsSuffix rows. Null JSON object." + } + + if ($r.Peek() -lt 0) { + $lastProcessedLine = -1 + } + else { + $lastProcessedLine = $linesProcessed - 1 + } + + $updatedLastProcessedLine = $lastProcessedLine + $updatedLastProcessedDateTime = $lastProcessedDateTime + if ($r.Peek() -lt 0) { + $updatedLastProcessedDateTime = $newProcessedTime } + $lastProcessedDateTime = $updatedLastProcessedDateTime + Write-Output "Updating last processed time / line to $($updatedLastProcessedDateTime) / $updatedLastProcessedLine" + $sqlStatement = "UPDATE [$LogAnalyticsIngestControlTable] SET LastProcessedLine = $updatedLastProcessedLine, LastProcessedDateTime = '$updatedLastProcessedDateTime' WHERE StorageContainerName = '$storageAccountSinkContainer'" + $dbToken = Get-AzAccessToken -ResourceUrl "https://$azureSqlDomain/" + $Conn = New-Object System.Data.SqlClient.SqlConnection("Server=tcp:$sqlserver,1433;Database=$sqldatabase;Encrypt=True;Connection Timeout=$SqlTimeout;") + $Conn.AccessToken = $dbToken.Token + $Conn.Open() + $Cmd=new-object system.Data.SqlClient.SqlCommand + $Cmd.Connection = $Conn + $Cmd.CommandText = $sqlStatement + $Cmd.CommandTimeout = $SqlTimeout + $Cmd.ExecuteReader() + $Conn.Close() + $Conn.Dispose() $chunkLines = @() $chunkLines += $header diff --git a/src/optimization-engine/upgrade-manifest.json b/src/optimization-engine/upgrade-manifest.json index 3d8331d7a..457e0c712 100644 --- a/src/optimization-engine/upgrade-manifest.json +++ b/src/optimization-engine/upgrade-manifest.json @@ -304,7 +304,7 @@ { "runbook": { "name": "runbooks/data-collection/Ingest-OptimizationCSVExportsToLogAnalytics.ps1", - "version": "1.6.1.0" + "version": "1.6.2.0" }, "source": "dataCollection" },