Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,221 @@
import dayjs from 'dayjs'
import { describe, expect, it } from 'vitest'

import { normalizeChartBuckets } from './ChartDataTransform.utils'
import type { LogsBarChartDatum } from './ProjectUsage.metrics'

describe('normalizeChartBuckets', () => {
  // Fixed reference time so every expected bucket timestamp is deterministic.
  const now = dayjs('2024-01-28T12:00:00.000Z')

  describe('1hr interval', () => {
    it('should create exactly 30 buckets with 2-minute intervals', () => {
      const result = normalizeChartBuckets([], '1hr', now.toDate())

      expect(result).toHaveLength(30)

      // First bucket starts at the beginning of the window (60 minutes ago)
      expect(result[0].timestamp).toBe(now.subtract(60, 'minute').toISOString())

      // Last bucket starts 2 minutes before `now` (buckets cover [start, now))
      expect(result[29].timestamp).toBe(now.subtract(2, 'minute').toISOString())

      // Check all buckets are 2 minutes apart
      for (let i = 0; i < result.length - 1; i++) {
        const diff = dayjs(result[i + 1].timestamp).diff(dayjs(result[i].timestamp), 'minute')
        expect(diff).toBe(2)
      }
    })

    it('should aggregate data points into correct 2-minute buckets', () => {
      const data: LogsBarChartDatum[] = [
        {
          // First bucket covers -60 to -58 minutes, so this lands in bucket 0
          timestamp: now.subtract(60, 'minute').toISOString(),
          ok_count: 10,
          warning_count: 1,
          error_count: 2,
        },
        {
          // -58.5 minutes: still inside bucket 0 (-60 to -58)
          timestamp: now.subtract(59, 'minute').add(30, 'second').toISOString(),
          ok_count: 5,
          warning_count: 0,
          error_count: 1,
        },
        {
          timestamp: now.subtract(30, 'minute').toISOString(),
          ok_count: 20,
          warning_count: 2,
          error_count: 0,
        },
      ]

      const result = normalizeChartBuckets(data, '1hr', now.toDate())

      // First bucket (60-58 minutes ago) should contain aggregated data
      const firstBucket = result[0]
      expect(firstBucket.ok_count).toBe(15) // 10 + 5
      expect(firstBucket.warning_count).toBe(1) // 1 + 0
      expect(firstBucket.error_count).toBe(3) // 2 + 1

      // Bucket at 30 minutes ago
      const bucket15 = result[15] // 30 minutes / 2 minutes per bucket = bucket 15
      expect(bucket15.ok_count).toBe(20)
      expect(bucket15.warning_count).toBe(2)
      expect(bucket15.error_count).toBe(0)
    })

    it('should return empty buckets when no data provided', () => {
      const result = normalizeChartBuckets([], '1hr', now.toDate())

      // Bucket skeleton is always emitted, just with zeroed counts
      expect(result).toHaveLength(30)
      result.forEach((bucket) => {
        expect(bucket.ok_count).toBe(0)
        expect(bucket.warning_count).toBe(0)
        expect(bucket.error_count).toBe(0)
      })
    })
  })

  describe('1day interval', () => {
    it('should create exactly 24 buckets with 1-hour intervals', () => {
      const result = normalizeChartBuckets([], '1day', now.toDate())

      expect(result).toHaveLength(24)

      // Check first bucket
      expect(result[0].timestamp).toBe(now.subtract(24, 'hour').toISOString())

      // Check last bucket
      expect(result[23].timestamp).toBe(now.subtract(1, 'hour').toISOString())

      // Check all buckets are 1 hour apart
      for (let i = 0; i < result.length - 1; i++) {
        const diff = dayjs(result[i + 1].timestamp).diff(dayjs(result[i].timestamp), 'hour')
        expect(diff).toBe(1)
      }
    })

    it('should aggregate multiple data points into hourly buckets', () => {
      // Both points fall inside the first hour of the window (-24h to -23h)
      const data: LogsBarChartDatum[] = [
        {
          timestamp: now.subtract(23, 'hour').subtract(30, 'minute').toISOString(),
          ok_count: 100,
          warning_count: 5,
          error_count: 3,
        },
        {
          timestamp: now.subtract(23, 'hour').subtract(15, 'minute').toISOString(),
          ok_count: 50,
          warning_count: 2,
          error_count: 1,
        },
      ]

      const result = normalizeChartBuckets(data, '1day', now.toDate())

      // First bucket should contain aggregated data
      expect(result[0].ok_count).toBe(150)
      expect(result[0].warning_count).toBe(7)
      expect(result[0].error_count).toBe(4)
    })
  })

  describe('7day interval', () => {
    it('should create exactly 28 buckets with 6-hour intervals', () => {
      const result = normalizeChartBuckets([], '7day', now.toDate())

      expect(result).toHaveLength(28)

      // Check first bucket (7 days = 168 hours ago)
      expect(result[0].timestamp).toBe(now.subtract(168, 'hour').toISOString())

      // Check last bucket
      expect(result[27].timestamp).toBe(now.subtract(6, 'hour').toISOString())

      // Check all buckets are 6 hours apart
      for (let i = 0; i < result.length - 1; i++) {
        const diff = dayjs(result[i + 1].timestamp).diff(dayjs(result[i].timestamp), 'hour')
        expect(diff).toBe(6)
      }
    })

    it('should aggregate data points into 6-hour buckets', () => {
      // Both points fall inside the first 6-hour bucket (-168h to -162h)
      const data: LogsBarChartDatum[] = [
        {
          timestamp: now.subtract(167, 'hour').toISOString(),
          ok_count: 1000,
          warning_count: 10,
          error_count: 5,
        },
        {
          timestamp: now.subtract(165, 'hour').toISOString(),
          ok_count: 500,
          warning_count: 5,
          error_count: 2,
        },
      ]

      const result = normalizeChartBuckets(data, '7day', now.toDate())

      // First bucket should contain aggregated data
      expect(result[0].ok_count).toBe(1500)
      expect(result[0].warning_count).toBe(15)
      expect(result[0].error_count).toBe(7)
    })
  })

  describe('edge cases', () => {
    it('should handle data points outside the time range', () => {
      const data: LogsBarChartDatum[] = [
        {
          timestamp: now.subtract(120, 'minute').toISOString(), // Outside 1hr range
          ok_count: 100,
          warning_count: 10,
          error_count: 5,
        },
        {
          timestamp: now.add(10, 'minute').toISOString(), // Future data
          ok_count: 50,
          warning_count: 5,
          error_count: 2,
        },
        {
          timestamp: now.subtract(30, 'minute').toISOString(), // Within range
          ok_count: 25,
          warning_count: 2,
          error_count: 1,
        },
      ]

      const result = normalizeChartBuckets(data, '1hr', now.toDate())

      // Should only include the data within range
      const validBucket = result[15] // 30 minutes ago
      expect(validBucket.ok_count).toBe(25)
      expect(validBucket.warning_count).toBe(2)
      expect(validBucket.error_count).toBe(1)

      // Other buckets should be empty
      expect(result[0].ok_count).toBe(0)
      expect(result[29].ok_count).toBe(0)
    })

    it('should handle undefined/null values in data', () => {
      // Counts coming from the backend may be missing; they must coerce to 0
      const data: LogsBarChartDatum[] = [
        {
          timestamp: now.subtract(30, 'minute').toISOString(),
          ok_count: undefined as any,
          warning_count: null as any,
          error_count: 5,
        },
      ]

      const result = normalizeChartBuckets(data, '1hr', now.toDate())

      const bucket = result[15]
      expect(bucket.ok_count).toBe(0)
      expect(bucket.warning_count).toBe(0)
      expect(bucket.error_count).toBe(5)
    })
  })
})
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
import dayjs from 'dayjs'
import type { LogsBarChartDatum } from './ProjectUsage.metrics'

/**
 * Bucket width and count for each supported chart time interval.
 *
 * Invariant: `expectedBuckets * bucketMinutes` equals the full interval span,
 * so `normalizeChartBuckets` can derive the window start from these values.
 */
const BUCKET_CONFIG = {
  '1hr': {
    bucketMinutes: 2, // 2-minute buckets
    expectedBuckets: 30, // 60 minutes / 2 = 30 buckets
  },
  '1day': {
    bucketMinutes: 60, // 1-hour buckets
    expectedBuckets: 24, // 24 hours
  },
  '7day': {
    bucketMinutes: 360, // 6-hour buckets
    expectedBuckets: 28, // 168 hours / 6 = 28 buckets
  },
} as const

// Union of the supported interval keys: '1hr' | '1day' | '7day'
type IntervalKey = keyof typeof BUCKET_CONFIG

/**
* Normalizes chart data to consistent bucket sizes regardless of backend data density.
*
* For 1hr interval: Creates 30 buckets of 2 minutes each
* For 1day interval: Creates 24 buckets of 1 hour each
* For 7day interval: Creates 28 buckets of 6 hours each
*
* This ensures consistent bar width in charts and proper data aggregation.
*
* @param data - Raw chart data from backend
* @param interval - Time interval key ('1hr', '1day', '7day')
* @param endDate - End date for the chart (defaults to now)
* @returns Array of exactly the expected number of buckets with aggregated data
*/
/**
 * Normalizes chart data to consistent bucket sizes regardless of backend data density.
 *
 * For 1hr interval: Creates 30 buckets of 2 minutes each
 * For 1day interval: Creates 24 buckets of 1 hour each
 * For 7day interval: Creates 28 buckets of 6 hours each
 *
 * This ensures consistent bar width in charts and proper data aggregation.
 * The window is half-open: [endDate - span, endDate). Points outside it are dropped.
 *
 * @param data - Raw chart data from backend
 * @param interval - Time interval key ('1hr', '1day', '7day')
 * @param endDate - End date for the chart (defaults to now)
 * @returns Array of exactly the expected number of buckets with aggregated data
 */
export function normalizeChartBuckets(
  data: LogsBarChartDatum[],
  interval: IntervalKey,
  endDate: Date = new Date()
): LogsBarChartDatum[] {
  const { bucketMinutes, expectedBuckets } = BUCKET_CONFIG[interval]

  // Calculate start time based on expected buckets
  const end = dayjs(endDate)
  const start = end.subtract(expectedBuckets * bucketMinutes, 'minute')

  // Pre-fill every bucket with zeroed counts so the chart always renders a full axis
  const buckets: LogsBarChartDatum[] = []
  let currentBucketStart = start

  for (let i = 0; i < expectedBuckets; i++) {
    buckets.push({
      timestamp: currentBucketStart.toISOString(),
      ok_count: 0,
      warning_count: 0,
      error_count: 0,
    })
    currentBucketStart = currentBucketStart.add(bucketMinutes, 'minute')
  }

  // If no data, return empty buckets
  if (!data || data.length === 0) {
    return buckets
  }

  const bucketMs = bucketMinutes * 60_000

  // Aggregate data into buckets
  for (const datum of data) {
    // Bucket index from the millisecond diff. NOTE: dayjs' unit-based
    // `.diff(start, 'minute')` truncates toward zero, which wrongly mapped
    // points less than one minute BEFORE `start` to index 0 instead of -1;
    // flooring the raw ms diff excludes them correctly and is identical to
    // the old arithmetic for every in-range point.
    const bucketIndex = Math.floor(dayjs(datum.timestamp).diff(start) / bucketMs)

    // Skip data points outside our time range
    if (bucketIndex < 0 || bucketIndex >= expectedBuckets) {
      continue
    }

    // Aggregate counts, coercing missing/null counts to zero
    buckets[bucketIndex].ok_count += datum.ok_count || 0
    buckets[bucketIndex].warning_count += datum.warning_count || 0
    buckets[bucketIndex].error_count += datum.error_count || 0
  }

  return buckets
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import { describe, it, expect } from 'vitest'
import {
computeChangePercent,
computeSuccessAndNonSuccessRates,
sumErrors,
sumTotal,
Expand Down Expand Up @@ -43,14 +42,4 @@ describe('ProjectUsage.metrics', () => {
expect(successRate).toBeCloseTo(87.5)
expect(nonSuccessRate).toBeCloseTo(12.5)
})

it('computeChangePercent handles zero previous safely', () => {
expect(computeChangePercent(10, 0)).toBe(100)
expect(computeChangePercent(0, 0)).toBe(0)
})

it('computeChangePercent returns standard percentage delta', () => {
expect(computeChangePercent(120, 100)).toBe(20)
expect(computeChangePercent(80, 100)).toBe(-20)
})
})
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,3 @@ export const computeSuccessAndNonSuccessRates = (
const successRate = 100 - nonSuccessRate
return { successRate, nonSuccessRate }
}

/**
 * Percentage change from `previous` to `current`.
 *
 * A zero baseline is handled explicitly so the result stays finite:
 * any growth from zero reads as +100%, and zero-to-zero reads as 0%.
 */
export const computeChangePercent = (current: number, previous: number): number => {
  if (previous !== 0) {
    const delta = current - previous
    return (delta / previous) * 100
  }
  return current > 0 ? 100 : 0
}

export const formatDelta = (v: number): string => `${v >= 0 ? '+' : ''}${v.toFixed(1)}%`
Loading
Loading