diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt index 2e013740a..37ef71e73 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertService.kt @@ -74,9 +74,8 @@ import kotlin.coroutines.suspendCoroutine class AlertService( val client: Client, val xContentRegistry: NamedXContentRegistry, - val alertIndices: AlertIndices + val alertIndices: AlertIndices, ) { - companion object { const val MAX_BUCKET_LEVEL_MONITOR_ALERT_SEARCH_COUNT = 500 const val ERROR_ALERT_ID_PREFIX = "error-alert" @@ -86,15 +85,21 @@ class AlertService( private val logger = LogManager.getLogger(AlertService::class.java) - suspend fun loadCurrentAlertsForWorkflow(workflow: Workflow, dataSources: DataSources): Map { - val searchAlertsResponse: SearchResponse = searchAlerts( - workflow = workflow, - size = workflow.triggers.size * 2, // We expect there to be only a single in-progress alert so fetch 2 to check - dataSources = dataSources - ) + suspend fun loadCurrentAlertsForWorkflow( + workflow: Workflow, + dataSources: DataSources, + ): Map { + val searchAlertsResponse: SearchResponse = + searchAlerts( + workflow = workflow, + size = workflow.triggers.size * 2, // We expect there to be only a single in-progress alert so fetch 2 to check + dataSources = dataSources, + ) - val foundAlerts = searchAlertsResponse.hits.map { Alert.parse(contentParser(it.sourceRef), it.id, it.version) } - .groupBy { it.triggerId } + val foundAlerts = + searchAlertsResponse.hits + .map { Alert.parse(contentParser(it.sourceRef), it.id, it.version) } + .groupBy { it.triggerId } foundAlerts.values.forEach { alerts -> if (alerts.size > 1) { logger.warn("Found multiple alerts for same trigger: $alerts") @@ -106,15 +111,21 @@ class AlertService( } } - suspend fun loadCurrentAlertsForQueryLevelMonitor(monitor: Monitor, workflowRunContext: 
WorkflowRunContext?): Map { - val searchAlertsResponse: SearchResponse = searchAlerts( - monitor = monitor, - size = monitor.triggers.size * 2, // We expect there to be only a single in-progress alert so fetch 2 to check - workflowRunContext - ) + suspend fun loadCurrentAlertsForQueryLevelMonitor( + monitor: Monitor, + workflowRunContext: WorkflowRunContext?, + ): Map { + val searchAlertsResponse: SearchResponse = + searchAlerts( + monitor = monitor, + size = monitor.triggers.size * 2, // We expect there to be only a single in-progress alert so fetch 2 to check + workflowRunContext, + ) - val foundAlerts = searchAlertsResponse.hits.map { Alert.parse(contentParser(it.sourceRef), it.id, it.version) } - .groupBy { it.triggerId } + val foundAlerts = + searchAlertsResponse.hits + .map { Alert.parse(contentParser(it.sourceRef), it.id, it.version) } + .groupBy { it.triggerId } foundAlerts.values.forEach { alerts -> if (alerts.size > 1) { logger.warn("Found multiple alerts for same trigger: $alerts") @@ -130,23 +141,28 @@ class AlertService( monitor: Monitor, workflowRunContext: WorkflowRunContext?, ): Map> { - val searchAlertsResponse: SearchResponse = searchAlerts( - monitor = monitor, - // TODO: This should be limited based on a circuit breaker that limits Alerts - size = MAX_BUCKET_LEVEL_MONITOR_ALERT_SEARCH_COUNT, - workflowRunContext = workflowRunContext - ) + val searchAlertsResponse: SearchResponse = + searchAlerts( + monitor = monitor, + // TODO: This should be limited based on a circuit breaker that limits Alerts + size = MAX_BUCKET_LEVEL_MONITOR_ALERT_SEARCH_COUNT, + workflowRunContext = workflowRunContext, + ) - val foundAlerts = searchAlertsResponse.hits.map { Alert.parse(contentParser(it.sourceRef), it.id, it.version) } - .groupBy { it.triggerId } + val foundAlerts = + searchAlertsResponse.hits + .map { Alert.parse(contentParser(it.sourceRef), it.id, it.version) } + .groupBy { it.triggerId } return monitor.triggers.associateWith { trigger -> // Default to an 
empty map if there are no Alerts found for a Trigger to make Alert categorization logic easier ( - foundAlerts[trigger.id]?.mapNotNull { alert -> - alert.aggregationResultBucket?.let { it.getBucketKeysHash() to alert } - }?.toMap()?.toMutableMap() ?: mutableMapOf() - ) + foundAlerts[trigger.id] + ?.mapNotNull { alert -> + alert.aggregationResultBucket?.let { it.getBucketKeysHash() to alert } + }?.toMap() + ?.toMutableMap() ?: mutableMapOf() + ) } } @@ -155,7 +171,7 @@ class AlertService( result: QueryLevelTriggerRunResult, alertError: AlertError?, executionId: String, - workflorwRunContext: WorkflowRunContext? + workflorwRunContext: WorkflowRunContext?, ): Alert? { val currentTime = Instant.now() val currentAlert = ctx.alert?.alert @@ -169,32 +185,40 @@ class AlertService( currentActionIds.add(actionId) val actionRunResult = result.actionResults[actionId] when { - actionRunResult == null -> updatedActionExecutionResults.add(actionExecutionResult) - actionRunResult.throttled -> + actionRunResult == null -> { + updatedActionExecutionResults.add(actionExecutionResult) + } + + actionRunResult.throttled -> { updatedActionExecutionResults.add( actionExecutionResult.copy( - throttledCount = actionExecutionResult.throttledCount + 1 - ) + throttledCount = actionExecutionResult.throttledCount + 1, + ), ) - else -> updatedActionExecutionResults.add(actionExecutionResult.copy(lastExecutionTime = actionRunResult.executionTime)) + } + + else -> { + updatedActionExecutionResults.add(actionExecutionResult.copy(lastExecutionTime = actionRunResult.executionTime)) + } } } // add action execution results which not exist in current alert updatedActionExecutionResults.addAll( - result.actionResults.filter { !currentActionIds.contains(it.key) } - .map { ActionExecutionResult(it.key, it.value.executionTime, if (it.value.throttled) 1 else 0) } + result.actionResults + .filter { !currentActionIds.contains(it.key) } + .map { ActionExecutionResult(it.key, it.value.executionTime, if 
(it.value.throttled) 1 else 0) }, ) } else { updatedActionExecutionResults.addAll( result.actionResults.map { ActionExecutionResult(it.key, it.value.executionTime, if (it.value.throttled) 1 else 0) - } + }, ) } // Including a list of triggered clusters for cluster metrics monitors var triggeredClusters: MutableList? = null - if (result is ClusterMetricsTriggerRunResult) + if (result is ClusterMetricsTriggerRunResult) { result.clusterTriggerResults.forEach { if (it.triggered) { // Add an empty list if one isn't already present @@ -204,6 +228,7 @@ class AlertService( triggeredClusters!!.add(it.cluster) } } + } // Merge the alert's error message to the current alert's history val updatedHistory = currentAlert?.errorHistory.update(alertError) @@ -215,7 +240,7 @@ class AlertService( errorHistory = updatedHistory, actionExecutionResults = updatedActionExecutionResults, schemaVersion = IndexUtils.alertIndexSchemaVersion, - clusters = triggeredClusters + clusters = triggeredClusters, ) } else if (alertError == null && currentAlert?.isAcknowledged() == true) { null @@ -228,20 +253,30 @@ class AlertService( errorHistory = updatedHistory, actionExecutionResults = updatedActionExecutionResults, schemaVersion = IndexUtils.alertIndexSchemaVersion, - clusters = triggeredClusters + clusters = triggeredClusters, ) } else { - val alertState = if (workflorwRunContext?.auditDelegateMonitorAlerts == true) { - Alert.State.AUDIT - } else if (alertError == null) Alert.State.ACTIVE - else Alert.State.ERROR + val alertState = + if (workflorwRunContext?.auditDelegateMonitorAlerts == true) { + Alert.State.AUDIT + } else if (alertError == null) { + Alert.State.ACTIVE + } else { + Alert.State.ERROR + } Alert( - monitor = ctx.monitor, trigger = ctx.trigger, startTime = currentTime, - lastNotificationTime = currentTime, state = alertState, errorMessage = alertError?.message, - errorHistory = updatedHistory, actionExecutionResults = updatedActionExecutionResults, - schemaVersion = 
IndexUtils.alertIndexSchemaVersion, executionId = executionId, + monitor = ctx.monitor, + trigger = ctx.trigger, + startTime = currentTime, + lastNotificationTime = currentTime, + state = alertState, + errorMessage = alertError?.message, + errorHistory = updatedHistory, + actionExecutionResults = updatedActionExecutionResults, + schemaVersion = IndexUtils.alertIndexSchemaVersion, + executionId = executionId, workflowId = workflorwRunContext?.workflowId ?: "", - clusters = triggeredClusters + clusters = triggeredClusters, ) } } @@ -253,22 +288,31 @@ class AlertService( ctx: DocumentLevelTriggerExecutionContext, alertError: AlertError?, executionId: String, - workflorwRunContext: WorkflowRunContext? + workflorwRunContext: WorkflowRunContext?, ): Alert { val currentTime = Instant.now() - val alertState = if (workflorwRunContext?.auditDelegateMonitorAlerts == true) { - Alert.State.AUDIT - } else if (alertError == null) { - Alert.State.ACTIVE - } else { - Alert.State.ERROR - } + val alertState = + if (workflorwRunContext?.auditDelegateMonitorAlerts == true) { + Alert.State.AUDIT + } else if (alertError == null) { + Alert.State.ACTIVE + } else { + Alert.State.ERROR + } return Alert( - id = UUID.randomUUID().toString(), monitor = ctx.monitor, trigger = ctx.trigger, startTime = currentTime, - lastNotificationTime = currentTime, state = alertState, errorMessage = alertError?.message, - schemaVersion = IndexUtils.alertIndexSchemaVersion, findingIds = findings, relatedDocIds = relatedDocIds, - executionId = executionId, workflowId = workflorwRunContext?.workflowId ?: "" + id = UUID.randomUUID().toString(), + monitor = ctx.monitor, + trigger = ctx.trigger, + startTime = currentTime, + lastNotificationTime = currentTime, + state = alertState, + errorMessage = alertError?.message, + schemaVersion = IndexUtils.alertIndexSchemaVersion, + findingIds = findings, + relatedDocIds = relatedDocIds, + executionId = executionId, + workflowId = workflorwRunContext?.workflowId ?: "", ) } @@ 
-277,18 +321,26 @@ class AlertService( monitor: Monitor, alertError: AlertError, executionId: String?, - workflowRunContext: WorkflowRunContext? + workflowRunContext: WorkflowRunContext?, ): Alert { val currentTime = Instant.now() - val alertState = if (workflowRunContext?.auditDelegateMonitorAlerts == true) { - Alert.State.AUDIT - } else { - Alert.State.ERROR - } + val alertState = + if (workflowRunContext?.auditDelegateMonitorAlerts == true) { + Alert.State.AUDIT + } else { + Alert.State.ERROR + } return Alert( - id = id, monitor = monitor, trigger = NoOpTrigger(), startTime = currentTime, - lastNotificationTime = currentTime, state = alertState, errorMessage = alertError.message, - schemaVersion = IndexUtils.alertIndexSchemaVersion, executionId = executionId, workflowId = workflowRunContext?.workflowId ?: "" + id = id, + monitor = monitor, + trigger = NoOpTrigger(), + startTime = currentTime, + lastNotificationTime = currentTime, + state = alertState, + errorMessage = alertError.message, + schemaVersion = IndexUtils.alertIndexSchemaVersion, + executionId = executionId, + workflowId = workflowRunContext?.workflowId ?: "", ) } @@ -300,7 +352,6 @@ class AlertService( result: ChainedAlertTriggerRunResult, alertError: AlertError? = null, ): Alert? 
{ - val currentTime = Instant.now() val currentAlert = ctx.alert @@ -313,27 +364,34 @@ class AlertService( currentActionIds.add(actionId) val actionRunResult = result.actionResults[actionId] when { - actionRunResult == null -> updatedActionExecutionResults.add(actionExecutionResult) - actionRunResult.throttled -> + actionRunResult == null -> { + updatedActionExecutionResults.add(actionExecutionResult) + } + + actionRunResult.throttled -> { updatedActionExecutionResults.add( actionExecutionResult.copy( - throttledCount = actionExecutionResult.throttledCount + 1 - ) + throttledCount = actionExecutionResult.throttledCount + 1, + ), ) + } - else -> updatedActionExecutionResults.add(actionExecutionResult.copy(lastExecutionTime = actionRunResult.executionTime)) + else -> { + updatedActionExecutionResults.add(actionExecutionResult.copy(lastExecutionTime = actionRunResult.executionTime)) + } } } // add action execution results which not exist in current alert updatedActionExecutionResults.addAll( - result.actionResults.filter { !currentActionIds.contains(it.key) } - .map { ActionExecutionResult(it.key, it.value.executionTime, if (it.value.throttled) 1 else 0) } + result.actionResults + .filter { !currentActionIds.contains(it.key) } + .map { ActionExecutionResult(it.key, it.value.executionTime, if (it.value.throttled) 1 else 0) }, ) } else { updatedActionExecutionResults.addAll( result.actionResults.map { ActionExecutionResult(it.key, it.value.executionTime, if (it.value.throttled) 1 else 0) - } + }, ) } @@ -346,7 +404,7 @@ class AlertService( errorMessage = null, errorHistory = updatedHistory, actionExecutionResults = updatedActionExecutionResults, - schemaVersion = IndexUtils.alertIndexSchemaVersion + schemaVersion = IndexUtils.alertIndexSchemaVersion, ) } else if (alertError == null && currentAlert?.isAcknowledged() == true) { null @@ -361,17 +419,21 @@ class AlertService( schemaVersion = IndexUtils.alertIndexSchemaVersion, ) } else { - if (alertError == null) 
Alert.State.ACTIVE - else Alert.State.ERROR + if (alertError == null) { + Alert.State.ACTIVE + } else { + Alert.State.ERROR + } Alert( startTime = Instant.now(), lastNotificationTime = currentTime, state = Alert.State.ACTIVE, - errorMessage = null, schemaVersion = IndexUtils.alertIndexSchemaVersion, + errorMessage = null, + schemaVersion = IndexUtils.alertIndexSchemaVersion, chainedAlertTrigger = ctx.trigger, executionId = executionId, workflow = workflow, - associatedAlertIds = associatedAlertIds + associatedAlertIds = associatedAlertIds, ) } } @@ -379,7 +441,7 @@ class AlertService( fun updateActionResultsForBucketLevelAlert( currentAlert: Alert, actionResults: Map, - alertError: AlertError? + alertError: AlertError?, ): Alert { val updatedActionExecutionResults = mutableListOf() val currentActionIds = mutableSetOf() @@ -389,21 +451,29 @@ class AlertService( currentActionIds.add(actionId) val actionRunResult = actionResults[actionId] when { - actionRunResult == null -> updatedActionExecutionResults.add(actionExecutionResult) - actionRunResult.throttled -> + actionRunResult == null -> { + updatedActionExecutionResults.add(actionExecutionResult) + } + + actionRunResult.throttled -> { updatedActionExecutionResults.add( actionExecutionResult.copy( - throttledCount = actionExecutionResult.throttledCount + 1 - ) + throttledCount = actionExecutionResult.throttledCount + 1, + ), ) - else -> updatedActionExecutionResults.add(actionExecutionResult.copy(lastExecutionTime = actionRunResult.executionTime)) + } + + else -> { + updatedActionExecutionResults.add(actionExecutionResult.copy(lastExecutionTime = actionRunResult.executionTime)) + } } } // Add action execution results not currently present in the alert updatedActionExecutionResults.addAll( - actionResults.filter { !currentActionIds.contains(it.key) } - .map { ActionExecutionResult(it.key, it.value.executionTime, if (it.value.throttled) 1 else 0) } + actionResults + .filter { !currentActionIds.contains(it.key) } + .map 
{ ActionExecutionResult(it.key, it.value.executionTime, if (it.value.throttled) 1 else 0) }, ) val updatedErrorHistory = currentAlert.errorHistory.update(alertError) @@ -414,7 +484,7 @@ class AlertService( state = Alert.State.ERROR, errorMessage = alertError.message, errorHistory = updatedErrorHistory, - actionExecutionResults = updatedActionExecutionResults + actionExecutionResults = updatedActionExecutionResults, ) } } @@ -429,7 +499,7 @@ class AlertService( aggResultBuckets: List, findings: List, executionId: String, - workflorwRunContext: WorkflowRunContext? + workflorwRunContext: WorkflowRunContext?, ): Map> { val dedupedAlerts = mutableListOf() val newAlerts = mutableListOf() @@ -445,23 +515,35 @@ class AlertService( currentAlerts.remove(aggAlertBucket.getBucketKeysHash()) } else { // New Alert - val alertState = if (workflorwRunContext?.auditDelegateMonitorAlerts == true) { - Alert.State.AUDIT - } else Alert.State.ACTIVE - val newAlert = Alert( - monitor = monitor, trigger = trigger, startTime = currentTime, - lastNotificationTime = currentTime, state = alertState, errorMessage = null, - errorHistory = mutableListOf(), actionExecutionResults = mutableListOf(), - schemaVersion = IndexUtils.alertIndexSchemaVersion, aggregationResultBucket = aggAlertBucket, - findingIds = findings, executionId = executionId, workflowId = workflorwRunContext?.workflowId ?: "" - ) + val alertState = + if (workflorwRunContext?.auditDelegateMonitorAlerts == true) { + Alert.State.AUDIT + } else { + Alert.State.ACTIVE + } + val newAlert = + Alert( + monitor = monitor, + trigger = trigger, + startTime = currentTime, + lastNotificationTime = currentTime, + state = alertState, + errorMessage = null, + errorHistory = mutableListOf(), + actionExecutionResults = mutableListOf(), + schemaVersion = IndexUtils.alertIndexSchemaVersion, + aggregationResultBucket = aggAlertBucket, + findingIds = findings, + executionId = executionId, + workflowId = workflorwRunContext?.workflowId ?: "", + ) 
newAlerts.add(newAlert) } } return mapOf( AlertCategory.DEDUPED to dedupedAlerts, - AlertCategory.NEW to newAlerts + AlertCategory.NEW to newAlerts, ) } @@ -469,8 +551,10 @@ class AlertService( val currentTime = Instant.now() return currentAlerts?.map { it.value.copy( - state = Alert.State.COMPLETED, endTime = currentTime, errorMessage = null, - schemaVersion = IndexUtils.alertIndexSchemaVersion + state = Alert.State.COMPLETED, + endTime = currentTime, + errorMessage = null, + schemaVersion = IndexUtils.alertIndexSchemaVersion, ) } ?: listOf() } @@ -483,16 +567,18 @@ class AlertService( ) { val newErrorAlertId = "$ERROR_ALERT_ID_PREFIX-${monitor.id}-${UUID.randomUUID()}" - val searchRequest = SearchRequest(monitor.dataSources.alertsIndex) - .source( - SearchSourceBuilder() - .sort(Alert.START_TIME_FIELD, SortOrder.DESC) - .query( - QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitor.id)) - .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR.name)) - ) - ) + val searchRequest = + SearchRequest(monitor.dataSources.alertsIndex) + .source( + SearchSourceBuilder() + .sort(Alert.START_TIME_FIELD, SortOrder.DESC) + .query( + QueryBuilders + .boolQuery() + .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitor.id)) + .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR.name)), + ), + ) val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } var alert = @@ -508,27 +594,30 @@ class AlertService( val existingErrorAlert = Alert.parse(xcp, hit.id, hit.version) val currentTime = Instant.now() - alert = if (alert.errorMessage != existingErrorAlert.errorMessage) { - var newErrorHistory = existingErrorAlert.errorHistory.update( - AlertError(existingErrorAlert.startTime, existingErrorAlert.errorMessage!!) 
- ) - alert.copy( - id = existingErrorAlert.id, - errorHistory = newErrorHistory, - startTime = currentTime, - lastNotificationTime = currentTime - ) - } else { - existingErrorAlert.copy(lastNotificationTime = currentTime) - } + alert = + if (alert.errorMessage != existingErrorAlert.errorMessage) { + var newErrorHistory = + existingErrorAlert.errorHistory.update( + AlertError(existingErrorAlert.startTime, existingErrorAlert.errorMessage!!), + ) + alert.copy( + id = existingErrorAlert.id, + errorHistory = newErrorHistory, + startTime = currentTime, + lastNotificationTime = currentTime, + ) + } else { + existingErrorAlert.copy(lastNotificationTime = currentTime) + } } - val alertIndexRequest = IndexRequest(monitor.dataSources.alertsIndex) - .routing(alert.monitorId) - .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) - .opType(DocWriteRequest.OpType.INDEX) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .id(alert.id) + val alertIndexRequest = + IndexRequest(monitor.dataSources.alertsIndex) + .routing(alert.monitorId) + .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) + .opType(DocWriteRequest.OpType.INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .id(alert.id) val indexResponse: IndexResponse = client.suspendUntil { index(alertIndexRequest, it) } logger.debug("Monitor error Alert successfully upserted. 
Op result: ${indexResponse.result}") @@ -537,18 +626,19 @@ class AlertService( suspend fun clearMonitorErrorAlert(monitor: Monitor) { val currentTime = Instant.now() try { - val searchRequest = SearchRequest("${monitor.dataSources.alertsIndex}") - .source( - SearchSourceBuilder() - .size(MAX_SEARCH_SIZE) - .sort(Alert.START_TIME_FIELD, SortOrder.DESC) - .query( - QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitor.id)) - .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR.name)) - ) - - ) + val searchRequest = + SearchRequest("${monitor.dataSources.alertsIndex}") + .source( + SearchSourceBuilder() + .size(MAX_SEARCH_SIZE) + .sort(Alert.START_TIME_FIELD, SortOrder.DESC) + .query( + QueryBuilders + .boolQuery() + .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitor.id)) + .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR.name)), + ), + ) searchRequest.cancelAfterTimeInterval = ALERTS_SEARCH_TIMEOUT val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } // If there's no error alert present, there's nothing to clear. We can stop here. 
@@ -565,20 +655,23 @@ class AlertService( val xcp = contentParser(hit.sourceRef) val existingErrorAlert = Alert.parse(xcp, hit.id, hit.version) - val updatedAlert = existingErrorAlert.copy( - endTime = currentTime - ) + val updatedAlert = + existingErrorAlert.copy( + endTime = currentTime, + ) - indexRequests += IndexRequest(monitor.dataSources.alertsIndex) - .routing(monitor.id) - .id(updatedAlert.id) - .source(updatedAlert.toXContentWithUser(XContentFactory.jsonBuilder())) - .opType(DocWriteRequest.OpType.INDEX) + indexRequests += + IndexRequest(monitor.dataSources.alertsIndex) + .routing(monitor.id) + .id(updatedAlert.id) + .source(updatedAlert.toXContentWithUser(XContentFactory.jsonBuilder())) + .opType(DocWriteRequest.OpType.INDEX) } - val bulkResponse: BulkResponse = client.suspendUntil { - bulk(BulkRequest().add(indexRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) - } + val bulkResponse: BulkResponse = + client.suspendUntil { + bulk(BulkRequest().add(indexRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) + } if (bulkResponse.hasFailures()) { bulkResponse.items.forEach { item -> if (item.isFailed) { @@ -597,20 +690,25 @@ class AlertService( * Moves already cleared "error alerts" to history index. * Error Alert is cleared when endTime timestamp is set, on first successful run after failed run * */ - suspend fun moveClearedErrorAlertsToHistory(monitorId: String, alertIndex: String, alertHistoryIndex: String) { + suspend fun moveClearedErrorAlertsToHistory( + monitorId: String, + alertIndex: String, + alertHistoryIndex: String, + ) { try { - val searchRequest = SearchRequest(alertIndex) - .source( - SearchSourceBuilder() - .size(MAX_SEARCH_SIZE) - .query( - QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitorId)) - .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR.name)) - .must(QueryBuilders.existsQuery(Alert.END_TIME_FIELD)) - ) - .version(true) // Do we need this? 
- ) + val searchRequest = + SearchRequest(alertIndex) + .source( + SearchSourceBuilder() + .size(MAX_SEARCH_SIZE) + .query( + QueryBuilders + .boolQuery() + .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitorId)) + .must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.ERROR.name)) + .must(QueryBuilders.existsQuery(Alert.END_TIME_FIELD)), + ).version(true), // Do we need this? + ) searchRequest.cancelAfterTimeInterval = ALERTS_SEARCH_TIMEOUT val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } @@ -634,13 +732,14 @@ class AlertService( .version(hit.version) .versionType(VersionType.EXTERNAL_GTE) .id(hit.id) - .timeout(MonitorRunnerService.monitorCtx.indexTimeout) + .timeout(MonitorRunnerService.monitorCtx.indexTimeout), ) } - val bulkResponse: BulkResponse = client.suspendUntil { - bulk(BulkRequest().add(copyRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) - } + val bulkResponse: BulkResponse = + client.suspendUntil { + bulk(BulkRequest().add(copyRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) + } if (bulkResponse.hasFailures()) { bulkResponse.items.forEach { item -> if (item.isFailed) { @@ -654,19 +753,21 @@ class AlertService( val alertIds = searchResponse.hits.hits.map { it.id } - val deleteResponse: BulkByScrollResponse = suspendCoroutine { cont -> - DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) - .source(alertIndex) - .filter(QueryBuilders.termsQuery("_id", alertIds)) - .refresh(true) - .timeout(ALERTS_SEARCH_TIMEOUT) - .execute( - object : ActionListener { - override fun onResponse(response: BulkByScrollResponse) = cont.resume(response) - override fun onFailure(t: Exception) = cont.resumeWithException(t) - } - ) - } + val deleteResponse: BulkByScrollResponse = + suspendCoroutine { cont -> + DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) + .source(alertIndex) + .filter(QueryBuilders.termsQuery("_id", alertIds)) + .refresh(true) + 
.timeout(ALERTS_SEARCH_TIMEOUT) + .execute( + object : ActionListener { + override fun onResponse(response: BulkByScrollResponse) = cont.resume(response) + + override fun onFailure(t: Exception) = cont.resumeWithException(t) + }, + ) + } deleteResponse.bulkFailures.forEach { logger.error("Failed deleting alert while moving cleared alerts: [${it.id}] cause: [${it.cause}] ") } @@ -680,79 +781,87 @@ class AlertService( alerts: List, retryPolicy: BackoffPolicy, allowUpdatingAcknowledgedAlert: Boolean = false, - routingId: String // routing is mandatory and set as monitor id. for workflow chained alerts we pass workflow id as routing + routingId: String, // routing is mandatory and set as monitor id. for workflow chained alerts we pass workflow id as routing ) { val alertsIndex = dataSources.alertsIndex val alertsHistoryIndex = dataSources.alertsHistoryIndex val commentIdsToDelete = mutableListOf() - var requestsToRetry = alerts.flatMap { alert -> - // We don't want to set the version when saving alerts because the MonitorRunner has first priority when writing alerts. - // In the rare event that a user acknowledges an alert between when it's read and when it's written - // back we're ok if that acknowledgement is lost. It's easier to get the user to retry than for the runner to - // spend time reloading the alert and writing it back. 
- when (alert.state) { - Alert.State.ACTIVE, Alert.State.ERROR -> { - listOf>( - IndexRequest(alertsIndex) - .routing(routingId) - .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) - .id(if (alert.id != Alert.NO_ID) alert.id else null) - ) - } - Alert.State.ACKNOWLEDGED -> { - // Allow ACKNOWLEDGED Alerts to be updated for Bucket-Level Monitors since de-duped Alerts can be ACKNOWLEDGED - // and updated by the MonitorRunner - if (allowUpdatingAcknowledgedAlert) { + var requestsToRetry = + alerts.flatMap { alert -> + // We don't want to set the version when saving alerts because the MonitorRunner has first priority when writing alerts. + // In the rare event that a user acknowledges an alert between when it's read and when it's written + // back we're ok if that acknowledgement is lost. It's easier to get the user to retry than for the runner to + // spend time reloading the alert and writing it back. + when (alert.state) { + Alert.State.ACTIVE, Alert.State.ERROR -> { listOf>( IndexRequest(alertsIndex) .routing(routingId) .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) - .id(if (alert.id != Alert.NO_ID) alert.id else null) + .id(if (alert.id != Alert.NO_ID) alert.id else null), ) - } else { - throw IllegalStateException("Unexpected attempt to save ${alert.state} alert: $alert") } - } - Alert.State.AUDIT -> { - val index = if (alertIndices.isAlertHistoryEnabled()) { - dataSources.alertsHistoryIndex - } else dataSources.alertsIndex - listOf>( - IndexRequest(index) - .routing(routingId) - .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) - .id(if (alert.id != Alert.NO_ID) alert.id else null) - ) - } - Alert.State.DELETED -> { - throw IllegalStateException("Unexpected attempt to save ${alert.state} alert: $alert") - } - Alert.State.COMPLETED -> { - listOfNotNull>( - DeleteRequest(alertsIndex, alert.id) - .routing(routingId), - if (alertIndices.isAlertHistoryEnabled()) { - // Only add completed alert to history index if 
history is enabled - IndexRequest(alertsHistoryIndex) - .routing(routingId) - .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) - .id(alert.id) + + Alert.State.ACKNOWLEDGED -> { + // Allow ACKNOWLEDGED Alerts to be updated for Bucket-Level Monitors since de-duped Alerts can be ACKNOWLEDGED + // and updated by the MonitorRunner + if (allowUpdatingAcknowledgedAlert) { + listOf>( + IndexRequest(alertsIndex) + .routing(routingId) + .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) + .id(if (alert.id != Alert.NO_ID) alert.id else null), + ) } else { - // Otherwise, prepare the Alert's comments for deletion, and don't include - // a request to index the Alert to an Alert history index. - // The delete request can't be added to the list of DocWriteRequests because - // Comments are stored in aliased history indices, not a concrete Comments - // index like Alerts. A DeleteBy request will be used to delete Comments, instead - // of a regular Delete request - commentIdsToDelete.addAll(CommentsUtils.getCommentIDsByAlertIDs(client, listOf(alert.id))) - null + throw IllegalStateException("Unexpected attempt to save ${alert.state} alert: $alert") } - ) + } + + Alert.State.AUDIT -> { + val index = + if (alertIndices.isAlertHistoryEnabled()) { + dataSources.alertsHistoryIndex + } else { + dataSources.alertsIndex + } + listOf>( + IndexRequest(index) + .routing(routingId) + .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) + .id(if (alert.id != Alert.NO_ID) alert.id else null), + ) + } + + Alert.State.DELETED -> { + throw IllegalStateException("Unexpected attempt to save ${alert.state} alert: $alert") + } + + Alert.State.COMPLETED -> { + listOfNotNull>( + DeleteRequest(alertsIndex, alert.id) + .routing(routingId), + if (alertIndices.isAlertHistoryEnabled()) { + // Only add completed alert to history index if history is enabled + IndexRequest(alertsHistoryIndex) + .routing(routingId) + 
.source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) + .id(alert.id) + } else { + // Otherwise, prepare the Alert's comments for deletion, and don't include + // a request to index the Alert to an Alert history index. + // The delete request can't be added to the list of DocWriteRequests because + // Comments are stored in aliased history indices, not a concrete Comments + // index like Alerts. A DeleteBy request will be used to delete Comments, instead + // of a regular Delete request + commentIdsToDelete.addAll(CommentsUtils.getCommentIDsByAlertIDs(client, listOf(alert.id))) + null + }, + ) + } } } - } if (requestsToRetry.isEmpty()) return // Retry Bulk requests if there was any 429 response @@ -760,8 +869,10 @@ class AlertService( val bulkRequest = BulkRequest().add(requestsToRetry).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) val bulkResponse: BulkResponse = client.suspendUntil { client.bulk(bulkRequest, it) } val failedResponses = (bulkResponse.items ?: arrayOf()).filter { it.isFailed } - requestsToRetry = failedResponses.filter { it.status() == RestStatus.TOO_MANY_REQUESTS } - .map { bulkRequest.requests()[it.itemId] as IndexRequest } + requestsToRetry = + failedResponses + .filter { it.status() == RestStatus.TOO_MANY_REQUESTS } + .map { bulkRequest.requests()[it.itemId] as IndexRequest } if (requestsToRetry.isNotEmpty()) { val retryCause = failedResponses.first { it.status() == RestStatus.TOO_MANY_REQUESTS }.failure.cause @@ -781,23 +892,32 @@ class AlertService( * The Alerts are required with their indexed ID so that when the new Alerts are updated after the Action execution, * the ID is available for the index request so that the existing Alert can be updated, instead of creating a duplicate Alert document. 
*/ - suspend fun saveNewAlerts(dataSources: DataSources, alerts: List, retryPolicy: BackoffPolicy): List { + suspend fun saveNewAlerts( + dataSources: DataSources, + alerts: List, + retryPolicy: BackoffPolicy, + ): List { val savedAlerts = mutableListOf() var alertsBeingIndexed = alerts - var requestsToRetry: MutableList = alerts.map { alert -> - if (alert.state != Alert.State.ACTIVE && alert.state != Alert.State.AUDIT) { - throw IllegalStateException("Unexpected attempt to save new alert [$alert] with state [${alert.state}]") - } - if (alert.id != Alert.NO_ID) { - throw IllegalStateException("Unexpected attempt to save new alert [$alert] with an existing alert ID [${alert.id}]") - } - val alertIndex = if (alert.state == Alert.State.AUDIT && alertIndices.isAlertHistoryEnabled()) { - dataSources.alertsHistoryIndex - } else dataSources.alertsIndex - IndexRequest(alertIndex) - .routing(alert.monitorId) - .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) - }.toMutableList() + var requestsToRetry: MutableList = + alerts + .map { alert -> + if (alert.state != Alert.State.ACTIVE && alert.state != Alert.State.AUDIT) { + throw IllegalStateException("Unexpected attempt to save new alert [$alert] with state [${alert.state}]") + } + if (alert.id != Alert.NO_ID) { + throw IllegalStateException("Unexpected attempt to save new alert [$alert] with an existing alert ID [${alert.id}]") + } + val alertIndex = + if (alert.state == Alert.State.AUDIT && alertIndices.isAlertHistoryEnabled()) { + dataSources.alertsHistoryIndex + } else { + dataSources.alertsIndex + } + IndexRequest(alertIndex) + .routing(alert.monitorId) + .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) + }.toMutableList() if (requestsToRetry.isEmpty()) return listOf() @@ -839,10 +959,13 @@ class AlertService( } private fun contentParser(bytesReference: BytesReference): XContentParser { - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - 
bytesReference, XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + bytesReference, + XContentType.JSON, + ) XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) return xcp } @@ -853,22 +976,30 @@ class AlertService( * @param monitorId The Monitor to get Alerts for * @param size The number of search hits (Alerts) to return */ - private suspend fun searchAlerts(monitor: Monitor, size: Int, workflowRunContext: WorkflowRunContext?): SearchResponse { + private suspend fun searchAlerts( + monitor: Monitor, + size: Int, + workflowRunContext: WorkflowRunContext?, + ): SearchResponse { val monitorId = monitor.id val alertIndex = monitor.dataSources.alertsIndex - val queryBuilder = QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitorId)) + val queryBuilder = + QueryBuilders + .boolQuery() + .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitorId)) if (workflowRunContext != null) { queryBuilder.must(QueryBuilders.termQuery(Alert.WORKFLOW_ID_FIELD, workflowRunContext.workflowId)) } - val searchSourceBuilder = SearchSourceBuilder() - .size(size) - .query(queryBuilder) - - val searchRequest = SearchRequest(alertIndex) - .routing(monitorId) - .source(searchSourceBuilder) + val searchSourceBuilder = + SearchSourceBuilder() + .size(size) + .query(queryBuilder) + + val searchRequest = + SearchRequest(alertIndex) + .routing(monitorId) + .source(searchSourceBuilder) val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } if (searchResponse.status() != RestStatus.OK) { throw (searchResponse.firstFailureOrNull()?.cause ?: RuntimeException("Unknown error loading alerts")) @@ -891,16 +1022,20 @@ class AlertService( val workflowId = workflow.id val alertIndex = dataSources.alertsIndex - val queryBuilder = QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery(Alert.WORKFLOW_ID_FIELD, 
workflowId)) - .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, "")) - val searchSourceBuilder = SearchSourceBuilder() - .size(size) - .query(queryBuilder) - - val searchRequest = SearchRequest(alertIndex) - .routing(workflowId) - .source(searchSourceBuilder) + val queryBuilder = + QueryBuilders + .boolQuery() + .must(QueryBuilders.termQuery(Alert.WORKFLOW_ID_FIELD, workflowId)) + .must(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, "")) + val searchSourceBuilder = + SearchSourceBuilder() + .size(size) + .query(queryBuilder) + + val searchRequest = + SearchRequest(alertIndex) + .routing(workflowId) + .source(searchSourceBuilder) val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } if (searchResponse.status() != RestStatus.OK) { throw (searchResponse.firstFailureOrNull()?.cause ?: RuntimeException("Unknown error loading alerts")) @@ -908,13 +1043,12 @@ class AlertService( return searchResponse } - private fun List?.update(alertError: AlertError?): List { - return when { + private fun List?.update(alertError: AlertError?): List = + when { this == null && alertError == null -> emptyList() this != null && alertError == null -> this this == null && alertError != null -> listOf(alertError) this != null && alertError != null -> (listOf(alertError) + this).take(10) else -> throw IllegalStateException("Unreachable code reached!") } - } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt index 5f03dfa5f..e5c299316 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingPlugin.kt @@ -40,7 +40,7 @@ import org.opensearch.alerting.core.settings.ScheduledJobSettings import org.opensearch.alerting.modelv2.MonitorV2 import org.opensearch.alerting.remote.monitors.RemoteMonitorRegistry import org.opensearch.alerting.resthandler.RestAcknowledgeAlertAction 
-import org.opensearch.alerting.resthandler.RestAcknowledgeChainedAlertAction +import org.opensearch.alerting.resthandler.RestAcknowledgeChainedAlertsAction import org.opensearch.alerting.resthandler.RestDeleteAlertingCommentAction import org.opensearch.alerting.resthandler.RestDeleteMonitorAction import org.opensearch.alerting.resthandler.RestDeleteWorkflowAction @@ -88,7 +88,7 @@ import org.opensearch.alerting.transport.TransportGetAlertsAction import org.opensearch.alerting.transport.TransportGetDestinationsAction import org.opensearch.alerting.transport.TransportGetEmailAccountAction import org.opensearch.alerting.transport.TransportGetEmailGroupAction -import org.opensearch.alerting.transport.TransportGetFindingsSearchAction +import org.opensearch.alerting.transport.TransportGetFindingsAction import org.opensearch.alerting.transport.TransportGetMonitorAction import org.opensearch.alerting.transport.TransportGetRemoteIndexesAction import org.opensearch.alerting.transport.TransportGetWorkflowAction @@ -168,9 +168,14 @@ import java.util.function.Supplier * It also adds [Monitor.XCONTENT_REGISTRY], [SearchInput.XCONTENT_REGISTRY], [QueryLevelTrigger.XCONTENT_REGISTRY], * [BucketLevelTrigger.XCONTENT_REGISTRY], [ClusterMetricsInput.XCONTENT_REGISTRY] to the [NamedXContentRegistry] so that we are able to deserialize the custom named objects. 
*/ -internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, ReloadablePlugin, - SearchPlugin, SystemIndexPlugin, PercolatorPluginExt() { - +internal class AlertingPlugin : + PercolatorPluginExt(), + PainlessExtension, + ActionPlugin, + ScriptPlugin, + ReloadablePlugin, + SearchPlugin, + SystemIndexPlugin { override fun getContextAllowlists(): Map, List> { val whitelist = AllowlistLoader.loadFromResourceFiles(javaClass, "org.opensearch.alerting.txt") return mapOf(TriggerScript.CONTEXT to listOf(whitelist)) @@ -178,19 +183,33 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R companion object { @JvmField val OPEN_SEARCH_DASHBOARDS_USER_AGENT = "OpenSearch-Dashboards" + @JvmField val UI_METADATA_EXCLUDE = arrayOf("monitor.${Monitor.UI_METADATA_FIELD}") + @JvmField val MONITOR_BASE_URI = "/_plugins/_alerting/monitors" + @JvmField val MONITOR_V2_BASE_URI = "/_plugins/_alerting/v2/monitors" + @JvmField val WORKFLOW_BASE_URI = "/_plugins/_alerting/workflows" + @JvmField val REMOTE_BASE_URI = "/_plugins/_alerting/remote" + @JvmField val DESTINATION_BASE_URI = "/_plugins/_alerting/destinations" + @JvmField val LEGACY_OPENDISTRO_MONITOR_BASE_URI = "/_opendistro/_alerting/monitors" + @JvmField val LEGACY_OPENDISTRO_DESTINATION_BASE_URI = "/_opendistro/_alerting/destinations" + @JvmField val EMAIL_ACCOUNT_BASE_URI = "$DESTINATION_BASE_URI/email_accounts" + @JvmField val EMAIL_GROUP_BASE_URI = "$DESTINATION_BASE_URI/email_groups" + @JvmField val LEGACY_OPENDISTRO_EMAIL_ACCOUNT_BASE_URI = "$LEGACY_OPENDISTRO_DESTINATION_BASE_URI/email_accounts" + @JvmField val LEGACY_OPENDISTRO_EMAIL_GROUP_BASE_URI = "$LEGACY_OPENDISTRO_DESTINATION_BASE_URI/email_groups" + @JvmField val FINDING_BASE_URI = "/_plugins/_alerting/findings" + @JvmField val COMMENTS_BASE_URI = "/_plugins/_alerting/comments" @JvmField val ALERTING_JOB_TYPES = listOf("monitor", "workflow", "monitor_v2") @@ -217,9 +236,9 @@ internal class AlertingPlugin : 
PainlessExtension, ActionPlugin, ScriptPlugin, R indexScopedSettings: IndexScopedSettings, settingsFilter: SettingsFilter, indexNameExpressionResolver: IndexNameExpressionResolver?, - nodesInCluster: Supplier - ): List { - return listOf( + nodesInCluster: Supplier, + ): List = + listOf( // Alerting V1 RestGetMonitorAction(), RestDeleteMonitorAction(), @@ -229,7 +248,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R RestExecuteMonitorAction(), RestExecuteWorkflowAction(), RestAcknowledgeAlertAction(), - RestAcknowledgeChainedAlertAction(), + RestAcknowledgeChainedAlertsAction(), RestScheduledJobStatsHandler("_alerting"), RestSearchEmailAccountAction(), RestGetEmailAccountAction(), @@ -245,19 +264,17 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R RestIndexAlertingCommentAction(), RestSearchAlertingCommentAction(), RestDeleteAlertingCommentAction(), - // Alerting V2 RestIndexMonitorV2Action(), RestExecuteMonitorV2Action(), RestDeleteMonitorV2Action(), RestGetMonitorV2Action(), RestSearchMonitorV2Action(settings, clusterService), - RestGetAlertsV2Action() + RestGetAlertsV2Action(), ) - } - override fun getActions(): List> { - return listOf( + override fun getActions(): List> = + listOf( // Alerting V1 ActionPlugin.ActionHandler(ScheduledJobsStatsAction.INSTANCE, ScheduledJobsStatsTransportAction::class.java), ActionPlugin.ActionHandler(AlertingActions.INDEX_MONITOR_ACTION_TYPE, TransportIndexMonitorAction::class.java), @@ -267,7 +284,8 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R ActionPlugin.ActionHandler(AlertingActions.DELETE_MONITOR_ACTION_TYPE, TransportDeleteMonitorAction::class.java), ActionPlugin.ActionHandler(AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_TYPE, TransportAcknowledgeAlertAction::class.java), ActionPlugin.ActionHandler( - AlertingActions.ACKNOWLEDGE_CHAINED_ALERTS_ACTION_TYPE, TransportAcknowledgeChainedAlertAction::class.java + 
AlertingActions.ACKNOWLEDGE_CHAINED_ALERTS_ACTION_TYPE, + TransportAcknowledgeChainedAlertAction::class.java, ), ActionPlugin.ActionHandler(GetEmailAccountAction.INSTANCE, TransportGetEmailAccountAction::class.java), ActionPlugin.ActionHandler(SearchEmailAccountAction.INSTANCE, TransportSearchEmailAccountAction::class.java), @@ -276,7 +294,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R ActionPlugin.ActionHandler(GetDestinationsAction.INSTANCE, TransportGetDestinationsAction::class.java), ActionPlugin.ActionHandler(AlertingActions.GET_ALERTS_ACTION_TYPE, TransportGetAlertsAction::class.java), ActionPlugin.ActionHandler(AlertingActions.GET_WORKFLOW_ALERTS_ACTION_TYPE, TransportGetWorkflowAlertsAction::class.java), - ActionPlugin.ActionHandler(AlertingActions.GET_FINDINGS_ACTION_TYPE, TransportGetFindingsSearchAction::class.java), + ActionPlugin.ActionHandler(AlertingActions.GET_FINDINGS_ACTION_TYPE, TransportGetFindingsAction::class.java), ActionPlugin.ActionHandler(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, TransportIndexWorkflowAction::class.java), ActionPlugin.ActionHandler(AlertingActions.GET_WORKFLOW_ACTION_TYPE, TransportGetWorkflowAction::class.java), ActionPlugin.ActionHandler(AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, TransportDeleteWorkflowAction::class.java), @@ -286,19 +304,17 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R ActionPlugin.ActionHandler(ExecuteWorkflowAction.INSTANCE, TransportExecuteWorkflowAction::class.java), ActionPlugin.ActionHandler(GetRemoteIndexesAction.INSTANCE, TransportGetRemoteIndexesAction::class.java), ActionPlugin.ActionHandler(DocLevelMonitorFanOutAction.INSTANCE, TransportDocLevelMonitorFanOutAction::class.java), - // Alerting V2 ActionPlugin.ActionHandler(IndexMonitorV2Action.INSTANCE, TransportIndexMonitorV2Action::class.java), ActionPlugin.ActionHandler(GetMonitorV2Action.INSTANCE, TransportGetMonitorV2Action::class.java), 
ActionPlugin.ActionHandler(SearchMonitorV2Action.INSTANCE, TransportSearchMonitorV2Action::class.java), ActionPlugin.ActionHandler(DeleteMonitorV2Action.INSTANCE, TransportDeleteMonitorV2Action::class.java), ActionPlugin.ActionHandler(ExecuteMonitorV2Action.INSTANCE, TransportExecuteMonitorV2Action::class.java), - ActionPlugin.ActionHandler(GetAlertsV2Action.INSTANCE, TransportGetAlertsV2Action::class.java) + ActionPlugin.ActionHandler(GetAlertsV2Action.INSTANCE, TransportGetAlertsV2Action::class.java), ) - } - override fun getNamedXContent(): List { - return listOf( + override fun getNamedXContent(): List = + listOf( Monitor.XCONTENT_REGISTRY, MonitorV2.XCONTENT_REGISTRY, SearchInput.XCONTENT_REGISTRY, @@ -309,9 +325,8 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R DocumentLevelTrigger.XCONTENT_REGISTRY, ChainedAlertTrigger.XCONTENT_REGISTRY, RemoteMonitorTrigger.XCONTENT_REGISTRY, - Workflow.XCONTENT_REGISTRY + Workflow.XCONTENT_REGISTRY, ) - } override fun createComponents( client: Client, @@ -324,7 +339,7 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R nodeEnvironment: NodeEnvironment, namedWriteableRegistry: NamedWriteableRegistry, indexNameExpressionResolver: IndexNameExpressionResolver, - repositoriesServiceSupplier: Supplier + repositoriesServiceSupplier: Supplier, ): Collection { // Need to figure out how to use the OpenSearch DI classes rather than handwiring things here. 
val settings = environment.settings() @@ -333,36 +348,36 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R alertV2Indices = AlertV2Indices(settings, client, threadPool, clusterService) val alertService = AlertService(client, xContentRegistry, alertIndices) val triggerService = TriggerService(scriptService) - runner = MonitorRunnerService - .registerClusterService(clusterService) - .registerClient(client) - .registerNamedXContentRegistry(xContentRegistry) - .registerindexNameExpressionResolver(indexNameExpressionResolver) - .registerScriptService(scriptService) - .registerSettings(settings) - .registerThreadPool(threadPool) - .registerAlertIndices(alertIndices) - .registerAlertV2Indices(alertV2Indices) - .registerInputService( - InputService( - client, - scriptService, - namedWriteableRegistry, - xContentRegistry, - clusterService, - settings, - indexNameExpressionResolver - ) - ) - .registerTriggerService(triggerService) - .registerAlertService(alertService) - .registerDocLevelMonitorQueries(DocLevelMonitorQueries(client, clusterService)) - .registerJvmStats(JvmStats.jvmStats()) - .registerWorkflowService(WorkflowService(client, xContentRegistry)) - .registerLockService(lockService) - .registerConsumers() - .registerDestinationSettings() - .registerRemoteMonitors(monitorTypeToMonitorRunners) + runner = + MonitorRunnerService + .registerClusterService(clusterService) + .registerClient(client) + .registerNamedXContentRegistry(xContentRegistry) + .registerindexNameExpressionResolver(indexNameExpressionResolver) + .registerScriptService(scriptService) + .registerSettings(settings) + .registerThreadPool(threadPool) + .registerAlertIndices(alertIndices) + .registerAlertV2Indices(alertV2Indices) + .registerInputService( + InputService( + client, + scriptService, + namedWriteableRegistry, + xContentRegistry, + clusterService, + settings, + indexNameExpressionResolver, + ), + ).registerTriggerService(triggerService) + 
.registerAlertService(alertService) + .registerDocLevelMonitorQueries(DocLevelMonitorQueries(client, clusterService)) + .registerJvmStats(JvmStats.jvmStats()) + .registerWorkflowService(WorkflowService(client, xContentRegistry)) + .registerLockService(lockService) + .registerConsumers() + .registerDestinationSettings() + .registerRemoteMonitors(monitorTypeToMonitorRunners) scheduledJobIndices = ScheduledJobIndices(client.admin(), clusterService) commentsIndices = CommentsIndices(environment.settings(), client, threadPool, clusterService) docLevelMonitorQueries = DocLevelMonitorQueries(client, clusterService) @@ -377,14 +392,14 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R client, clusterService, xContentRegistry, - settings + settings, ) WorkflowMetadataService.initialize( client, clusterService, xContentRegistry, - settings + settings, ) DeleteMonitorService.initialize(client, lockService) @@ -400,12 +415,12 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R alertV2Mover, lockService, alertService, - triggerService + triggerService, ) } - override fun getSettings(): List> { - return listOf( + override fun getSettings(): List> = + listOf( ScheduledJobSettings.REQUEST_TIMEOUT, ScheduledJobSettings.SWEEP_BACKOFF_MILLIS, ScheduledJobSettings.SWEEP_BACKOFF_RETRY_COUNT, @@ -497,9 +512,8 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R AlertingSettings.ALERT_V2_PER_RESULT_TRIGGER_MAX_ALERTS, AlertingSettings.NOTIFICATION_SUBJECT_SOURCE_MAX_LENGTH, AlertingSettings.NOTIFICATION_MESSAGE_SOURCE_MAX_LENGTH, - AlertingV2Settings.ALERTING_V2_ENABLED + AlertingV2Settings.ALERTING_V2_ENABLED, ) - } override fun onIndexModule(indexModule: IndexModule) { if (indexModule.index.name == ScheduledJob.SCHEDULED_JOBS_INDEX) { @@ -507,32 +521,29 @@ internal class AlertingPlugin : PainlessExtension, ActionPlugin, ScriptPlugin, R } } - override fun getContexts(): List> { - return 
listOf(TriggerScript.CONTEXT) - } + override fun getContexts(): List> = listOf(TriggerScript.CONTEXT) - override fun getSystemIndexDescriptors(settings: Settings): Collection { - return listOf( + override fun getSystemIndexDescriptors(settings: Settings): Collection = + listOf( SystemIndexDescriptor(ALL_ALERT_INDEX_PATTERN, "Alerting Plugin system index pattern"), SystemIndexDescriptor(SCHEDULED_JOBS_INDEX, "Alerting Plugin Configuration index"), SystemIndexDescriptor(ALL_COMMENTS_INDEX_PATTERN, "Alerting Comments system index pattern"), - SystemIndexDescriptor(ALL_ALERT_V2_INDEX_PATTERN, "Alerting V2 Alerts index pattern") + SystemIndexDescriptor(ALL_ALERT_V2_INDEX_PATTERN, "Alerting V2 Alerts index pattern"), ) - } override fun reload(settings: Settings) { runner.reloadDestinationSettings(settings) } - override fun getPipelineAggregations(): List { - return listOf( - SearchPlugin.PipelineAggregationSpec( - BucketSelectorExtAggregationBuilder.NAME, - { sin: StreamInput -> BucketSelectorExtAggregationBuilder(sin) }, - { parser: XContentParser, agg_name: String -> BucketSelectorExtAggregationBuilder.parse(agg_name, parser) } - ).addResultReader({ sin: StreamInput -> BucketSelectorIndices(sin) }) + override fun getPipelineAggregations(): List = + listOf( + SearchPlugin + .PipelineAggregationSpec( + BucketSelectorExtAggregationBuilder.NAME, + { sin: StreamInput -> BucketSelectorExtAggregationBuilder(sin) }, + { parser: XContentParser, agg_name: String -> BucketSelectorExtAggregationBuilder.parse(agg_name, parser) }, + ).addResultReader({ sin: StreamInput -> BucketSelectorIndices(sin) }), ) - } override fun loadExtensions(loader: ExtensiblePlugin.ExtensionLoader) { for (monitorExtension in loader.loadExtensions(RemoteMonitorRunnerExtension::class.java)) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingV2Utils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingV2Utils.kt index 8a3936eb6..ec470c143 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/AlertingV2Utils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/AlertingV2Utils.kt @@ -49,12 +49,12 @@ object AlertingV2Utils { return IllegalStateException( "The ID given corresponds to an Alerting V2 Monitor, but a V1 Monitor was expected. " + "If you wish to operate on a V2 Monitor (e.g. PPL Monitor), please use " + - "the Alerting V2 APIs with endpoint prefix: $MONITOR_V2_BASE_URI." + "the Alerting V2 APIs with endpoint prefix: $MONITOR_V2_BASE_URI.", ) } else if (scheduledJob !is Monitor && scheduledJob !is Workflow) { return IllegalStateException( "The ID given corresponds to a scheduled job of unknown type: ${scheduledJob.javaClass.name}. " + - "Please validate the ID and ensure it corresponds to a valid Monitor." + "Please validate the ID and ensure it corresponds to a valid Monitor.", ) } return null @@ -67,12 +67,12 @@ object AlertingV2Utils { return IllegalStateException( "The ID given corresponds to an Alerting V1 Monitor, but a V2 Monitor was expected. " + "If you wish to operate on a V1 Monitor (e.g. Per Query, Per Document, etc), please use " + - "the Alerting V1 APIs with endpoint prefix: $MONITOR_BASE_URI." + "the Alerting V1 APIs with endpoint prefix: $MONITOR_BASE_URI.", ) } else if (scheduledJob !is MonitorV2) { return IllegalStateException( "The ID given corresponds to a scheduled job of unknown type: ${scheduledJob.javaClass.name}. " + - "Please validate the ID and ensure it corresponds to a valid Monitor." 
+ "Please validate the ID and ensure it corresponds to a valid Monitor.", ) } return null @@ -96,15 +96,16 @@ object AlertingV2Utils { // Used in Get and Search monitor functionalities to return a "no results" response fun getEmptySearchResponse(): SearchResponse { - val internalSearchResponse = InternalSearchResponse( - SearchHits(emptyArray(), TotalHits(0L, Relation.EQUAL_TO), 0.0f), - InternalAggregations.from(Collections.emptyList()), - Suggest(Collections.emptyList()), - SearchProfileShardResults(Collections.emptyMap()), - false, - false, - 0 - ) + val internalSearchResponse = + InternalSearchResponse( + SearchHits(emptyArray(), TotalHits(0L, Relation.EQUAL_TO), 0.0f), + InternalAggregations.from(Collections.emptyList()), + Suggest(Collections.emptyList()), + SearchProfileShardResults(Collections.emptyMap()), + false, + false, + 0, + ) return SearchResponse( internalSearchResponse, @@ -114,7 +115,7 @@ object AlertingV2Utils { 0, 0, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY + SearchResponse.Clusters.EMPTY, ) } @@ -122,7 +123,7 @@ object AlertingV2Utils { action: Action, monitorCtx: MonitorRunnerExecutionContext, subject: String?, - message: String + message: String, ): String { val config = getConfigForNotificationAction(action, monitorCtx) if (config.destination == null && config.channel == null) { @@ -137,7 +138,7 @@ object AlertingV2Utils { if (config.destination?.isAllowed(monitorCtx.allowList) == false) { throw IllegalStateException( - "Monitor contains a Destination type that is not allowed: ${config.destination.type}" + "Monitor contains a Destination type that is not allowed: ${config.destination.type}", ) } @@ -146,7 +147,7 @@ object AlertingV2Utils { ?.sendNotification( monitorCtx.client!!, config.channel.getTitle(subject), - message + message, ) ?: actionResponseContent actionResponseContent = config.destination @@ -165,7 +166,7 @@ object AlertingV2Utils { */ private suspend fun getConfigForNotificationAction( action: Action, - 
monitorCtx: MonitorRunnerExecutionContext + monitorCtx: MonitorRunnerExecutionContext, ): NotificationActionConfigs { var destination: Destination? = null var notificationPermissionException: Exception? = null @@ -179,39 +180,45 @@ object AlertingV2Utils { // If the channel was not found, try to retrieve the Destination if (channel == null) { - destination = try { - val table = Table( - "asc", - "destination.name.keyword", - null, - 1, - 0, + destination = + try { + val table = + Table( + "asc", + "destination.name.keyword", + null, + 1, + 0, + null, + ) + val getDestinationsRequest = + GetDestinationsRequest( + action.destinationId, + 0L, + null, + table, + "ALL", + ) + + val getDestinationsResponse: GetDestinationsResponse = + monitorCtx.client!!.suspendUntil { + monitorCtx.client!!.execute(GetDestinationsAction.INSTANCE, getDestinationsRequest, it) + } + getDestinationsResponse.destinations.firstOrNull() + } catch (e: IllegalStateException) { + // Catching the exception thrown when the Destination was not found so the NotificationActionConfigs object can be returned null - ) - val getDestinationsRequest = GetDestinationsRequest( - action.destinationId, - 0L, - null, - table, - "ALL" - ) - - val getDestinationsResponse: GetDestinationsResponse = monitorCtx.client!!.suspendUntil { - monitorCtx.client!!.execute(GetDestinationsAction.INSTANCE, getDestinationsRequest, it) + } catch (e: OpenSearchSecurityException) { + if (notificationPermissionException != null) { + throw notificationPermissionException + } else { + throw e + } } - getDestinationsResponse.destinations.firstOrNull() - } catch (e: IllegalStateException) { - // Catching the exception thrown when the Destination was not found so the NotificationActionConfigs object can be returned - null - } catch (e: OpenSearchSecurityException) { - if (notificationPermissionException != null) - throw notificationPermissionException - else - throw e - } - if (destination == null && notificationPermissionException != 
null) + if (destination == null && notificationPermissionException != null) { throw notificationPermissionException + } } return NotificationActionConfigs(destination, channel) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt index 49c00adef..7f4496758 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/BucketLevelMonitorRunner.kt @@ -74,7 +74,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { dryrun: Boolean, workflowRunContext: WorkflowRunContext?, executionId: String, - transportService: TransportService + transportService: TransportService, ): MonitorRunResult { val roles = MonitorRunnerService.getRolesForMonitor(monitor) logger.debug("Running monitor: ${monitor.name} with roles: $roles Thread: ${Thread.currentThread().name}") @@ -84,19 +84,20 @@ object BucketLevelMonitorRunner : MonitorRunner() { } var monitorResult = MonitorRunResult(monitor.name, periodStart, periodEnd) - val currentAlerts = try { - monitorCtx.alertIndices!!.createOrUpdateAlertIndex(monitor.dataSources) - monitorCtx.alertIndices!!.createOrUpdateInitialAlertHistoryIndex(monitor.dataSources) - if (monitor.dataSources.findingsEnabled == true) { - monitorCtx.alertIndices!!.createOrUpdateInitialFindingHistoryIndex(monitor.dataSources) + val currentAlerts = + try { + monitorCtx.alertIndices!!.createOrUpdateAlertIndex(monitor.dataSources) + monitorCtx.alertIndices!!.createOrUpdateInitialAlertHistoryIndex(monitor.dataSources) + if (monitor.dataSources.findingsEnabled == true) { + monitorCtx.alertIndices!!.createOrUpdateInitialFindingHistoryIndex(monitor.dataSources) + } + monitorCtx.alertService!!.loadCurrentAlertsForBucketLevelMonitor(monitor, workflowRunContext) + } catch (e: Exception) { + // We can't save ERROR alerts to the index here as we don't know if there are existing ACTIVE 
alerts + val id = if (monitor.id.trim().isEmpty()) "_na_" else monitor.id + logger.error("Error loading alerts for monitor: $id", e) + return monitorResult.copy(error = e) } - monitorCtx.alertService!!.loadCurrentAlertsForBucketLevelMonitor(monitor, workflowRunContext) - } catch (e: Exception) { - // We can't save ERROR alerts to the index here as we don't know if there are existing ACTIVE alerts - val id = if (monitor.id.trim().isEmpty()) "_na_" else monitor.id - logger.error("Error loading alerts for monitor: $id", e) - return monitorResult.copy(error = e) - } /* * Since the aggregation query can consist of multiple pages, each iteration of the do-while loop only has partial results @@ -130,20 +131,21 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitorCtx.settings!!, monitorCtx.threadPool!!.threadContext, roles, - monitor.user - ) + monitor.user, + ), ) { // Storing the first page of results in the case of pagination input results to prevent empty results // in the final output of monitorResult which occurs when all pages have been exhausted. // If it's favorable to return the last page, will need to check how to accomplish that with multiple aggregation paths // with different page counts. - val inputResults = monitorCtx.inputService!!.collectInputResults( - monitor, - periodStart, - periodEnd, - monitorResult.inputResults, - workflowRunContext - ) + val inputResults = + monitorCtx.inputService!!.collectInputResults( + monitor, + periodStart, + periodEnd, + monitorResult.inputResults, + workflowRunContext, + ) if (firstIteration) { firstPageOfInputResults = inputResults firstIteration = false @@ -179,22 +181,24 @@ object BucketLevelMonitorRunner : MonitorRunner() { periodStart, periodEnd, !dryrun && monitor.id != Monitor.NO_ID, - executionId + executionId, ) } else { emptyList() } // TODO: Should triggerResult's aggregationResultBucket be a list? 
If not, getCategorizedAlertsForBucketLevelMonitor can // be refactored to use a map instead - val categorizedAlerts = monitorCtx.alertService!!.getCategorizedAlertsForBucketLevelMonitor( - monitor, - trigger, - currentAlertsForTrigger, - triggerResult.aggregationResultBuckets.values.toList(), - findings, - executionId, - workflowRunContext - ).toMutableMap() + val categorizedAlerts = + monitorCtx.alertService!! + .getCategorizedAlertsForBucketLevelMonitor( + monitor, + trigger, + currentAlertsForTrigger, + triggerResult.aggregationResultBuckets.values.toList(), + findings, + executionId, + workflowRunContext, + ).toMutableMap() val dedupedAlerts = categorizedAlerts.getOrDefault(AlertCategory.DEDUPED, emptyList()) var newAlerts = categorizedAlerts.getOrDefault(AlertCategory.NEW, emptyList()) @@ -213,18 +217,19 @@ object BucketLevelMonitorRunner : MonitorRunner() { dedupedAlerts, monitorCtx.retryPolicy!!, allowUpdatingAcknowledgedAlert = true, - monitor.id + monitor.id, ) newAlerts = monitorCtx.alertService!!.saveNewAlerts(monitor.dataSources, newAlerts, monitorCtx.retryPolicy!!) } // Store deduped and new Alerts to accumulate across pages if (!nextAlerts.containsKey(trigger.id)) { - nextAlerts[trigger.id] = mutableMapOf( - AlertCategory.DEDUPED to mutableListOf(), - AlertCategory.NEW to mutableListOf(), - AlertCategory.COMPLETED to mutableListOf() - ) + nextAlerts[trigger.id] = + mutableMapOf( + AlertCategory.DEDUPED to mutableListOf(), + AlertCategory.NEW to mutableListOf(), + AlertCategory.COMPLETED to mutableListOf(), + ) } nextAlerts[trigger.id]?.get(AlertCategory.DEDUPED)?.addAll(dedupedAlerts) nextAlerts[trigger.id]?.get(AlertCategory.NEW)?.addAll(newAlerts) @@ -235,9 +240,11 @@ object BucketLevelMonitorRunner : MonitorRunner() { // However, this operation will only be done if there was no trigger error, since otherwise the nextAlerts were not collected // in favor of just using the currentAlerts as-is. 
currentAlerts.forEach { (trigger, keysToAlertsMap) -> - if (triggerResults[trigger.id]?.error == null) - nextAlerts[trigger.id]?.get(AlertCategory.COMPLETED) + if (triggerResults[trigger.id]?.error == null) { + nextAlerts[trigger.id] + ?.get(AlertCategory.COMPLETED) ?.addAll(monitorCtx.alertService!!.convertToCompletedAlerts(keysToAlertsMap)) + } } // The alertSampleDocs map structure is Map>> @@ -247,9 +254,12 @@ object BucketLevelMonitorRunner : MonitorRunner() { val completedAlertsToUpdate = mutableSetOf() // Filter ACKNOWLEDGED Alerts from the deduped list so they do not have Actions executed for them. // New Alerts are ignored since they cannot be acknowledged yet. - val dedupedAlerts = nextAlerts[trigger.id]?.get(AlertCategory.DEDUPED) - ?.filterNot { it.state == Alert.State.ACKNOWLEDGED }?.toMutableList() - ?: mutableListOf() + val dedupedAlerts = + nextAlerts[trigger.id] + ?.get(AlertCategory.DEDUPED) + ?.filterNot { it.state == Alert.State.ACKNOWLEDGED } + ?.toMutableList() + ?: mutableListOf() // Update nextAlerts so the filtered DEDUPED Alerts are reflected for PER_ALERT Action execution nextAlerts[trigger.id]?.set(AlertCategory.DEDUPED, dedupedAlerts) @@ -257,21 +267,23 @@ object BucketLevelMonitorRunner : MonitorRunner() { val isTriggered = !nextAlerts[trigger.id]?.get(AlertCategory.NEW).isNullOrEmpty() if (isTriggered && printsSampleDocData(trigger)) { try { - val searchRequest = monitorCtx.inputService!!.getSearchRequest( - monitor = monitor.copy(triggers = listOf(trigger)), - searchInput = monitor.inputs[0] as SearchInput, - periodStart = periodStart, - periodEnd = periodEnd, - prevResult = monitorResult.inputResults, - matchingDocIdsPerIndex = null, - returnSampleDocs = true - ) - val sampleDocumentsByBucket = getSampleDocs( - client = monitorCtx.client!!, - monitorId = monitor.id, - triggerId = trigger.id, - searchRequest = searchRequest - ) + val searchRequest = + monitorCtx.inputService!!.getSearchRequest( + monitor = monitor.copy(triggers = 
listOf(trigger)), + searchInput = monitor.inputs[0] as SearchInput, + periodStart = periodStart, + periodEnd = periodEnd, + prevResult = monitorResult.inputResults, + matchingDocIdsPerIndex = null, + returnSampleDocs = true, + ) + val sampleDocumentsByBucket = + getSampleDocs( + client = monitorCtx.client!!, + monitorId = monitor.id, + triggerId = trigger.id, + searchRequest = searchRequest, + ) alertSampleDocs[trigger.id] = sampleDocumentsByBucket } catch (e: Exception) { logger.error("Error retrieving sample documents for trigger {} of monitor {}.", trigger.id, monitor.id, e) @@ -292,13 +304,14 @@ object BucketLevelMonitorRunner : MonitorRunner() { val triggerCtx = triggerContexts[trigger.id]!! val triggerResult = triggerResults[trigger.id]!! val monitorOrTriggerError = monitorResult.error ?: triggerResult.error - val shouldDefaultToPerExecution = defaultToPerExecutionAction( - monitorCtx.maxActionableAlertCount, - monitorId = monitor.id, - triggerId = trigger.id, - totalActionableAlertCount = dedupedAlerts.size + newAlerts.size + completedAlerts.size, - monitorOrTriggerError = monitorOrTriggerError - ) + val shouldDefaultToPerExecution = + defaultToPerExecutionAction( + monitorCtx.maxActionableAlertCount, + monitorId = monitor.id, + triggerId = trigger.id, + totalActionableAlertCount = dedupedAlerts.size + newAlerts.size + completedAlerts.size, + monitorOrTriggerError = monitorOrTriggerError, + ) for (action in trigger.actions) { // ActionExecutionPolicy should not be null for Bucket-Level Monitors since it has a default config when not set explicitly val actionExecutionScope = action.getActionExecutionPolicy(monitor)!!.actionExecutionScope @@ -306,21 +319,27 @@ object BucketLevelMonitorRunner : MonitorRunner() { for (alertCategory in actionExecutionScope.actionableAlerts) { val alertsToExecuteActionsFor = nextAlerts[trigger.id]?.get(alertCategory) ?: mutableListOf() val alertsToExecuteActionsForIds = alertsToExecuteActionsFor.map { it.id } - val 
allAlertsComments = CommentsUtils.getCommentsForAlertNotification( - monitorCtx.client!!, - alertsToExecuteActionsForIds, - maxComments - ) + val allAlertsComments = + CommentsUtils.getCommentsForAlertNotification( + monitorCtx.client!!, + alertsToExecuteActionsForIds, + maxComments, + ) for (alert in alertsToExecuteActionsFor) { - val alertContext = if (alertCategory != AlertCategory.NEW) { - AlertContext(alert = alert, comments = allAlertsComments[alert.id]) - } else { - getAlertContext(alert = alert, alertSampleDocs = alertSampleDocs, allAlertsComments[alert.id]) - } + val alertContext = + if (alertCategory != AlertCategory.NEW) { + AlertContext(alert = alert, comments = allAlertsComments[alert.id]) + } else { + getAlertContext(alert = alert, alertSampleDocs = alertSampleDocs, allAlertsComments[alert.id]) + } - val actionCtx = getActionContextForAlertCategory( - alertCategory, alertContext, triggerCtx, monitorOrTriggerError - ) + val actionCtx = + getActionContextForAlertCategory( + alertCategory, + alertContext, + triggerCtx, + monitorOrTriggerError, + ) // AggregationResultBucket should not be null here val alertBucketKeysHash = alert.aggregationResultBucket!!.getBucketKeysHash() if (!triggerResult.actionResultsMap.containsKey(alertBucketKeysHash)) { @@ -329,11 +348,12 @@ object BucketLevelMonitorRunner : MonitorRunner() { // Keeping the throttled response separate from runAction for now since // throttling is not supported for PER_EXECUTION - val actionResult = if (MonitorRunnerService.isActionActionable(action, alert)) { - this.runAction(action, actionCtx, monitorCtx, monitor, dryrun) - } else { - ActionRunResult(action.id, action.name, mapOf(), true, null, null) - } + val actionResult = + if (MonitorRunnerService.isActionActionable(action, alert)) { + this.runAction(action, actionCtx, monitorCtx, monitor, dryrun) + } else { + ActionRunResult(action.id, action.name, mapOf(), true, null, null) + } 
triggerResult.actionResultsMap[alertBucketKeysHash]?.set(action.id, actionResult) alertsToUpdate.add(alert) @@ -345,39 +365,50 @@ object BucketLevelMonitorRunner : MonitorRunner() { } else if (actionExecutionScope is PerExecutionActionScope || shouldDefaultToPerExecution) { // If all categories of Alerts are empty, there is nothing to message on and we can skip the Action. // If the error is not null, this is disregarded and the Action is executed anyway so the user can be notified. - if (monitorOrTriggerError == null && dedupedAlerts.isEmpty() && newAlerts.isEmpty() && completedAlerts.isEmpty()) + if (monitorOrTriggerError == null && dedupedAlerts.isEmpty() && newAlerts.isEmpty() && completedAlerts.isEmpty()) { continue + } - val alertsToExecuteActionsForIds = dedupedAlerts.map { it.id } - .plus(newAlerts.map { it.id }) - .plus(completedAlerts.map { it.id }) - val allAlertsComments = CommentsUtils.getCommentsForAlertNotification( - monitorCtx.client!!, - alertsToExecuteActionsForIds, - maxComments - ) - val actionCtx = triggerCtx.copy( - dedupedAlerts = dedupedAlerts.map { - AlertContext(alert = it, comments = allAlertsComments[it.id]) - }, - newAlerts = newAlerts.map { - getAlertContext( - alert = it, - alertSampleDocs = alertSampleDocs, - alertComments = allAlertsComments[it.id] - ) - }, - completedAlerts = completedAlerts.map { - AlertContext(alert = it, comments = allAlertsComments[it.id]) - }, - error = monitorResult.error ?: triggerResult.error - ) + val alertsToExecuteActionsForIds = + dedupedAlerts + .map { it.id } + .plus(newAlerts.map { it.id }) + .plus(completedAlerts.map { it.id }) + val allAlertsComments = + CommentsUtils.getCommentsForAlertNotification( + monitorCtx.client!!, + alertsToExecuteActionsForIds, + maxComments, + ) + val actionCtx = + triggerCtx.copy( + dedupedAlerts = + dedupedAlerts.map { + AlertContext(alert = it, comments = allAlertsComments[it.id]) + }, + newAlerts = + newAlerts.map { + getAlertContext( + alert = it, + alertSampleDocs 
= alertSampleDocs, + alertComments = allAlertsComments[it.id], + ) + }, + completedAlerts = + completedAlerts.map { + AlertContext(alert = it, comments = allAlertsComments[it.id]) + }, + error = monitorResult.error ?: triggerResult.error, + ) val actionResult = this.runAction(action, actionCtx, monitorCtx, monitor, dryrun) // If there was an error during trigger execution then the Alerts to be updated are the current Alerts since the state // was not changed. Otherwise, the Alerts to be updated are the sum of the deduped, new and completed Alerts. - val alertsToIterate = if (monitorOrTriggerError == null) { - (dedupedAlerts + newAlerts + completedAlerts) - } else currentAlerts[trigger]?.map { it.value } ?: listOf() + val alertsToIterate = + if (monitorOrTriggerError == null) { + (dedupedAlerts + newAlerts + completedAlerts) + } else { + currentAlerts[trigger]?.map { it.value } ?: listOf() + } // Save the Action run result for every Alert for (alert in alertsToIterate) { val alertBucketKeysHash = alert.aggregationResultBucket!!.getBucketKeysHash() @@ -395,23 +426,27 @@ object BucketLevelMonitorRunner : MonitorRunner() { // Alerts are only added to alertsToUpdate after Action execution meaning the action results for it should be present // in the actionResultsMap but returning a default value when accessing the map to be safe. 
- val updatedAlerts = alertsToUpdate.map { alert -> - val bucketKeysHash = alert.aggregationResultBucket!!.getBucketKeysHash() - val actionResults = triggerResult.actionResultsMap.getOrDefault(bucketKeysHash, emptyMap()) - monitorCtx.alertService!!.updateActionResultsForBucketLevelAlert( - alert.copy(lastNotificationTime = MonitorRunnerService.currentTime()), - actionResults, - // TODO: Update BucketLevelTriggerRunResult.alertError() to retrieve error based on the first failed Action - monitorResult.alertError() ?: triggerResult.alertError() - ) - } + val updatedAlerts = + alertsToUpdate.map { alert -> + val bucketKeysHash = alert.aggregationResultBucket!!.getBucketKeysHash() + val actionResults = triggerResult.actionResultsMap.getOrDefault(bucketKeysHash, emptyMap()) + monitorCtx.alertService!!.updateActionResultsForBucketLevelAlert( + alert.copy(lastNotificationTime = MonitorRunnerService.currentTime()), + actionResults, + // TODO: Update BucketLevelTriggerRunResult.alertError() to retrieve error based on the first failed Action + monitorResult.alertError() ?: triggerResult.alertError(), + ) + } // Update Alerts with action execution results (if it's not a test Monitor). // ACKNOWLEDGED Alerts should not be saved here since actions are not executed for them. 
if (!dryrun && monitor.id != Monitor.NO_ID) { monitorCtx.alertService!!.saveAlerts( - monitor.dataSources, updatedAlerts, monitorCtx.retryPolicy!!, allowUpdatingAcknowledgedAlert = false, - routingId = monitor.id + monitor.dataSources, + updatedAlerts, + monitorCtx.retryPolicy!!, + allowUpdatingAcknowledgedAlert = false, + routingId = monitor.id, ) // Save any COMPLETED Alerts that were not covered in updatedAlerts monitorCtx.alertService!!.saveAlerts( @@ -419,7 +454,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { completedAlertsToUpdate.toList(), monitorCtx.retryPolicy!!, allowUpdatingAcknowledgedAlert = false, - monitor.id + monitor.id, ) } } @@ -456,43 +491,56 @@ object BucketLevelMonitorRunner : MonitorRunner() { fieldName = source.field() } } + is TermsAggregationBuilder -> { fieldName = aggFactory.field() } + else -> { logger.error( - "Bucket level monitor findings supported only for composite and term aggs. Found [{${aggFactory.type}}]" + "Bucket level monitor findings supported only for composite and term aggs. Found [{${aggFactory.type}}]", ) return listOf() } } } if (fieldName != "") { - val searchParams = mapOf( - "period_start" to periodStart.toEpochMilli(), - "period_end" to periodEnd.toEpochMilli() - ) - val searchSource = monitorCtx.scriptService!!.compile( - Script( - ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, - query.toString(), searchParams - ), - TemplateScript.CONTEXT - ) - .newInstance(searchParams) - .execute() + val searchParams = + mapOf( + "period_start" to periodStart.toEpochMilli(), + "period_end" to periodEnd.toEpochMilli(), + ) + val searchSource = + monitorCtx.scriptService!! 
+ .compile( + Script( + ScriptType.INLINE, + Script.DEFAULT_TEMPLATE_LANG, + query.toString(), + searchParams, + ), + TemplateScript.CONTEXT, + ).newInstance(searchParams) + .execute() val sr = SearchRequest(*input.indices.toTypedArray()) - XContentType.JSON.xContent().createParser(monitorCtx.xContentRegistry, LoggingDeprecationHandler.INSTANCE, searchSource) + XContentType.JSON + .xContent() + .createParser(monitorCtx.xContentRegistry, LoggingDeprecationHandler.INSTANCE, searchSource) .use { val source = SearchSourceBuilder.fromXContent(it) - val queryBuilder = if (input.query.query() == null) BoolQueryBuilder() - else QueryBuilders.boolQuery().must(source.query()) + val queryBuilder = + if (input.query.query() == null) { + BoolQueryBuilder() + } else { + QueryBuilders.boolQuery().must(source.query()) + } queryBuilder.filter(QueryBuilders.termsQuery(fieldName, bucketValues)) sr.source().query(queryBuilder).sort("_seq_no", SortOrder.DESC) } - sr.cancelAfterTimeInterval = TimeValue.timeValueMinutes( - getCancelAfterTimeInterval() - ) + sr.cancelAfterTimeInterval = + TimeValue.timeValueMinutes( + getCancelAfterTimeInterval(), + ) val searchResponse: SearchResponse = monitorCtx.client!!.suspendUntil { monitorCtx.client!!.search(sr, it) } return createFindingPerIndex(searchResponse, monitor, monitorCtx, shouldCreateFinding, executionId) } else { @@ -508,7 +556,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { monitor: Monitor, monitorCtx: MonitorRunnerExecutionContext, shouldCreateFinding: Boolean, - workflowExecutionId: String? = null + workflowExecutionId: String? 
= null, ): List { val docIdsByIndexName: MutableMap> = mutableMapOf() for (hit in searchResponse.hits.hits) { @@ -520,25 +568,27 @@ object BucketLevelMonitorRunner : MonitorRunner() { var requestsToRetry: MutableList = mutableListOf() docIdsByIndexName.entries.forEach { it -> run { - val finding = Finding( - id = UUID.randomUUID().toString(), - relatedDocIds = it.value, - monitorId = monitor.id, - monitorName = monitor.name, - index = it.key, - timestamp = Instant.now(), - docLevelQueries = listOf(), - executionId = workflowExecutionId - ) + val finding = + Finding( + id = UUID.randomUUID().toString(), + relatedDocIds = it.value, + monitorId = monitor.id, + monitorName = monitor.name, + index = it.key, + timestamp = Instant.now(), + docLevelQueries = listOf(), + executionId = workflowExecutionId, + ) val findingStr = finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string() logger.debug("Bucket level monitor ${monitor.id} Findings: $findingStr") if (shouldCreateFinding) { logger.debug("Saving bucket level monitor findings for monitor ${monitor.id}") - val indexRequest = IndexRequest(monitor.dataSources.findingsIndex) - .source(findingStr, XContentType.JSON) - .id(finding.id) - .routing(finding.id) + val indexRequest = + IndexRequest(monitor.dataSources.findingsIndex) + .source(findingStr, XContentType.JSON) + .id(finding.id) + .routing(finding.id) requestsToRetry.add(indexRequest) } findings.add(finding.id) @@ -566,22 +616,26 @@ object BucketLevelMonitorRunner : MonitorRunner() { alertCategory: AlertCategory, alertContext: AlertContext, ctx: BucketLevelTriggerExecutionContext, - error: Exception? 
- ): BucketLevelTriggerExecutionContext { - return when (alertCategory) { - AlertCategory.DEDUPED -> + error: Exception?, + ): BucketLevelTriggerExecutionContext = + when (alertCategory) { + AlertCategory.DEDUPED -> { ctx.copy(dedupedAlerts = listOf(alertContext), newAlerts = emptyList(), completedAlerts = emptyList(), error = error) - AlertCategory.NEW -> + } + + AlertCategory.NEW -> { ctx.copy(dedupedAlerts = emptyList(), newAlerts = listOf(alertContext), completedAlerts = emptyList(), error = error) - AlertCategory.COMPLETED -> + } + + AlertCategory.COMPLETED -> { ctx.copy(dedupedAlerts = emptyList(), newAlerts = emptyList(), completedAlerts = listOf(alertContext), error = error) + } } - } private fun getAlertContext( alert: Alert, alertSampleDocs: Map>>>, - alertComments: List? + alertComments: List?, ): AlertContext { val bucketKey = alert.aggregationResultBucket?.getBucketKeysHash() val sampleDocs = alertSampleDocs[alert.triggerId]?.get(bucketKey) @@ -593,7 +647,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { alert.id, alert.triggerId, alert.monitorId, - alert.executionId + alert.executionId, ) AlertContext(alert = alert, sampleDocs = listOf(), comments = alertComments) } @@ -610,7 +664,7 @@ object BucketLevelMonitorRunner : MonitorRunner() { client: Client, monitorId: String, triggerId: String, - searchRequest: SearchRequest + searchRequest: SearchRequest, ): Map>> { val sampleDocumentsByBucket = mutableMapOf>>() val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } @@ -622,12 +676,14 @@ object BucketLevelMonitorRunner : MonitorRunner() { val bucketKey = getBucketKeysHash((bucket.getOrDefault("key", mapOf()) as Map).values.toList()) if (bucketKey.isEmpty()) throw IllegalStateException("Cannot format bucket keys.") - val unwrappedTopHits = (bucket.getOrDefault("top_hits", mapOf()) as Map) - .getOrDefault("hits", mapOf()) as Map + val unwrappedTopHits = + (bucket.getOrDefault("top_hits", mapOf()) as Map) + 
.getOrDefault("hits", mapOf()) as Map val topHits = unwrappedTopHits.getOrDefault("hits", listOf>()) as List> - val unwrappedLowHits = (bucket.getOrDefault("low_hits", mapOf()) as Map) - .getOrDefault("hits", mapOf()) as Map + val unwrappedLowHits = + (bucket.getOrDefault("low_hits", mapOf()) as Map) + .getOrDefault("hits", mapOf()) as Map val lowHits = unwrappedLowHits.getOrDefault("hits", listOf>()) as List> // Reversing the order of lowHits so allHits will be in descending order. diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt index 8a961b3b9..586807247 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/DocumentLevelMonitorRunner.kt @@ -60,7 +60,7 @@ class DocumentLevelMonitorRunner : MonitorRunner() { dryrun: Boolean, workflowRunContext: WorkflowRunContext?, executionId: String, - transportService: TransportService + transportService: TransportService, ): MonitorRunResult { logger.debug("Document-level-monitor is running ...") val startTime = System.currentTimeMillis() @@ -92,29 +92,35 @@ class DocumentLevelMonitorRunner : MonitorRunner() { monitorResult = monitorResult.copy(error = AlertingException.wrap(e)) } - var (monitorMetadata, _) = MonitorMetadataService.getOrCreateMetadata( - monitor = monitor, - createWithRunContext = false, - skipIndex = isTempMonitor, - workflowRunContext?.workflowMetadataId - ) + var (monitorMetadata, _) = + MonitorMetadataService.getOrCreateMetadata( + monitor = monitor, + createWithRunContext = false, + skipIndex = isTempMonitor, + workflowRunContext?.workflowMetadataId, + ) val docLevelMonitorInput = monitor.inputs[0] as DocLevelMonitorInput val queries: List = docLevelMonitorInput.queries - val lastRunContext = if (monitorMetadata.lastRunContext.isNullOrEmpty()) mutableMapOf() - else 
monitorMetadata.lastRunContext.toMutableMap() as MutableMap> + val lastRunContext = + if (monitorMetadata.lastRunContext.isNullOrEmpty()) { + mutableMapOf() + } else { + monitorMetadata.lastRunContext.toMutableMap() as MutableMap> + } val updatedLastRunContext = lastRunContext.toMutableMap() try { // Resolve all passed indices to concrete indices - val allConcreteIndices = IndexUtils.resolveAllIndices( - docLevelMonitorInput.indices, - monitorCtx.clusterService!!, - monitorCtx.indexNameExpressionResolver!! - ) + val allConcreteIndices = + IndexUtils.resolveAllIndices( + docLevelMonitorInput.indices, + monitorCtx.clusterService!!, + monitorCtx.indexNameExpressionResolver!!, + ) if (allConcreteIndices.isEmpty()) { logger.error("indices not found-${docLevelMonitorInput.indices.joinToString(",")}") throw IndexNotFoundException(docLevelMonitorInput.indices.joinToString(",")) @@ -125,7 +131,7 @@ class DocumentLevelMonitorRunner : MonitorRunner() { monitor = monitor, monitorId = monitor.id, monitorMetadata, - indexTimeout = monitorCtx.indexTimeout!! 
+ indexTimeout = monitorCtx.indexTimeout!!, ) // cleanup old indices that are not monitored anymore from the same monitor @@ -138,21 +144,23 @@ class DocumentLevelMonitorRunner : MonitorRunner() { // Map of document ids per index when monitor is workflow delegate and has chained findings val matchingDocIdsPerIndex = workflowRunContext?.matchingDocIdsPerIndex - val findingIdsForMatchingDocIds = if (workflowRunContext?.findingIds != null) { - workflowRunContext.findingIds - } else { - listOf() - } + val findingIdsForMatchingDocIds = + if (workflowRunContext?.findingIds != null) { + workflowRunContext.findingIds + } else { + listOf() + } val concreteIndicesSeenSoFar = mutableListOf() val updatedIndexNames = mutableListOf() val docLevelMonitorFanOutResponses: MutableList = mutableListOf() docLevelMonitorInput.indices.forEach { indexName -> - var concreteIndices = IndexUtils.resolveAllIndices( - listOf(indexName), - monitorCtx.clusterService!!, - monitorCtx.indexNameExpressionResolver!! - ) + var concreteIndices = + IndexUtils.resolveAllIndices( + listOf(indexName), + monitorCtx.clusterService!!, + monitorCtx.indexNameExpressionResolver!!, + ) var lastWriteIndex: String? 
= null if (IndexUtils.isAlias(indexName, monitorCtx.clusterService!!.state()) || IndexUtils.isDataStream(indexName, monitorCtx.clusterService!!.state()) @@ -161,45 +169,54 @@ class DocumentLevelMonitorRunner : MonitorRunner() { if (lastWriteIndex != null) { val lastWriteIndexCreationDate = IndexUtils.getCreationDateForIndex(lastWriteIndex, monitorCtx.clusterService!!.state()) - concreteIndices = IndexUtils.getNewestIndicesByCreationDate( - concreteIndices, - monitorCtx.clusterService!!.state(), - lastWriteIndexCreationDate - ) + concreteIndices = + IndexUtils.getNewestIndicesByCreationDate( + concreteIndices, + monitorCtx.clusterService!!.state(), + lastWriteIndexCreationDate, + ) } } concreteIndicesSeenSoFar.addAll(concreteIndices) val updatedIndexName = indexName.replace("*", "_") updatedIndexNames.add(updatedIndexName) - val conflictingFields = monitorCtx.docLevelMonitorQueries!!.getAllConflictingFields( - monitorCtx.clusterService!!.state(), - concreteIndices - ) + val conflictingFields = + monitorCtx.docLevelMonitorQueries!!.getAllConflictingFields( + monitorCtx.clusterService!!.state(), + concreteIndices, + ) concreteIndices.forEach { concreteIndexName -> // Prepare lastRunContext for each index - val indexLastRunContext = lastRunContext.getOrPut(concreteIndexName) { - val isIndexCreatedRecently = createdRecently( - monitor, - periodStart, - periodEnd, - monitorCtx.clusterService!!.state().metadata.index(concreteIndexName) - ) - MonitorMetadataService.createRunContextForIndex(concreteIndexName, isIndexCreatedRecently) - } + val indexLastRunContext = + lastRunContext.getOrPut(concreteIndexName) { + val isIndexCreatedRecently = + createdRecently( + monitor, + periodStart, + periodEnd, + monitorCtx.clusterService!! 
+ .state() + .metadata + .index(concreteIndexName), + ) + MonitorMetadataService.createRunContextForIndex(concreteIndexName, isIndexCreatedRecently) + } val shardCount: Int = getShardsCount(monitorCtx.clusterService!!, concreteIndexName) // Prepare updatedLastRunContext for each index - val indexUpdatedRunContext = initializeNewLastRunContext( - indexLastRunContext.toMutableMap(), - concreteIndexName, - shardCount - ) as MutableMap + val indexUpdatedRunContext = + initializeNewLastRunContext( + indexLastRunContext.toMutableMap(), + concreteIndexName, + shardCount, + ) as MutableMap if (IndexUtils.isAlias(indexName, monitorCtx.clusterService!!.state()) || IndexUtils.isDataStream(indexName, monitorCtx.clusterService!!.state()) ) { - if (concreteIndexName == IndexUtils.getWriteIndex( + if (concreteIndexName == + IndexUtils.getWriteIndex( indexName, - monitorCtx.clusterService!!.state() + monitorCtx.clusterService!!.state(), ) ) { updatedLastRunContext.remove(lastWriteIndex) @@ -216,29 +233,33 @@ class DocumentLevelMonitorRunner : MonitorRunner() { // update lastRunContext if its a temp monitor as we only want to view the last bit of data then // TODO: If dryrun, we should make it so we limit the search as this could still potentially give us lots of data if (isTempMonitor) { - indexLastRunContext[shard] = if (indexLastRunContext.containsKey(shard)) { - if (indexLastRunContext[shard] is Long) { - max(-1L, indexUpdatedRunContext[shard] as Long - 10L) - } else if (indexLastRunContext[shard] is Int) { - max(-1L, (indexUpdatedRunContext[shard] as Int).toLong() - 10L) - } else -1L - } else { - -1L - } + indexLastRunContext[shard] = + if (indexLastRunContext.containsKey(shard)) { + if (indexLastRunContext[shard] is Long) { + max(-1L, indexUpdatedRunContext[shard] as Long - 10L) + } else if (indexLastRunContext[shard] is Int) { + max(-1L, (indexUpdatedRunContext[shard] as Int).toLong() - 10L) + } else { + -1L + } + } else { + -1L + } } } - val indexExecutionContext = 
IndexExecutionContext( - queries, - indexLastRunContext, - indexUpdatedRunContext, - updatedIndexName, - concreteIndexName, - updatedIndexNames, - concreteIndices, - conflictingFields.toList(), - matchingDocIdsPerIndex?.get(concreteIndexName), - findingIdsForMatchingDocIds - ) + val indexExecutionContext = + IndexExecutionContext( + queries, + indexLastRunContext, + indexUpdatedRunContext, + updatedIndexName, + concreteIndexName, + updatedIndexNames, + concreteIndices, + conflictingFields.toList(), + matchingDocIdsPerIndex?.get(concreteIndexName), + findingIdsForMatchingDocIds, + ) val shards = mutableSetOf() shards.addAll(indexUpdatedRunContext.keys) @@ -252,156 +273,169 @@ class DocumentLevelMonitorRunner : MonitorRunner() { **/ val clusterService = monitorCtx.clusterService!! val localNode = clusterService.localNode() - val nodeMap: Map = if (docLevelMonitorInput?.fanoutEnabled == true) { - getNodes(monitorCtx) - } else { - logger.info("Fan-out is disabled for chained findings monitor ${monitor.id}") - mapOf(localNode.id to localNode) - } + val nodeMap: Map = + if (docLevelMonitorInput?.fanoutEnabled == true) { + getNodes(monitorCtx) + } else { + logger.info("Fan-out is disabled for chained findings monitor ${monitor.id}") + mapOf(localNode.id to localNode) + } - val nodeShardAssignments = distributeShards( - monitorCtx.totalNodesFanOut, - nodeMap.keys.toList(), - shards.toList(), - monitorCtx.clusterService!!.state().metadata.index(concreteIndexName).index - ) + val nodeShardAssignments = + distributeShards( + monitorCtx.totalNodesFanOut, + nodeMap.keys.toList(), + shards.toList(), + monitorCtx.clusterService!! 
+ .state() + .metadata + .index(concreteIndexName) + .index, + ) - val responses: Collection = suspendCoroutine { cont -> - val listener = GroupedActionListener( - object : ActionListener> { - override fun onResponse(response: Collection) { - cont.resume(response) - } + val responses: Collection = + suspendCoroutine { cont -> + val listener = + GroupedActionListener( + object : ActionListener> { + override fun onResponse(response: Collection) { + cont.resume(response) + } - override fun onFailure(e: Exception) { - if (e.cause is Exception) - cont.resumeWithException(e.cause as Exception) - else - cont.resumeWithException(e) - } - }, - nodeShardAssignments.size - ) - val responseReader = Writeable.Reader { - DocLevelMonitorFanOutResponse(it) - } - for (node in nodeMap) { - if (nodeShardAssignments.containsKey(node.key)) { - val docLevelMonitorFanOutRequest = DocLevelMonitorFanOutRequest( - monitor, - dryrun, - monitorMetadata, - executionId, - indexExecutionContext, - nodeShardAssignments[node.key]!!.toList(), - concreteIndicesSeenSoFar, - workflowRunContext + override fun onFailure(e: Exception) { + if (e.cause is Exception) { + cont.resumeWithException(e.cause as Exception) + } else { + cont.resumeWithException(e) + } + } + }, + nodeShardAssignments.size, ) - - transportService.sendRequest( - node.value, - DocLevelMonitorFanOutAction.NAME, - docLevelMonitorFanOutRequest, - TransportRequestOptions - .builder() - .withTimeout(monitorCtx.docLevelMonitorExecutionMaxDuration) - .build(), - object : ActionListenerResponseHandler( - listener, - responseReader - ) { - override fun handleException(e: TransportException) { - if ( - e is ReceiveTimeoutTransportException - ) { - logger.warn( - "fan_out timed out in node ${localNode.id} for doc level monitor ${monitor.id}," + - " attempting to collect partial results from other nodes. 
ExecutionId: $executionId" - ) - listener.onResponse( - DocLevelMonitorFanOutResponse( - localNode.id, - executionId, - monitor.id, - mutableMapOf() + val responseReader = + Writeable.Reader { + DocLevelMonitorFanOutResponse(it) + } + for (node in nodeMap) { + if (nodeShardAssignments.containsKey(node.key)) { + val docLevelMonitorFanOutRequest = + DocLevelMonitorFanOutRequest( + monitor, + dryrun, + monitorMetadata, + executionId, + indexExecutionContext, + nodeShardAssignments[node.key]!!.toList(), + concreteIndicesSeenSoFar, + workflowRunContext, + ) + + transportService.sendRequest( + node.value, + DocLevelMonitorFanOutAction.NAME, + docLevelMonitorFanOutRequest, + TransportRequestOptions + .builder() + .withTimeout(monitorCtx.docLevelMonitorExecutionMaxDuration) + .build(), + object : ActionListenerResponseHandler( + listener, + responseReader, + ) { + override fun handleException(e: TransportException) { + if ( + e is ReceiveTimeoutTransportException + ) { + logger.warn( + "fan_out timed out in node ${localNode.id} for doc level monitor ${monitor.id}," + + " attempting to collect partial results from other nodes. 
ExecutionId: $executionId", ) - ) - return - } - val cause = e.unwrapCause() - if (cause is ConnectTransportException || - ( - e is RemoteTransportException && - ( - cause is NodeClosedException || - cause is CircuitBreakingException || - cause is ActionNotFoundTransportException + listener.onResponse( + DocLevelMonitorFanOutResponse( + localNode.id, + executionId, + monitor.id, + mutableMapOf(), + ), + ) + return + } + val cause = e.unwrapCause() + if (cause is ConnectTransportException || + ( + e is RemoteTransportException && + ( + cause is NodeClosedException || + cause is CircuitBreakingException || + cause is ActionNotFoundTransportException ) ) - ) { - val localNode = monitorCtx.clusterService!!.localNode() - // retry in local node - transportService.sendRequest( - localNode, - DocLevelMonitorFanOutAction.NAME, - docLevelMonitorFanOutRequest, - TransportRequestOptions - .builder() - .withTimeout(monitorCtx.docLevelMonitorExecutionMaxDuration) - .build(), - object : - ActionListenerResponseHandler( - listener, - responseReader - ) { - override fun handleException(e: TransportException) { - logger.error("Fan out retry failed in node ${localNode.id}", e) - listener.onResponse( - DocLevelMonitorFanOutResponse( - "", - "", - "", - mutableMapOf(), - exception = if (e.cause is AlertingException) { - e.cause as AlertingException - } else { - AlertingException.wrap(e) as AlertingException - } + ) { + val localNode = monitorCtx.clusterService!!.localNode() + // retry in local node + transportService.sendRequest( + localNode, + DocLevelMonitorFanOutAction.NAME, + docLevelMonitorFanOutRequest, + TransportRequestOptions + .builder() + .withTimeout(monitorCtx.docLevelMonitorExecutionMaxDuration) + .build(), + object : + ActionListenerResponseHandler( + listener, + responseReader, + ) { + override fun handleException(e: TransportException) { + logger.error("Fan out retry failed in node ${localNode.id}", e) + listener.onResponse( + DocLevelMonitorFanOutResponse( + "", + "", 
+ "", + mutableMapOf(), + exception = + if (e.cause is AlertingException) { + e.cause as AlertingException + } else { + AlertingException.wrap(e) as AlertingException + }, + ), ) - ) - } + } - override fun handleResponse(response: DocLevelMonitorFanOutResponse) { - listener.onResponse(response) - } - } - ) - } else { - logger.error("Fan out failed in node ${node.key}", e) - listener.onResponse( - DocLevelMonitorFanOutResponse( - "", - "", - "", - mutableMapOf(), - exception = if (e.cause is AlertingException) { - e.cause as AlertingException - } else { - AlertingException.wrap(e) as AlertingException - } + override fun handleResponse(response: DocLevelMonitorFanOutResponse) { + listener.onResponse(response) + } + }, + ) + } else { + logger.error("Fan out failed in node ${node.key}", e) + listener.onResponse( + DocLevelMonitorFanOutResponse( + "", + "", + "", + mutableMapOf(), + exception = + if (e.cause is AlertingException) { + e.cause as AlertingException + } else { + AlertingException.wrap(e) as AlertingException + }, + ), ) - ) + } } - } - override fun handleResponse(response: DocLevelMonitorFanOutResponse) { - listener.onResponse(response) - } - } - ) + override fun handleResponse(response: DocLevelMonitorFanOutResponse) { + listener.onResponse(response) + } + }, + ) + } } } - } docLevelMonitorFanOutResponses.addAll(responses) } } @@ -416,7 +450,7 @@ class DocumentLevelMonitorRunner : MonitorRunner() { if (!isTempMonitor) { MonitorMetadataService.upsertMetadata( monitorMetadata.copy(lastRunContext = updatedLastRunContext), - true + true, ) } else { // Clean up any queries created by the dry run monitor @@ -431,11 +465,12 @@ class DocumentLevelMonitorRunner : MonitorRunner() { monitorCtx.alertService!!.upsertMonitorErrorAlert(monitor, errorMessage, executionId, workflowRunContext) } logger.error("Failed running Document-level-monitor ${monitor.name}", e) - val alertingException = AlertingException( - errorMessage, - RestStatus.INTERNAL_SERVER_ERROR, - e - ) + 
val alertingException = + AlertingException( + errorMessage, + RestStatus.INTERNAL_SERVER_ERROR, + e, + ) return monitorResult.copy(error = alertingException, inputResults = InputRunResults(emptyList(), alertingException)) } finally { val endTime = System.currentTimeMillis() @@ -443,7 +478,7 @@ class DocumentLevelMonitorRunner : MonitorRunner() { logger.debug( "Monitor {} Time spent on monitor run: {}", monitor.id, - totalTimeTakenStat + totalTimeTakenStat, ) } } @@ -452,7 +487,6 @@ class DocumentLevelMonitorRunner : MonitorRunner() { docLevelMonitorFanOutResponses: MutableList, updatedLastRunContext: MutableMap>, ) { - // Prepare updatedLastRunContext for each index for (indexName in updatedLastRunContext.keys) { for (fanOutResponse in docLevelMonitorFanOutResponses) { @@ -462,15 +496,14 @@ class DocumentLevelMonitorRunner : MonitorRunner() { if (fanOutResponse.lastRunContexts.contains("index") && fanOutResponse.lastRunContexts["index"] == indexName) { fanOutResponse.lastRunContexts.keys.forEach { - - val seq_no = fanOutResponse.lastRunContexts[it].toString().toLongOrNull() + val seqNo = fanOutResponse.lastRunContexts[it].toString().toLongOrNull() if ( it != "shards_count" && it != "index" && - seq_no != null && - seq_no >= 0 + seqNo != null && + seqNo >= 0 ) { - indexLastRunContext[it] = seq_no + indexLastRunContext[it] = seqNo } } } @@ -480,7 +513,7 @@ class DocumentLevelMonitorRunner : MonitorRunner() { } private fun checkAndThrowExceptionIfAllFanOutsFailed( - docLevelMonitorFanOutResponses: MutableList + docLevelMonitorFanOutResponses: MutableList, ): AlertingException? 
{ val exceptions = mutableListOf() for (res in docLevelMonitorFanOutResponses) { @@ -505,16 +538,18 @@ class DocumentLevelMonitorRunner : MonitorRunner() { if (documentLevelTriggerRunResult != null) { if (false == triggerResults.contains(triggerId)) { triggerResults[triggerId] = documentLevelTriggerRunResult - triggerErrorMap[triggerId] = if (documentLevelTriggerRunResult.error != null) { - val error = if (documentLevelTriggerRunResult.error is AlertingException) { - documentLevelTriggerRunResult.error as AlertingException + triggerErrorMap[triggerId] = + if (documentLevelTriggerRunResult.error != null) { + val error = + if (documentLevelTriggerRunResult.error is AlertingException) { + documentLevelTriggerRunResult.error as AlertingException + } else { + AlertingException.wrap(documentLevelTriggerRunResult.error!!) as AlertingException + } + mutableListOf(error) } else { - AlertingException.wrap(documentLevelTriggerRunResult.error!!) as AlertingException + mutableListOf() } - mutableListOf(error) - } else { - mutableListOf() - } } else { val currVal = triggerResults[triggerId] val newTriggeredDocs = mutableListOf() @@ -523,10 +558,11 @@ class DocumentLevelMonitorRunner : MonitorRunner() { val newActionResults = mutableMapOf>() newActionResults.putAll(currVal.actionResultsMap) newActionResults.putAll(documentLevelTriggerRunResult.actionResultsMap) - triggerResults[triggerId] = currVal.copy( - triggeredDocs = newTriggeredDocs, - actionResultsMap = newActionResults - ) + triggerResults[triggerId] = + currVal.copy( + triggeredDocs = newTriggeredDocs, + actionResultsMap = newActionResults, + ) if (documentLevelTriggerRunResult.error != null) { triggerErrorMap[triggerId]!!.add(documentLevelTriggerRunResult.error as AlertingException) @@ -588,19 +624,23 @@ class DocumentLevelMonitorRunner : MonitorRunner() { monitor: Monitor, periodStart: Instant, periodEnd: Instant, - indexMetadata: IndexMetadata + indexMetadata: IndexMetadata, ): Boolean { val lastExecutionTime = if 
(periodStart == periodEnd) monitor.lastUpdateTime else periodStart val indexCreationDate = indexMetadata.settings.get("index.creation_date")?.toLong() ?: 0L return indexCreationDate > lastExecutionTime.toEpochMilli() } - private fun getShardsCount(clusterService: ClusterService, index: String): Int { + private fun getShardsCount( + clusterService: ClusterService, + index: String, + ): Int { val allShards: List = clusterService!!.state().routingTable().allShards(index) return allShards.filter { it.primary() }.size } - private fun getNodes(monitorCtx: MonitorRunnerExecutionContext): Map { - return monitorCtx.clusterService!!.state().nodes.dataNodes.filter { it.value.version >= Version.CURRENT } - } + private fun getNodes(monitorCtx: MonitorRunnerExecutionContext): Map = + monitorCtx.clusterService!!.state().nodes.dataNodes.filter { + it.value.version >= Version.CURRENT + } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt index c77878bd4..22b699b9a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/InputService.kt @@ -60,9 +60,8 @@ class InputService( val xContentRegistry: NamedXContentRegistry, val clusterService: ClusterService, val settings: Settings, - val indexNameExpressionResolver: IndexNameExpressionResolver + val indexNameExpressionResolver: IndexNameExpressionResolver, ) { - private val logger = LogManager.getLogger(InputService::class.java) suspend fun collectInputResults( @@ -70,9 +69,9 @@ class InputService( periodStart: Instant, periodEnd: Instant, prevResult: InputRunResults? = null, - workflowRunContext: WorkflowRunContext? = null - ): InputRunResults { - return try { + workflowRunContext: WorkflowRunContext? 
= null, + ): InputRunResults = + try { val results = mutableListOf>() val aggTriggerAfterKey: MutableMap = mutableMapOf() @@ -84,26 +83,30 @@ class InputService( monitor.inputs.forEach { input -> when (input) { is SearchInput -> { - val searchRequest = getSearchRequest( - monitor = monitor, - searchInput = input, - periodStart = periodStart, - periodEnd = periodEnd, - prevResult = prevResult, - matchingDocIdsPerIndex = matchingDocIdsPerIndex, - returnSampleDocs = false - ) + val searchRequest = + getSearchRequest( + monitor = monitor, + searchInput = input, + periodStart = periodStart, + periodEnd = periodEnd, + prevResult = prevResult, + matchingDocIdsPerIndex = matchingDocIdsPerIndex, + returnSampleDocs = false, + ) val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } - aggTriggerAfterKey += AggregationQueryRewriter.getAfterKeysFromSearchResponse( - searchResponse, - monitor.triggers, - prevResult?.aggTriggersAfterKey - ) + aggTriggerAfterKey += + AggregationQueryRewriter.getAfterKeysFromSearchResponse( + searchResponse, + monitor.triggers, + prevResult?.aggTriggersAfterKey, + ) results += searchResponse.convertToMap() } + is ClusterMetricsInput -> { results += handleClusterMetricsInput(input) } + else -> { throw IllegalArgumentException("Unsupported input type: ${input.name()}.") } @@ -114,7 +117,6 @@ class InputService( logger.info("Error collecting inputs for monitor: ${monitor.id}", e) InputRunResults(emptyList(), e) } - } /** * Extends the given query builder with query that filters the given indices with the given doc ids per index @@ -136,14 +138,13 @@ class InputService( .add( BoolQueryBuilder() .must(MatchQueryBuilder("_index", entry.key)) - .must(TermsQueryBuilder("_id", entry.value)) + .must(TermsQueryBuilder("_id", entry.value)), ) } return queryBuilder.must(shouldQuery) } - private fun chainedFindingExist(indexToDocIds: Map>?) 
= - !indexToDocIds.isNullOrEmpty() + private fun chainedFindingExist(indexToDocIds: Map>?) = !indexToDocIds.isNullOrEmpty() private fun deepCopyQuery(query: SearchSourceBuilder): SearchSourceBuilder { val out = BytesStreamOutput() @@ -163,25 +164,33 @@ class InputService( * other user's detector id and use it to create monitor, this method will only return anomaly * results they can read. */ - suspend fun collectInputResultsForADMonitor(monitor: Monitor, periodStart: Instant, periodEnd: Instant): InputRunResults { - return try { + suspend fun collectInputResultsForADMonitor( + monitor: Monitor, + periodStart: Instant, + periodEnd: Instant, + ): InputRunResults = + try { val results = mutableListOf>() val input = monitor.inputs[0] as SearchInput val searchParams = mapOf("period_start" to periodStart.toEpochMilli(), "period_end" to periodEnd.toEpochMilli()) - val searchSource = scriptService.compile( - Script( - ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, - input.query.toString(), searchParams - ), - TemplateScript.CONTEXT - ) - .newInstance(searchParams) - .execute() - - val searchRequest = SearchRequest() - .indices(*input.indices.toTypedArray()) - .preference(Preference.PRIMARY_FIRST.type()) + val searchSource = + scriptService + .compile( + Script( + ScriptType.INLINE, + Script.DEFAULT_TEMPLATE_LANG, + input.query.toString(), + searchParams, + ), + TemplateScript.CONTEXT, + ).newInstance(searchParams) + .execute() + + val searchRequest = + SearchRequest() + .indices(*input.indices.toTypedArray()) + .preference(Preference.PRIMARY_FIRST.type()) XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, searchSource).use { searchRequest.source(SearchSourceBuilder.fromXContent(it)) } @@ -207,7 +216,6 @@ class InputService( logger.info("Error collecting anomaly result inputs for monitor: ${monitor.id}", e) InputRunResults(emptyList(), e) } - } fun getSearchRequest( monitor: Monitor, @@ -216,22 +224,24 @@ class InputService( 
periodEnd: Instant, prevResult: InputRunResults?, matchingDocIdsPerIndex: Map>?, - returnSampleDocs: Boolean = false + returnSampleDocs: Boolean = false, ): SearchRequest { // TODO: Figure out a way to use SearchTemplateRequest without bringing in the entire TransportClient - val searchParams = mapOf( - "period_start" to periodStart.toEpochMilli(), - "period_end" to periodEnd.toEpochMilli() - ) + val searchParams = + mapOf( + "period_start" to periodStart.toEpochMilli(), + "period_end" to periodEnd.toEpochMilli(), + ) // Deep copying query before passing it to rewriteQuery since otherwise, the monitor.input is modified directly // which causes a strange bug where the rewritten query persists on the Monitor across executions - val rewrittenQuery = AggregationQueryRewriter.rewriteQuery( - deepCopyQuery(searchInput.query), - prevResult, - monitor.triggers, - returnSampleDocs - ) + val rewrittenQuery = + AggregationQueryRewriter.rewriteQuery( + deepCopyQuery(searchInput.query), + prevResult, + monitor.triggers, + returnSampleDocs, + ) // Rewrite query to consider the doc ids per given index if (chainedFindingExist(matchingDocIdsPerIndex) && rewrittenQuery.query() != null) { @@ -239,31 +249,38 @@ class InputService( rewrittenQuery.query(updatedSourceQuery) } - val searchSource = scriptService.compile( - Script( - ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, - rewrittenQuery.toString(), searchParams - ), - TemplateScript.CONTEXT - ) - .newInstance(searchParams) - .execute() + val searchSource = + scriptService + .compile( + Script( + ScriptType.INLINE, + Script.DEFAULT_TEMPLATE_LANG, + rewrittenQuery.toString(), + searchParams, + ), + TemplateScript.CONTEXT, + ).newInstance(searchParams) + .execute() val indexes = CrossClusterMonitorUtils.parseIndexesForRemoteSearch(searchInput.indices, clusterService) - val resolvedIndexes = if (searchInput.query.query() == null) indexes else { - val query = searchInput.query.query() - 
resolveOnlyQueryableIndicesFromLocalClusterAliases( - monitor, - periodEnd, - query, + val resolvedIndexes = + if (searchInput.query.query() == null) { indexes - ) - } + } else { + val query = searchInput.query.query() + resolveOnlyQueryableIndicesFromLocalClusterAliases( + monitor, + periodEnd, + query, + indexes, + ) + } - val searchRequest = SearchRequest() - .indices(*resolvedIndexes.toTypedArray()) - .preference(Preference.PRIMARY_FIRST.type()) + val searchRequest = + SearchRequest() + .indices(*resolvedIndexes.toTypedArray()) + .preference(Preference.PRIMARY_FIRST.type()) XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, searchSource).use { searchRequest.source(SearchSourceBuilder.fromXContent(it)) @@ -295,17 +312,18 @@ class InputService( val resolvedIndexes = ArrayList() indexes.forEach { // we don't optimize for remote cluster aliases. we directly pass them to search request - if (CrossClusterMonitorUtils.isRemoteClusterIndex(it, clusterService)) + if (CrossClusterMonitorUtils.isRemoteClusterIndex(it, clusterService)) { resolvedIndexes.add(it) - else { + } else { val state = clusterService.state() if (IndexUtils.isAlias(it, state)) { val resolveStartTimeOfQueryTimeRange = resolveStartTimeofQueryTimeRange(monitor, query, periodEnd) if (resolveStartTimeOfQueryTimeRange != null) { val indices = IndexUtils.resolveAllIndices(listOf(it), clusterService, indexNameExpressionResolver) - val sortedIndices = indices - .mapNotNull { state.metadata().index(it) } // Get IndexMetadata for each index - .sortedBy { it.creationDate } // Sort by creation date + val sortedIndices = + indices + .mapNotNull { state.metadata().index(it) } // Get IndexMetadata for each index + .sortedBy { it.creationDate } // Sort by creation date var includePrevious = true for (i in sortedIndices.indices) { @@ -319,7 +337,7 @@ class InputService( includePrevious && ( i == sortedIndices.lastIndex || sortedIndices[i + 1].creationDate >= 
resolveStartTimeOfQueryTimeRange.toEpochMilli() - ) + ) ) { // Include the index immediately before the timestamp resolvedIndexes.add(indexMetadata.index.name) @@ -372,7 +390,11 @@ class InputService( return results } - fun resolveStartTimeofQueryTimeRange(monitor: Monitor, query: QueryBuilder, periodEnd: Instant): Instant? { + fun resolveStartTimeofQueryTimeRange( + monitor: Monitor, + query: QueryBuilder, + periodEnd: Instant, + ): Instant? { try { val rangeQuery = findRangeQuery(query) ?: return null val searchParameter = rangeQuery.from().toString() // we are looking for 'timeframe' variable {{period_end}}||- @@ -380,22 +402,24 @@ class InputService( val timeframeString = searchParameter.substringAfter("||-") val timeframeRegex = Regex("(\\d+)([a-zA-Z]+)") val matchResult = timeframeRegex.find(timeframeString) - val (amount, unit) = matchResult?.destructured?.let { (a, u) -> a to u } - ?: throw IllegalArgumentException("Invalid timeframe format: $timeframeString") - val duration = when (unit) { - "s" -> Duration.ofSeconds(amount.toLong()) - "m" -> Duration.ofMinutes(amount.toLong()) - "h" -> Duration.ofHours(amount.toLong()) - "d" -> Duration.ofDays(amount.toLong()) - else -> throw IllegalArgumentException("Invalid time unit: $unit") - } + val (amount, unit) = + matchResult?.destructured?.let { (a, u) -> a to u } + ?: throw IllegalArgumentException("Invalid timeframe format: $timeframeString") + val duration = + when (unit) { + "s" -> Duration.ofSeconds(amount.toLong()) + "m" -> Duration.ofMinutes(amount.toLong()) + "h" -> Duration.ofHours(amount.toLong()) + "d" -> Duration.ofDays(amount.toLong()) + else -> throw IllegalArgumentException("Invalid time unit: $unit") + } return periodEnd.minus(duration) } catch (e: Exception) { logger.error( "Monitor ${monitor.id}:" + " Failed to resolve time frame of search query while optimizing to query only on few of alias' concrete indices", - e + e, ) return null // won't do optimization as we failed to resolve the 
timeframe due to unexpected error } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorFanOutUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorFanOutUtils.kt index 294d154d9..9ee939112 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorFanOutUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorFanOutUtils.kt @@ -22,9 +22,10 @@ fun distributeShards( val numFanOutNodes = allNodes.size.coerceAtMost(totalShards) val totalNodes = maxFanoutNodes.coerceAtMost(numFanOutNodes) - val shardIdList = shards.map { - ShardId(index, it.toInt()) - } + val shardIdList = + shards.map { + ShardId(index, it.toInt()) + } val shuffledNodes = allNodes.shuffled() val nodes = shuffledNodes.subList(0, totalNodes) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorMetadataService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorMetadataService.kt index abd2bc1c2..7c502b432 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorMetadataService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorMetadataService.kt @@ -54,7 +54,6 @@ private val log = LogManager.getLogger(MonitorMetadataService::class.java) object MonitorMetadataService : CoroutineScope by CoroutineScope(SupervisorJob() + Dispatchers.Default + CoroutineName("MonitorMetadataService")) { - private lateinit var client: Client private lateinit var xContentRegistry: NamedXContentRegistry private lateinit var clusterService: ClusterService @@ -78,22 +77,25 @@ object MonitorMetadataService : } @Suppress("ComplexMethod", "ReturnCount") - suspend fun upsertMetadata(metadata: MonitorMetadata, updating: Boolean): MonitorMetadata { + suspend fun upsertMetadata( + metadata: MonitorMetadata, + updating: Boolean, + ): MonitorMetadata { try { if (clusterService.state().routingTable.hasIndex(ScheduledJob.SCHEDULED_JOBS_INDEX)) { - val indexRequest = IndexRequest(ScheduledJob.SCHEDULED_JOBS_INDEX) - 
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source( - metadata.toXContent( - XContentFactory.jsonBuilder(), - ToXContent.MapParams(mapOf("with_type" to "true")) - ) - ) - .id(metadata.id) - .routing(metadata.monitorId) - .setIfSeqNo(metadata.seqNo) - .setIfPrimaryTerm(metadata.primaryTerm) - .timeout(indexTimeout) + val indexRequest = + IndexRequest(ScheduledJob.SCHEDULED_JOBS_INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source( + metadata.toXContent( + XContentFactory.jsonBuilder(), + ToXContent.MapParams(mapOf("with_type" to "true")), + ), + ).id(metadata.id) + .routing(metadata.monitorId) + .setIfSeqNo(metadata.seqNo) + .setIfPrimaryTerm(metadata.primaryTerm) + .timeout(indexTimeout) if (updating) { indexRequest.id(metadata.id).setIfSeqNo(metadata.seqNo).setIfPrimaryTerm(metadata.primaryTerm) @@ -109,7 +111,7 @@ object MonitorMetadataService : throw AlertingException( failureReason, RestStatus.INTERNAL_SERVER_ERROR, - IllegalStateException(failureReason) + IllegalStateException(failureReason), ) } @@ -119,7 +121,7 @@ object MonitorMetadataService : } return metadata.copy( seqNo = response.seqNo, - primaryTerm = response.primaryTerm + primaryTerm = response.primaryTerm, ) } else { val failureReason = "Job index ${ScheduledJob.SCHEDULED_JOBS_INDEX} does not exist to update monitor metadata" @@ -140,7 +142,7 @@ object MonitorMetadataService : createWithRunContext: Boolean = true, skipIndex: Boolean = false, workflowMetadataId: String? 
= null, - forceCreateLastRunContext: Boolean = false + forceCreateLastRunContext: Boolean = false, ): Pair { try { val created = true @@ -163,31 +165,41 @@ object MonitorMetadataService : } } - private suspend fun createUpdatedRunContext( - monitor: Monitor - ): Map> { - val monitorIndex = if (monitor.monitorType == Monitor.MonitorType.DOC_LEVEL_MONITOR.value) - (monitor.inputs[0] as DocLevelMonitorInput).indices[0] - else if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) - (monitor.inputs[0] as RemoteDocLevelMonitorInput).docLevelMonitorInput.indices[0] - else null - val runContext = if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) - createFullRunContext(monitorIndex) - else emptyMap() + private suspend fun createUpdatedRunContext(monitor: Monitor): Map> { + val monitorIndex = + if (monitor.monitorType == Monitor.MonitorType.DOC_LEVEL_MONITOR.value) { + (monitor.inputs[0] as DocLevelMonitorInput).indices[0] + } else if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) { + (monitor.inputs[0] as RemoteDocLevelMonitorInput).docLevelMonitorInput.indices[0] + } else { + null + } + val runContext = + if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) { + createFullRunContext(monitorIndex) + } else { + emptyMap() + } return runContext } - suspend fun getMetadata(monitor: Monitor, workflowMetadataId: String? = null): MonitorMetadata? { + suspend fun getMetadata( + monitor: Monitor, + workflowMetadataId: String? = null, + ): MonitorMetadata? 
{ try { val metadataId = MonitorMetadata.getId(monitor, workflowMetadataId) val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, metadataId).routing(monitor.id) val getResponse: GetResponse = client.suspendUntil { get(getRequest, it) } return if (getResponse.isExists) { - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, + XContentType.JSON, + ) XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) MonitorMetadata.parse(xcp, getResponse.id, getResponse.seqNo, getResponse.primaryTerm) } else { @@ -202,19 +214,28 @@ object MonitorMetadataService : } } - suspend fun recreateRunContext(metadata: MonitorMetadata, monitor: Monitor): MonitorMetadata { + suspend fun recreateRunContext( + metadata: MonitorMetadata, + monitor: Monitor, + ): MonitorMetadata { try { - val monitorIndex = if (monitor.monitorType == Monitor.MonitorType.DOC_LEVEL_MONITOR.value) - (monitor.inputs[0] as DocLevelMonitorInput).indices[0] - else if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) - (monitor.inputs[0] as RemoteDocLevelMonitorInput).docLevelMonitorInput.indices[0] - else null - val runContext = if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) - createFullRunContext(monitorIndex, metadata.lastRunContext as MutableMap>) - else null + val monitorIndex = + if (monitor.monitorType == Monitor.MonitorType.DOC_LEVEL_MONITOR.value) { + (monitor.inputs[0] as DocLevelMonitorInput).indices[0] + } else if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) { + (monitor.inputs[0] as RemoteDocLevelMonitorInput).docLevelMonitorInput.indices[0] + } else { + null + } + val runContext = + if 
(monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) { + createFullRunContext(monitorIndex, metadata.lastRunContext as MutableMap>) + } else { + null + } return if (runContext != null) { metadata.copy( - lastRunContext = runContext + lastRunContext = runContext, ) } else { metadata @@ -229,14 +250,20 @@ object MonitorMetadataService : createWithRunContext: Boolean, workflowMetadataId: String? = null, ): MonitorMetadata { - val monitorIndex = if (monitor.monitorType == Monitor.MonitorType.DOC_LEVEL_MONITOR.value) - (monitor.inputs[0] as DocLevelMonitorInput).indices[0] - else if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) - (monitor.inputs[0] as RemoteDocLevelMonitorInput).docLevelMonitorInput.indices[0] - else null - val runContext = if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) - createFullRunContext(monitorIndex) - else emptyMap() + val monitorIndex = + if (monitor.monitorType == Monitor.MonitorType.DOC_LEVEL_MONITOR.value) { + (monitor.inputs[0] as DocLevelMonitorInput).indices[0] + } else if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) { + (monitor.inputs[0] as RemoteDocLevelMonitorInput).docLevelMonitorInput.indices[0] + } else { + null + } + val runContext = + if (monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value)) { + createFullRunContext(monitorIndex) + } else { + emptyMap() + } return MonitorMetadata( id = MonitorMetadata.getId(monitor, workflowMetadataId), seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, @@ -244,7 +271,7 @@ object MonitorMetadataService : monitorId = monitor.id, lastActionExecutionTimes = emptyList(), lastRunContext = runContext, - sourceToQueryIndexMapping = mutableMapOf() + sourceToQueryIndexMapping = mutableMapOf(), ) } @@ -263,9 +290,10 @@ object MonitorMetadataService : IndexUtils.getWriteIndex(index, clusterService.state())?.let { indices.add(it) } } else { val getIndexRequest = 
GetIndexRequest().indices(index) - val getIndexResponse: GetIndexResponse = client.suspendUntil { - client.admin().indices().getIndex(getIndexRequest, it) - } + val getIndexResponse: GetIndexResponse = + client.suspendUntil { + client.admin().indices().getIndex(getIndexRequest, it) + } indices.addAll(getIndexResponse.indices()) } @@ -281,7 +309,7 @@ object MonitorMetadataService : throw AlertingException( "Failed fetching index stats - missing required index permissions: ${e.localizedMessage}", RestStatus.INTERNAL_SERVER_ERROR, - e + e, ) } catch (e: Exception) { throw AlertingException("Failed fetching index stats", RestStatus.INTERNAL_SERVER_ERROR, e) @@ -289,7 +317,10 @@ object MonitorMetadataService : return lastRunContext } - suspend fun createRunContextForIndex(index: String, createdRecently: Boolean = false): MutableMap { + suspend fun createRunContextForIndex( + index: String, + createdRecently: Boolean = false, + ): MutableMap { val request = IndicesStatsRequest().indices(index).clear() val response: IndicesStatsResponse = client.suspendUntil { execute(IndicesStatsAction.INSTANCE, request, it) } if (response.status != RestStatus.OK) { @@ -304,8 +335,11 @@ object MonitorMetadataService : for (shard in shards) { lastRunContext[shard.shardRouting.id.toString()] = - if (createdRecently) -1L - else shard.seqNoStats?.globalCheckpoint ?: SequenceNumbers.UNASSIGNED_SEQ_NO + if (createdRecently) { + -1L + } else { + shard.seqNoStats?.globalCheckpoint ?: SequenceNumbers.UNASSIGNED_SEQ_NO + } } return lastRunContext } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt index daca08a30..2ad25105a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunner.kt @@ -21,7 +21,6 @@ import org.opensearch.transport.TransportService import java.time.Instant abstract class MonitorRunner { - 
abstract suspend fun runMonitor( monitor: Monitor, monitorCtx: MonitorRunnerExecutionContext, @@ -30,7 +29,7 @@ abstract class MonitorRunner { dryRun: Boolean, workflowRunContext: WorkflowRunContext? = null, executionId: String, - transportService: TransportService + transportService: TransportService, ): MonitorRunResult<*> suspend fun runAction( @@ -38,16 +37,19 @@ abstract class MonitorRunner { ctx: TriggerExecutionContext, monitorCtx: MonitorRunnerExecutionContext, monitor: Monitor, - dryrun: Boolean + dryrun: Boolean, ): ActionRunResult { return try { if (ctx is QueryLevelTriggerExecutionContext && !MonitorRunnerService.isActionActionable(action, ctx.alert?.alert)) { return ActionRunResult(action.id, action.name, mapOf(), true, null, null) } val actionOutput = mutableMapOf() - actionOutput[Action.SUBJECT] = if (action.subjectTemplate != null) - MonitorRunnerService.compileTemplate(action.subjectTemplate!!, ctx) - else "" + actionOutput[Action.SUBJECT] = + if (action.subjectTemplate != null) { + MonitorRunnerService.compileTemplate(action.subjectTemplate!!, ctx) + } else { + "" + } actionOutput[Action.MESSAGE] = MonitorRunnerService.compileTemplate(action.messageTemplate, ctx) if (Strings.isNullOrEmpty(actionOutput[Action.MESSAGE])) { throw IllegalStateException("Message content missing in the Destination with id: ${action.destinationId}") @@ -61,15 +63,16 @@ abstract class MonitorRunner { monitorCtx.settings!!, monitorCtx.threadPool!!.threadContext, monitor.user?.roles, - monitor.user - ) + monitor.user, + ), ) { - actionOutput[Action.MESSAGE_ID] = getConfigAndSendNotification( - action, - monitorCtx, - actionOutput[Action.SUBJECT], - actionOutput[Action.MESSAGE]!! 
- ) + actionOutput[Action.MESSAGE_ID] = + getConfigAndSendNotification( + action, + monitorCtx, + actionOutput[Action.SUBJECT], + actionOutput[Action.MESSAGE]!!, + ) } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt index 5c5e24070..53efe6799 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerExecutionContext.kt @@ -27,7 +27,6 @@ import org.opensearch.threadpool.ThreadPool import org.opensearch.transport.client.Client data class MonitorRunnerExecutionContext( - var clusterService: ClusterService? = null, var client: Client? = null, var xContentRegistry: NamedXContentRegistry? = null, @@ -45,28 +44,26 @@ data class MonitorRunnerExecutionContext( var jvmStats: JvmStats? = null, var findingsToTriggeredQueries: Map>? = null, var remoteMonitors: Map = mapOf(), - @Volatile var retryPolicy: BackoffPolicy? = null, @Volatile var moveAlertsRetryPolicy: BackoffPolicy? = null, - @Volatile var allowList: List = DestinationSettings.ALLOW_LIST_NONE, @Volatile var hostDenyList: List = LegacyOpenDistroDestinationSettings.HOST_DENY_LIST_NONE, - @Volatile var destinationSettings: Map? = null, @Volatile var destinationContextFactory: DestinationContextFactory? = null, - @Volatile var maxActionableAlertCount: Long = AlertingSettings.DEFAULT_MAX_ACTIONABLE_ALERT_COUNT, @Volatile var indexTimeout: TimeValue? = null, @Volatile var cancelAfterTimeInterval: TimeValue? 
= null, @Volatile var findingsIndexBatchSize: Int = AlertingSettings.DEFAULT_FINDINGS_INDEXING_BATCH_SIZE, @Volatile var fetchOnlyQueryFieldNames: Boolean = true, @Volatile var percQueryMaxNumDocsInMemory: Int = AlertingSettings.DEFAULT_PERCOLATE_QUERY_NUM_DOCS_IN_MEMORY, - @Volatile var docLevelMonitorFanoutMaxDuration: TimeValue = TimeValue.timeValueMinutes( - AlertingSettings.DEFAULT_MAX_DOC_LEVEL_MONITOR_FANOUT_MAX_DURATION_MINUTES - ), - @Volatile var docLevelMonitorExecutionMaxDuration: TimeValue = TimeValue.timeValueMinutes( - AlertingSettings.DEFAULT_MAX_DOC_LEVEL_MONITOR_EXECUTION_MAX_DURATION_MINUTES - ), + @Volatile var docLevelMonitorFanoutMaxDuration: TimeValue = + TimeValue.timeValueMinutes( + AlertingSettings.DEFAULT_MAX_DOC_LEVEL_MONITOR_FANOUT_MAX_DURATION_MINUTES, + ), + @Volatile var docLevelMonitorExecutionMaxDuration: TimeValue = + TimeValue.timeValueMinutes( + AlertingSettings.DEFAULT_MAX_DOC_LEVEL_MONITOR_EXECUTION_MAX_DURATION_MINUTES, + ), @Volatile var percQueryDocsSizeMemoryPercentageLimit: Int = AlertingSettings.DEFAULT_PERCOLATE_QUERY_DOCS_SIZE_MEMORY_PERCENTAGE_LIMIT, @Volatile var docLevelMonitorShardFetchSize: Int = diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerService.kt index f9fb05948..101f33743 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorRunnerService.kt @@ -99,7 +99,6 @@ import java.util.UUID import kotlin.coroutines.CoroutineContext object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleComponent() { - private val logger = LogManager.getLogger(javaClass) var monitorCtx: MonitorRunnerExecutionContext = MonitorRunnerExecutionContext() @@ -189,10 +188,11 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon // Must be called after registerClusterService and registerSettings in 
AlertingPlugin fun registerConsumers(): MonitorRunnerService { - monitorCtx.retryPolicy = BackoffPolicy.constantBackoff( - ALERT_BACKOFF_MILLIS.get(monitorCtx.settings), - ALERT_BACKOFF_COUNT.get(monitorCtx.settings) - ) + monitorCtx.retryPolicy = + BackoffPolicy.constantBackoff( + ALERT_BACKOFF_MILLIS.get(monitorCtx.settings), + ALERT_BACKOFF_COUNT.get(monitorCtx.settings), + ) monitorCtx.cancelAfterTimeInterval = SEARCH_CANCEL_AFTER_TIME_INTERVAL_SETTING.get(monitorCtx.settings) @@ -203,11 +203,11 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon monitorCtx.moveAlertsRetryPolicy = BackoffPolicy.exponentialBackoff( MOVE_ALERTS_BACKOFF_MILLIS.get(monitorCtx.settings), - MOVE_ALERTS_BACKOFF_COUNT.get(monitorCtx.settings) + MOVE_ALERTS_BACKOFF_COUNT.get(monitorCtx.settings), ) monitorCtx.clusterService!!.clusterSettings.addSettingsUpdateConsumer( MOVE_ALERTS_BACKOFF_MILLIS, - MOVE_ALERTS_BACKOFF_COUNT + MOVE_ALERTS_BACKOFF_COUNT, ) { millis, count -> monitorCtx.moveAlertsRetryPolicy = BackoffPolicy.exponentialBackoff(millis, count) } @@ -257,14 +257,16 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon monitorCtx.percQueryDocsSizeMemoryPercentageLimit = PERCOLATE_QUERY_DOCS_SIZE_MEMORY_PERCENTAGE_LIMIT.get(monitorCtx.settings) - monitorCtx.clusterService!!.clusterSettings + monitorCtx.clusterService!! + .clusterSettings .addSettingsUpdateConsumer(PERCOLATE_QUERY_DOCS_SIZE_MEMORY_PERCENTAGE_LIMIT) { monitorCtx.percQueryDocsSizeMemoryPercentageLimit = it } monitorCtx.docLevelMonitorShardFetchSize = DOC_LEVEL_MONITOR_SHARD_FETCH_SIZE.get(monitorCtx.settings) - monitorCtx.clusterService!!.clusterSettings + monitorCtx.clusterService!! 
+ .clusterSettings .addSettingsUpdateConsumer(DOC_LEVEL_MONITOR_SHARD_FETCH_SIZE) { monitorCtx.docLevelMonitorShardFetchSize = it } @@ -378,7 +380,11 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon } } - override fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant) { + override fun runJob( + job: ScheduledJob, + periodStart: Instant, + periodEnd: Instant, + ) { when (job) { is Workflow -> { launch { @@ -397,9 +403,9 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon TimeValue(periodEnd.toEpochMilli()), job.id, job, - TimeValue(periodStart.toEpochMilli()) + TimeValue(periodStart.toEpochMilli()), ), - it + it, ) } } catch (e: Exception) { @@ -410,6 +416,7 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon } } } + is Monitor -> { launch { var monitorLock: LockModel? = null @@ -420,20 +427,24 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon logger.debug("lock ${monitorLock.lockId} acquired") logger.debug( "PERF_DEBUG: executing ${job.monitorType} ${job.id} on node " + - monitorCtx.clusterService!!.state().nodes().localNode.id - ) - val executeMonitorRequest = ExecuteMonitorRequest( - false, - TimeValue(periodEnd.toEpochMilli()), - job.id, - job, - TimeValue(periodStart.toEpochMilli()) + monitorCtx.clusterService!! 
+ .state() + .nodes() + .localNode.id, ) + val executeMonitorRequest = + ExecuteMonitorRequest( + false, + TimeValue(periodEnd.toEpochMilli()), + job.id, + job, + TimeValue(periodStart.toEpochMilli()), + ) monitorCtx.client!!.suspendUntil { monitorCtx.client!!.execute( ExecuteMonitorAction.INSTANCE, executeMonitorRequest, - it + it, ) } } catch (e: Exception) { @@ -444,6 +455,7 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon } } } + is MonitorV2 -> { if (job !is PPLSQLMonitor) { throw IllegalStateException("Invalid MonitorV2 type: ${job.javaClass.name}") @@ -458,20 +470,24 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon logger.debug("lock ${monitorLock!!.lockId} acquired") logger.debug( "PERF_DEBUG: executing $PPL_SQL_MONITOR_TYPE ${job.id} on node " + - monitorCtx.clusterService!!.state().nodes().localNode.id - ) - val executeMonitorV2Request = ExecuteMonitorV2Request( - false, - false, - job.id, // only need to pass in MonitorV2 ID - null, // no need to pass in MonitorV2 object itself - TimeValue(periodEnd.toEpochMilli()) + monitorCtx.clusterService!! 
+ .state() + .nodes() + .localNode.id, ) + val executeMonitorV2Request = + ExecuteMonitorV2Request( + false, + false, + job.id, // only need to pass in MonitorV2 ID + null, // no need to pass in MonitorV2 object itself + TimeValue(periodEnd.toEpochMilli()), + ) monitorCtx.client!!.suspendUntil { monitorCtx.client!!.execute( ExecuteMonitorV2Action.INSTANCE, executeMonitorV2Request, - it + it, ) } } catch (e: Exception) { @@ -482,6 +498,7 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon } } } + else -> { throw IllegalArgumentException("Invalid job type") } @@ -493,17 +510,15 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon periodStart: Instant, periodEnd: Instant, dryrun: Boolean, - transportService: TransportService - ): WorkflowRunResult { - return CompositeWorkflowRunner.runWorkflow(workflow, monitorCtx, periodStart, periodEnd, dryrun, transportService) - } + transportService: TransportService, + ): WorkflowRunResult = CompositeWorkflowRunner.runWorkflow(workflow, monitorCtx, periodStart, periodEnd, dryrun, transportService) suspend fun runJob( job: ScheduledJob, periodStart: Instant, periodEnd: Instant, dryrun: Boolean, - transportService: TransportService + transportService: TransportService, ): MonitorRunResult<*> { // Updating the scheduled job index at the start of monitor execution runs for when there is an upgrade the the schema mapping // has not been updated. 
@@ -526,44 +541,45 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon throw AlertingException( "Index patterns are not supported in doc level monitors.", RestStatus.BAD_REQUEST, - IllegalArgumentException("Index patterns are not supported in doc level monitors.") + IllegalArgumentException("Index patterns are not supported in doc level monitors."), ) } logger.info( "Executing scheduled monitor - id: ${monitor.id}, type: ${monitor.monitorType}, periodStart: $periodStart, " + - "periodEnd: $periodEnd, dryrun: $dryrun, executionId: $executionId" + "periodEnd: $periodEnd, dryrun: $dryrun, executionId: $executionId", ) - val runResult = if (monitor.isBucketLevelMonitor()) { - BucketLevelMonitorRunner.runMonitor( - monitor, - monitorCtx, - periodStart, - periodEnd, - dryrun, - executionId = executionId, - transportService = transportService - ) - } else if (monitor.isDocLevelMonitor()) { - DocumentLevelMonitorRunner().runMonitor( - monitor, - monitorCtx, - periodStart, - periodEnd, - dryrun, - executionId = executionId, - transportService = transportService - ) - } else { - QueryLevelMonitorRunner.runMonitor( - monitor, - monitorCtx, - periodStart, - periodEnd, - dryrun, - executionId = executionId, - transportService = transportService - ) - } + val runResult = + if (monitor.isBucketLevelMonitor()) { + BucketLevelMonitorRunner.runMonitor( + monitor, + monitorCtx, + periodStart, + periodEnd, + dryrun, + executionId = executionId, + transportService = transportService, + ) + } else if (monitor.isDocLevelMonitor()) { + DocumentLevelMonitorRunner().runMonitor( + monitor, + monitorCtx, + periodStart, + periodEnd, + dryrun, + executionId = executionId, + transportService = transportService, + ) + } else { + QueryLevelMonitorRunner.runMonitor( + monitor, + monitorCtx, + periodStart, + periodEnd, + dryrun, + executionId = executionId, + transportService = transportService, + ) + } return runResult } else { if 
(monitorCtx.remoteMonitors.containsKey(monitor.monitorType)) { @@ -576,7 +592,7 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon periodEnd, dryrun, executionId = executionId, - transportService = transportService + transportService = transportService, ) } else { logger.info("Executing remote monitor of type ${monitor.monitorType} id ${monitor.id}") @@ -586,7 +602,7 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon periodEnd, dryrun, executionId, - transportService + transportService, ) } } else { @@ -594,7 +610,7 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon monitor.name, periodStart, periodEnd, - OpenSearchStatusException("Monitor Type ${monitor.monitorType} not known", RestStatus.BAD_REQUEST) + OpenSearchStatusException("Monitor Type ${monitor.monitorType} not known", RestStatus.BAD_REQUEST), ) } } @@ -613,28 +629,30 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon updateAlertingConfigIndexSchema() val executionId = "${monitorV2.id}_${LocalDateTime.now(ZoneOffset.UTC)}_${UUID.randomUUID()}" - val monitorV2Type = when (monitorV2) { - is PPLSQLMonitor -> PPL_SQL_MONITOR_TYPE - else -> throw IllegalStateException("Unexpected MonitorV2 type: ${monitorV2.javaClass.name}") - } + val monitorV2Type = + when (monitorV2) { + is PPLSQLMonitor -> PPL_SQL_MONITOR_TYPE + else -> throw IllegalStateException("Unexpected MonitorV2 type: ${monitorV2.javaClass.name}") + } logger.info( "Executing scheduled monitor v2 - id: ${monitorV2.id}, type: $monitorV2Type, " + - "periodEnd: $periodEnd, dryrun: $dryrun, manual: $manual, executionId: $executionId" + "periodEnd: $periodEnd, dryrun: $dryrun, manual: $manual, executionId: $executionId", ) // for now, always call PPLSQLMonitorRunner since only PPL Monitors are initially supported // to introduce new MonitorV2 type, create its MonitorRunner, and if/else branch // to the corresponding 
MonitorRunners based on type. For now, default to PPLSQLMonitorRunner - val runResult = PPLSQLMonitorRunner.runMonitorV2( - monitorV2, - monitorCtx, - periodEnd, - dryrun, - manual, - executionId = executionId, - transportService = transportService, - ) + val runResult = + PPLSQLMonitorRunner.runMonitorV2( + monitorV2, + monitorCtx, + periodEnd, + dryrun, + manual, + executionId = executionId, + transportService = transportService, + ) return runResult } @@ -661,9 +679,13 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon // 'threadPool.absoluteTimeInMillis()' is referring to a cached value of System.currentTimeMillis() that by default updates every 200ms internal fun currentTime() = Instant.ofEpochMilli(monitorCtx.threadPool!!.absoluteTimeInMillis()) - internal fun isActionActionable(action: Action, alert: Alert?): Boolean { - if (alert != null && alert.state == Alert.State.AUDIT) + internal fun isActionActionable( + action: Action, + alert: Alert?, + ): Boolean { + if (alert != null && alert.state == Alert.State.AUDIT) { return false + } if (alert == null || action.throttle == null) { return true } @@ -676,23 +698,31 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon return true } - internal fun compileTemplate(template: Script, ctx: TriggerExecutionContext): String { - return monitorCtx.scriptService!!.compile(template, TemplateScript.CONTEXT) + internal fun compileTemplate( + template: Script, + ctx: TriggerExecutionContext, + ): String = + monitorCtx.scriptService!! + .compile(template, TemplateScript.CONTEXT) .newInstance(template.params + mapOf("ctx" to ctx.asTemplateArg())) .execute() - } - internal fun compileTemplateV2(template: Script, ctx: TriggerV2ExecutionContext): String { - return monitorCtx.scriptService!!.compile(template, TemplateScript.CONTEXT) + internal fun compileTemplateV2( + template: Script, + ctx: TriggerV2ExecutionContext, + ): String = + monitorCtx.scriptService!! 
+ .compile(template, TemplateScript.CONTEXT) .newInstance(template.params + mapOf("ctx" to ctx.asTemplateArg())) .execute() - } private fun updateAlertingConfigIndexSchema() { if (!IndexUtils.scheduledJobIndexUpdated && monitorCtx.clusterService != null && monitorCtx.client != null) { IndexUtils.updateIndexMapping( ScheduledJob.SCHEDULED_JOBS_INDEX, - ScheduledJobIndices.scheduledJobMappings(), monitorCtx.clusterService!!.state(), monitorCtx.client!!.admin().indices(), + ScheduledJobIndices.scheduledJobMappings(), + monitorCtx.clusterService!!.state(), + monitorCtx.client!!.admin().indices(), object : ActionListener { override fun onResponse(response: AcknowledgedResponse) { } @@ -700,7 +730,7 @@ object MonitorRunnerService : JobRunner, CoroutineScope, AbstractLifecycleCompon override fun onFailure(t: Exception) { logger.error("Failed to update config index schema", t) } - } + }, ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorV2Runner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorV2Runner.kt index ccf933148..a8d13599a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/MonitorV2Runner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/MonitorV2Runner.kt @@ -24,6 +24,6 @@ interface MonitorV2Runner { dryRun: Boolean, manual: Boolean, executionId: String, - transportService: TransportService + transportService: TransportService, ): MonitorV2RunResult<*> } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/PPLSQLMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/PPLSQLMonitorRunner.kt index d183389ff..d37f2dcba 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/PPLSQLMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/PPLSQLMonitorRunner.kt @@ -120,27 +120,29 @@ object PPLSQLMonitorRunner : MonitorV2Runner { return PPLSQLMonitorRunResult(pplSqlMonitor.name, e, mapOf(), mapOf()) } - val timeFilteredQuery = if (pplSqlMonitor.lookBackWindow != null) { - 
logger.debug("look back window specified for PPL Monitor: ${monitorV2.id}, injecting look back window time filter") - // if lookback window is specified, inject a top level lookback window time filter - // into the PPL query - val lookBackWindow = pplSqlMonitor.lookBackWindow!! - val lookbackPeriodStart = periodEnd.minus(lookBackWindow, ChronoUnit.MINUTES) - val timeFilteredQuery = addTimeFilter(pplSqlMonitor.query, lookbackPeriodStart, periodEnd, pplSqlMonitor.timestampField!!) - logger.debug("time filtered query: $timeFilteredQuery") - timeFilteredQuery - } else { - logger.debug("look back window not specified for PPL Monitor: ${monitorV2.id}, proceeding with original base query") - // otherwise, don't inject any time filter whatsoever - // unless the query itself has user-specified time filters, this query - // will return all applicable data in the cluster - pplSqlMonitor.query - } + val timeFilteredQuery = + if (pplSqlMonitor.lookBackWindow != null) { + logger.debug("look back window specified for PPL Monitor: ${monitorV2.id}, injecting look back window time filter") + // if lookback window is specified, inject a top level lookback window time filter + // into the PPL query + val lookBackWindow = pplSqlMonitor.lookBackWindow!! + val lookbackPeriodStart = periodEnd.minus(lookBackWindow, ChronoUnit.MINUTES) + val timeFilteredQuery = addTimeFilter(pplSqlMonitor.query, lookbackPeriodStart, periodEnd, pplSqlMonitor.timestampField!!) + logger.debug("time filtered query: $timeFilteredQuery") + timeFilteredQuery + } else { + logger.debug("look back window not specified for PPL Monitor: ${monitorV2.id}, proceeding with original base query") + // otherwise, don't inject any time filter whatsoever + // unless the query itself has user-specified time filters, this query + // will return all applicable data in the cluster + pplSqlMonitor.query + } - val monitorExecutionDuration = monitorCtx - .clusterService!! 
- .clusterSettings - .get(AlertingSettings.ALERT_V2_MONITOR_EXECUTION_MAX_DURATION) + val monitorExecutionDuration = + monitorCtx + .clusterService!! + .clusterSettings + .get(AlertingSettings.ALERT_V2_MONITOR_EXECUTION_MAX_DURATION) // for storing any exception that may or may not happen // while executing monitor @@ -160,7 +162,7 @@ object PPLSQLMonitorRunner : MonitorV2Runner { executionId, monitorCtx, nodeClient, - transportService + transportService, ) } } catch (e: TimeoutCancellationException) { @@ -173,7 +175,7 @@ object PPLSQLMonitorRunner : MonitorV2Runner { generateErrorAlert(null, pplSqlMonitor, e, executionId, timeOfCurrentExecution), pplSqlMonitor, it, - nodeClient + nodeClient, ) } @@ -196,7 +198,7 @@ object PPLSQLMonitorRunner : MonitorV2Runner { pplSqlMonitor.name, exception, triggerResults, - pplSqlQueryResults + pplSqlQueryResults, ) } @@ -211,7 +213,7 @@ object PPLSQLMonitorRunner : MonitorV2Runner { executionId: String, monitorCtx: MonitorRunnerExecutionContext, nodeClient: NodeClient, - transportService: TransportService + transportService: TransportService, ) { for (pplSqlTrigger in pplSqlMonitor.triggers) { try { @@ -230,11 +232,12 @@ object PPLSQLMonitorRunner : MonitorV2Runner { logger.debug("checking if custom condition is used and appending to base query") // if trigger uses custom condition, append the custom condition to query, otherwise simply proceed - val queryToExecute = if (pplSqlTrigger.conditionType == ConditionType.NUMBER_OF_RESULTS) { // number of results trigger - timeFilteredQuery - } else { // custom condition trigger - appendCustomCondition(timeFilteredQuery, pplSqlTrigger.customCondition!!) - } + val queryToExecute = + if (pplSqlTrigger.conditionType == ConditionType.NUMBER_OF_RESULTS) { // number of results trigger + timeFilteredQuery + } else { // custom condition trigger + appendCustomCondition(timeFilteredQuery, pplSqlTrigger.customCondition!!) 
+ } // limit the number of PPL query result data rows returned val dataRowsLimit = monitorCtx.clusterService!!.clusterSettings.get(AlertingSettings.ALERTING_V2_QUERY_RESULTS_MAX_DATAROWS) @@ -248,23 +251,26 @@ object PPLSQLMonitorRunner : MonitorV2Runner { logger.debug("executing the PPL query of monitor: ${pplSqlMonitor.id}") // execute the PPL query - val (queryResponseJson, timeTaken) = measureTimedValue { - withClosableContext( - InjectorContextElement( - pplSqlMonitor.id, - monitorCtx.settings!!, - monitorCtx.threadPool!!.threadContext, - pplSqlMonitor.user?.roles, - pplSqlMonitor.user - ) - ) { - executePplQuery( - limitedQueryToExecute, - monitorCtx.clusterService!!.state().nodes.localNode, - transportService - ) + val (queryResponseJson, timeTaken) = + measureTimedValue { + withClosableContext( + InjectorContextElement( + pplSqlMonitor.id, + monitorCtx.settings!!, + monitorCtx.threadPool!!.threadContext, + pplSqlMonitor.user?.roles, + pplSqlMonitor.user, + ), + ) { + executePplQuery( + limitedQueryToExecute, + monitorCtx.clusterService!! + .state() + .nodes.localNode, + transportService, + ) + } } - } logger.debug("query results for trigger ${pplSqlTrigger.id}: $queryResponseJson") logger.debug("time taken to execute query against sql/ppl plugin: $timeTaken") @@ -279,11 +285,12 @@ object PPLSQLMonitorRunner : MonitorV2Runner { pplSqlQueryResults[pplSqlTrigger.id] = queryResponseJson.toMap() // determine if the trigger condition has been met - val triggered = if (pplSqlTrigger.conditionType == ConditionType.NUMBER_OF_RESULTS) { // number of results trigger - evaluateNumResultsTrigger(queryResponseJson, pplSqlTrigger.numResultsCondition!!, pplSqlTrigger.numResultsValue!!) - } else { // custom condition trigger - evaluateCustomTrigger(queryResponseJson, pplSqlTrigger.customCondition!!) 
- } + val triggered = + if (pplSqlTrigger.conditionType == ConditionType.NUMBER_OF_RESULTS) { // number of results trigger + evaluateNumResultsTrigger(queryResponseJson, pplSqlTrigger.numResultsCondition!!, pplSqlTrigger.numResultsValue!!) + } else { // custom condition trigger + evaluateCustomTrigger(queryResponseJson, pplSqlTrigger.customCondition!!) + } logger.debug("PPLTrigger ${pplSqlTrigger.name} with ID ${pplSqlTrigger.id} triggered: $triggered") @@ -307,13 +314,14 @@ object PPLSQLMonitorRunner : MonitorV2Runner { // if this trigger is on result_set mode, this list contains exactly 1 alert // if this trigger is on per_result mode, this list has as many alerts as there are // trigger condition-meeting query results - val thisTriggersGeneratedAlerts = generateAlerts( - pplSqlTrigger, - pplSqlMonitor, - preparedQueryResults, - executionId, - timeOfCurrentExecution - ) + val thisTriggersGeneratedAlerts = + generateAlerts( + pplSqlTrigger, + pplSqlMonitor, + preparedQueryResults, + executionId, + timeOfCurrentExecution, + ) // for future throttle checks, update the trigger's last execution time // in the monitor object stored in memory @@ -322,19 +330,20 @@ object PPLSQLMonitorRunner : MonitorV2Runner { // send alert notifications for (action in pplSqlTrigger.actions) { for (queryResult in preparedQueryResults) { - val pplTriggerExecutionContext = PPLTriggerExecutionContext( - pplSqlMonitor, - null, - pplSqlTrigger, - queryResult - ) + val pplTriggerExecutionContext = + PPLTriggerExecutionContext( + pplSqlMonitor, + null, + pplSqlTrigger, + queryResult, + ) runAction( action, pplTriggerExecutionContext, monitorCtx, pplSqlMonitor, - dryRun + dryRun, ) } } @@ -350,7 +359,7 @@ object PPLSQLMonitorRunner : MonitorV2Runner { logger.error( "failed to run PPL Trigger ${pplSqlTrigger.name} (id: ${pplSqlTrigger.id} " + "from PPL Monitor ${pplSqlMonitor.name} (id: ${pplSqlMonitor.id}", - e + e, ) // generate an alert with an error message @@ -359,7 +368,7 @@ object 
PPLSQLMonitorRunner : MonitorV2Runner { generateErrorAlert(pplSqlTrigger, pplSqlMonitor, e, executionId, timeOfCurrentExecution), pplSqlMonitor, it, - nodeClient + nodeClient, ) } } @@ -367,7 +376,11 @@ object PPLSQLMonitorRunner : MonitorV2Runner { } // returns true if the pplTrigger should be throttled - private fun checkForThrottle(pplTrigger: PPLSQLTrigger, timeOfCurrentExecution: Instant, manual: Boolean): Boolean { + private fun checkForThrottle( + pplTrigger: PPLSQLTrigger, + timeOfCurrentExecution: Instant, + manual: Boolean, + ): Boolean { // manual calls from the user to execute a monitor should never be throttled if (manual) { return false @@ -375,9 +388,10 @@ object PPLSQLMonitorRunner : MonitorV2Runner { // the interval between throttledTimeBound and now is the throttle window // i.e. any PPLTrigger whose last trigger time is in this window must be throttled - val throttleTimeBound = pplTrigger.throttleDuration?.let { - timeOfCurrentExecution.minus(pplTrigger.throttleDuration, ChronoUnit.MINUTES) - } + val throttleTimeBound = + pplTrigger.throttleDuration?.let { + timeOfCurrentExecution.minus(pplTrigger.throttleDuration, ChronoUnit.MINUTES) + } // the trigger must be throttled if... 
return pplTrigger.throttleDuration != null && // throttling is enabled on the PPLTrigger @@ -390,27 +404,34 @@ object PPLSQLMonitorRunner : MonitorV2Runner { // lookbackPeriodStart: the lower bound of the query interval based on monitor schedule and look back window // periodEnd: the upper bound of the initially computed query interval based on monitor schedule // timestampField: the timestamp field that will be used to time bound the query results - private fun addTimeFilter(query: String, lookbackPeriodStart: Instant, periodEnd: Instant, timestampField: String): String { + private fun addTimeFilter( + query: String, + lookbackPeriodStart: Instant, + periodEnd: Instant, + timestampField: String, + ): String { // PPL plugin only accepts timestamp strings in this format val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss", Locale.ROOT).withZone(UTC) val periodStartPplTimestamp = formatter.format(lookbackPeriodStart) val periodEndPplTimeStamp = formatter.format(periodEnd) - val timeFilterAppend = "| where $timestampField > TIMESTAMP('$periodStartPplTimestamp') and " + - "$timestampField < TIMESTAMP('$periodEndPplTimeStamp')" + val timeFilterAppend = + "| where $timestampField > TIMESTAMP('$periodStartPplTimestamp') and " + + "$timestampField < TIMESTAMP('$periodEndPplTimeStamp')" val timeFilterReplace = "$timeFilterAppend |" - val timeFilteredQuery: String = if (query.contains("|")) { - // if Monitor query contains piped statements, inject the time filter - // as the first piped statement (i.e. 
before more complex statements - // like aggregations can take effect later in the query) - query.replaceFirst("|", timeFilterReplace) - } else { - // otherwise the query contains no piped statements and is simply a - // `search source=` statement, simply append time filter at the end - query + timeFilterAppend - } + val timeFilteredQuery: String = + if (query.contains("|")) { + // if Monitor query contains piped statements, inject the time filter + // as the first piped statement (i.e. before more complex statements + // like aggregations can take effect later in the query) + query.replaceFirst("|", timeFilterReplace) + } else { + // otherwise the query contains no piped statements and is simply a + // `search source=` statement, simply append time filter at the end + query + timeFilterAppend + } return timeFilteredQuery } @@ -418,7 +439,7 @@ object PPLSQLMonitorRunner : MonitorV2Runner { private fun evaluateNumResultsTrigger( pplQueryResponse: JSONObject, numResultsCondition: NumResultsCondition, - numResultsValue: Long + numResultsValue: Long, ): Boolean { val numResults = pplQueryResponse.getLong("total") return when (numResultsCondition) { @@ -431,7 +452,10 @@ object PPLSQLMonitorRunner : MonitorV2Runner { } } - private fun evaluateCustomTrigger(pplQueryResponse: JSONObject, customCondition: String): Boolean { + private fun evaluateCustomTrigger( + pplQueryResponse: JSONObject, + customCondition: String, + ): Boolean { // find the name of the eval result variable defined in custom condition val evalResultVarName = findEvalResultVar(customCondition) @@ -459,7 +483,7 @@ object PPLSQLMonitorRunner : MonitorV2Runner { pplTrigger: PPLSQLTrigger, pplQueryResults: JSONObject, maxQueryResultsSize: Long, - maxAlerts: Int + maxAlerts: Int, ): List { // case: result set // return the results as a single set of all the results @@ -504,7 +528,7 @@ object PPLSQLMonitorRunner : MonitorV2Runner { individualRows: MutableList, pplQueryResults: JSONObject, i: Int, - 
maxQueryResultsSize: Long + maxQueryResultsSize: Long, ) { val individualRow = JSONObject() individualRow.put("total", 1) // set the size explicitly to 1 for consistency @@ -513,8 +537,8 @@ object PPLSQLMonitorRunner : MonitorV2Runner { individualRow.put( "datarows", JSONArray().put( - JSONArray(pplQueryResults.getJSONArray("datarows").getJSONArray(i).toList()) - ) + JSONArray(pplQueryResults.getJSONArray("datarows").getJSONArray(i).toList()), + ), ) val sizeCappedIndividualRow = capPPLQueryResultsSize(individualRow, maxQueryResultsSize) individualRows.add(sizeCappedIndividualRow) @@ -525,23 +549,24 @@ object PPLSQLMonitorRunner : MonitorV2Runner { pplSqlMonitor: PPLSQLMonitor, preparedQueryResults: List, executionId: String, - timeOfCurrentExecution: Instant + timeOfCurrentExecution: Instant, ): List { val alertV2s = mutableListOf() for (queryResult in preparedQueryResults) { - val alertV2 = AlertV2( - monitorId = pplSqlMonitor.id, - monitorName = pplSqlMonitor.name, - monitorVersion = pplSqlMonitor.version, - monitorUser = pplSqlMonitor.user, - triggerId = pplSqlTrigger.id, - triggerName = pplSqlTrigger.name, - query = pplSqlMonitor.query, - queryResults = queryResult.toMap(), - triggeredTime = timeOfCurrentExecution, - severity = pplSqlTrigger.severity, - executionId = executionId - ) + val alertV2 = + AlertV2( + monitorId = pplSqlMonitor.id, + monitorName = pplSqlMonitor.name, + monitorVersion = pplSqlMonitor.version, + monitorUser = pplSqlMonitor.user, + triggerId = pplSqlTrigger.id, + triggerName = pplSqlTrigger.name, + query = pplSqlMonitor.query, + queryResults = queryResult.toMap(), + triggeredTime = timeOfCurrentExecution, + severity = pplSqlTrigger.severity, + executionId = executionId, + ) alertV2s.add(alertV2) } @@ -553,26 +578,28 @@ object PPLSQLMonitorRunner : MonitorV2Runner { pplSqlMonitor: PPLSQLMonitor, exception: Exception, executionId: String, - timeOfCurrentExecution: Instant + timeOfCurrentExecution: Instant, ): List { - val errorMessage = 
"Failed to run PPL Monitor ${pplSqlMonitor.id}, PPL Trigger ${pplSqlTrigger?.id}: " + - exception.userErrorMessage() + val errorMessage = + "Failed to run PPL Monitor ${pplSqlMonitor.id}, PPL Trigger ${pplSqlTrigger?.id}: " + + exception.userErrorMessage() val obfuscatedErrorMessage = AlertError.obfuscateIPAddresses(errorMessage) - val alertV2 = AlertV2( - monitorId = pplSqlMonitor.id, - monitorName = pplSqlMonitor.name, - monitorVersion = pplSqlMonitor.version, - monitorUser = pplSqlMonitor.user, - triggerId = pplSqlTrigger?.id ?: "", - triggerName = pplSqlTrigger?.name ?: "", - query = pplSqlMonitor.query, - queryResults = mapOf(), - triggeredTime = timeOfCurrentExecution, - errorMessage = obfuscatedErrorMessage, - severity = Severity.ERROR, - executionId = executionId - ) + val alertV2 = + AlertV2( + monitorId = pplSqlMonitor.id, + monitorName = pplSqlMonitor.name, + monitorVersion = pplSqlMonitor.version, + monitorUser = pplSqlMonitor.user, + triggerId = pplSqlTrigger?.id ?: "", + triggerName = pplSqlTrigger?.name ?: "", + query = pplSqlMonitor.query, + queryResults = mapOf(), + triggeredTime = timeOfCurrentExecution, + errorMessage = obfuscatedErrorMessage, + severity = Severity.ERROR, + executionId = executionId, + ) return listOf(alertV2) } @@ -581,18 +608,19 @@ object PPLSQLMonitorRunner : MonitorV2Runner { alerts: List, pplSqlMonitor: PPLSQLMonitor, retryPolicy: BackoffPolicy, - client: NodeClient + client: NodeClient, ) { logger.debug("received alerts: $alerts") - var requestsToRetry = alerts.flatMap { alert -> - listOf>( - IndexRequest(AlertV2Indices.ALERT_V2_INDEX) - .routing(pplSqlMonitor.id) // set routing ID to PPL Monitor ID - .source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) - .id(if (alert.id != Alert.NO_ID) alert.id else null) - ) - } + var requestsToRetry = + alerts.flatMap { alert -> + listOf>( + IndexRequest(AlertV2Indices.ALERT_V2_INDEX) + .routing(pplSqlMonitor.id) // set routing ID to PPL Monitor ID + 
.source(alert.toXContentWithUser(XContentFactory.jsonBuilder())) + .id(if (alert.id != Alert.NO_ID) alert.id else null), + ) + } if (requestsToRetry.isEmpty()) return // Retry Bulk requests if there was any 429 response @@ -603,8 +631,10 @@ object PPLSQLMonitorRunner : MonitorV2Runner { failedResponses.forEach { logger.debug("write alerts failed responses: ${it.failureMessage}") } - requestsToRetry = failedResponses.filter { it.status() == RestStatus.TOO_MANY_REQUESTS } - .map { bulkRequest.requests()[it.itemId] as IndexRequest } + requestsToRetry = + failedResponses + .filter { it.status() == RestStatus.TOO_MANY_REQUESTS } + .map { bulkRequest.requests()[it.itemId] as IndexRequest } if (requestsToRetry.isNotEmpty()) { val retryCause = failedResponses.first { it.status() == RestStatus.TOO_MANY_REQUESTS }.failure.cause @@ -616,20 +646,23 @@ object PPLSQLMonitorRunner : MonitorV2Runner { // during monitor execution, the ppl sql monitor object stored in memory had its triggers updated // with their last trigger times. 
this function simply indexes those updated triggers into the // alerting-config index - private suspend fun updateMonitorWithLastTriggeredTimes(pplSqlMonitor: PPLSQLMonitor, client: NodeClient) { - val indexRequest = IndexRequest(SCHEDULED_JOBS_INDEX) - .id(pplSqlMonitor.id) - .source( - pplSqlMonitor.toXContentWithUser( - XContentFactory.jsonBuilder(), - ToXContent.MapParams( - mapOf("with_type" to "true") - ) - ) - ) - .routing(pplSqlMonitor.id) - .version(pplSqlMonitor.version) - .versionType(VersionType.EXTERNAL_GTE) + private suspend fun updateMonitorWithLastTriggeredTimes( + pplSqlMonitor: PPLSQLMonitor, + client: NodeClient, + ) { + val indexRequest = + IndexRequest(SCHEDULED_JOBS_INDEX) + .id(pplSqlMonitor.id) + .source( + pplSqlMonitor.toXContentWithUser( + XContentFactory.jsonBuilder(), + ToXContent.MapParams( + mapOf("with_type" to "true"), + ), + ), + ).routing(pplSqlMonitor.id) + .version(pplSqlMonitor.version) + .versionType(VersionType.EXTERNAL_GTE) val indexResponse = client.suspendUntil { index(indexRequest, it) } @@ -641,14 +674,17 @@ object PPLSQLMonitorRunner : MonitorV2Runner { triggerCtx: PPLTriggerExecutionContext, monitorCtx: MonitorRunnerExecutionContext, pplSqlMonitor: PPLSQLMonitor, - dryrun: Boolean + dryrun: Boolean, ) { // this function can throw an exception, which is caught by the try // catch in runMonitor() to generate an error alert - val notifSubject = if (action.subjectTemplate != null) - MonitorRunnerService.compileTemplateV2(action.subjectTemplate!!, triggerCtx) - else "" + val notifSubject = + if (action.subjectTemplate != null) { + MonitorRunnerService.compileTemplateV2(action.subjectTemplate!!, triggerCtx) + } else { + "" + } var notifMessage = MonitorRunnerService.compileTemplateV2(action.messageTemplate, triggerCtx) if (Strings.isNullOrEmpty(notifMessage)) { @@ -663,14 +699,14 @@ object PPLSQLMonitorRunner : MonitorV2Runner { monitorCtx.settings!!, monitorCtx.threadPool!!.threadContext, pplSqlMonitor.user?.roles, - 
pplSqlMonitor.user - ) + pplSqlMonitor.user, + ), ) { getConfigAndSendNotification( action, monitorCtx, notifSubject, - notifMessage + notifMessage, ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/PPLUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/PPLUtils.kt index 0bb4babc8..f6a1459bc 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/PPLUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/PPLUtils.kt @@ -14,9 +14,8 @@ import org.opensearch.sql.plugin.transport.TransportPPLQueryRequest import org.opensearch.transport.TransportService object PPLUtils { - // TODO: these are in-house PPL query parsers, find a PPL plugin dependency that does this for us - /* Regular Expressions */ + // Regular Expressions // captures the name of the result variable in a PPL monitor's custom condition // e.g. custom condition: `eval apple = avg_latency > 100` // captures: "apple" @@ -51,9 +50,10 @@ object PPLUtils { * It is assumed that upstream workflows have already validated the base query, * and that downstream workflows will validate the constructed query */ - fun appendCustomCondition(query: String, customCondition: String): String { - return "$query | $customCondition" - } + fun appendCustomCondition( + query: String, + customCondition: String, + ): String = "$query | $customCondition" /** * Appends a limit on the number of documents/data rows to retrieve from a PPL query. @@ -73,9 +73,10 @@ object PPLUtils { * // Returns: "source=logs | where status=error | head 100" * ``` */ - fun appendDataRowsLimit(query: String, maxDataRows: Long): String { - return "$query | head $maxDataRows" - } + fun appendDataRowsLimit( + query: String, + maxDataRows: Long, + ): String = "$query | head $maxDataRows" /** * Executes a PPL query and returns the response as a parsable JSONObject. 
@@ -95,23 +96,25 @@ object PPLUtils { suspend fun executePplQuery( query: String, localNode: DiscoveryNode, - transportService: TransportService + transportService: TransportService, ): JSONObject { // call PPL plugin to execute query - val transportPplQueryRequest = TransportPPLQueryRequest( - query, - JSONObject(mapOf("query" to query)), - null // null path falls back to a default path internal to SQL/PPL Plugin - ) - - val transportPplQueryResponse = PPLPluginInterface.suspendUntil { - this.executeQuery( - transportService, - localNode, - transportPplQueryRequest, - it + val transportPplQueryRequest = + TransportPPLQueryRequest( + query, + JSONObject(mapOf("query" to query)), + null, // null path falls back to a default path internal to SQL/PPL Plugin ) - } + + val transportPplQueryResponse = + PPLPluginInterface.suspendUntil { + this.executeQuery( + transportService, + localNode, + transportPplQueryRequest, + it, + ) + } val queryResponseJson = JSONObject(transportPplQueryResponse.result) @@ -148,8 +151,9 @@ object PPLUtils { */ fun findEvalResultVar(customCondition: String): String { // TODO: these are in-house PPL query parsers, find a PPL plugin dependency that does this for us - val evalResultVar = evalResultVarRegex.find(customCondition)?.groupValues?.get(1) - ?: throw IllegalArgumentException("Given custom condition is invalid, could not find eval result variable") + val evalResultVar = + evalResultVarRegex.find(customCondition)?.groupValues?.get(1) + ?: throw IllegalArgumentException("Given custom condition is invalid, could not find eval result variable") return evalResultVar } @@ -170,7 +174,10 @@ object PPLUtils { * executed successfully. If not found, this indicates an unexpected state. 
* @note The query response schema is assumed to follow PPL plugin Execute API response schema */ - fun findEvalResultVarIdxInSchema(customConditionQueryResponse: JSONObject, evalResultVarName: String): Int { + fun findEvalResultVarIdxInSchema( + customConditionQueryResponse: JSONObject, + evalResultVarName: String, + ): Int { // find the index eval statement result variable in the PPL query response schema val schemaList = customConditionQueryResponse.getJSONArray("schema") var evalResultVarIdx = -1 @@ -188,7 +195,7 @@ object PPLUtils { if (evalResultVarIdx == -1) { throw IllegalStateException( "Expected to find eval statement results variable \"$evalResultVarName\" in results " + - "of PPL query with custom condition, but did not." + "of PPL query with custom condition, but did not.", ) } @@ -227,11 +234,17 @@ object PPLUtils { fun getIndicesFromPplQuery(pplQuery: String): List { // use find() instead of findAll() because a PPL query only ever has one source statement // the only capture group specified in the regex captures the comma separated string of indices/index patterns - val indices = indicesListRegex.find(pplQuery)?.groupValues?.get(1)?.split(",")?.map { it.trim() } - ?: throw IllegalStateException( - "Could not find indices that PPL Monitor query searches even " + - "after validating the query through SQL/PPL plugin." 
- ) + val indices = + indicesListRegex + .find(pplQuery) + ?.groupValues + ?.get(1) + ?.split(",") + ?.map { it.trim() } + ?: throw IllegalStateException( + "Could not find indices that PPL Monitor query searches even " + + "after validating the query through SQL/PPL plugin.", + ) // remove any backticks that might have been read in val unBackTickedIndices = mutableListOf() @@ -276,7 +289,10 @@ object PPLUtils { * - `total`: Total number of result rows * - `size`: Same as `total` (redundant field in PPL response) */ - fun capPPLQueryResultsSize(pplQueryResults: JSONObject, maxSize: Long): JSONObject { + fun capPPLQueryResultsSize( + pplQueryResults: JSONObject, + maxSize: Long, + ): JSONObject { // estimate byte size with serialized string length // if query results size are already under the limit, do nothing // and return the query results as is diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt index c9855210e..26244a147 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/QueryLevelMonitorRunner.kt @@ -34,7 +34,7 @@ object QueryLevelMonitorRunner : MonitorRunner() { dryrun: Boolean, workflowRunContext: WorkflowRunContext?, executionId: String, - transportService: TransportService + transportService: TransportService, ): MonitorRunResult { val roles = MonitorRunnerService.getRolesForMonitor(monitor) logger.debug("Running monitor: ${monitor.name} with roles: $roles Thread: ${Thread.currentThread().name}") @@ -44,16 +44,17 @@ object QueryLevelMonitorRunner : MonitorRunner() { } var monitorResult = MonitorRunResult(monitor.name, periodStart, periodEnd) - val currentAlerts = try { - monitorCtx.alertIndices!!.createOrUpdateAlertIndex(monitor.dataSources) - monitorCtx.alertIndices!!.createOrUpdateInitialAlertHistoryIndex(monitor.dataSources) - 
monitorCtx.alertService!!.loadCurrentAlertsForQueryLevelMonitor(monitor, workflowRunContext) - } catch (e: Exception) { - // We can't save ERROR alerts to the index here as we don't know if there are existing ACTIVE alerts - val id = if (monitor.id.trim().isEmpty()) "_na_" else monitor.id - logger.error("Error loading alerts for monitor: $id", e) - return monitorResult.copy(error = e) - } + val currentAlerts = + try { + monitorCtx.alertIndices!!.createOrUpdateAlertIndex(monitor.dataSources) + monitorCtx.alertIndices!!.createOrUpdateInitialAlertHistoryIndex(monitor.dataSources) + monitorCtx.alertService!!.loadCurrentAlertsForQueryLevelMonitor(monitor, workflowRunContext) + } catch (e: Exception) { + // We can't save ERROR alerts to the index here as we don't know if there are existing ACTIVE alerts + val id = if (monitor.id.trim().isEmpty()) "_na_" else monitor.id + logger.error("Error loading alerts for monitor: $id", e) + return monitorResult.copy(error = e) + } if (!isADMonitor(monitor)) { withClosableContext( InjectorContextElement( @@ -61,17 +62,26 @@ object QueryLevelMonitorRunner : MonitorRunner() { monitorCtx.settings!!, monitorCtx.threadPool!!.threadContext, roles, - monitor.user - ) + monitor.user, + ), ) { - monitorResult = monitorResult.copy( - inputResults = monitorCtx.inputService!!.collectInputResults(monitor, periodStart, periodEnd, null, workflowRunContext) - ) + monitorResult = + monitorResult.copy( + inputResults = + monitorCtx.inputService!!.collectInputResults( + monitor, + periodStart, + periodEnd, + null, + workflowRunContext, + ), + ) } } else { - monitorResult = monitorResult.copy( - inputResults = monitorCtx.inputService!!.collectInputResultsForADMonitor(monitor, periodStart, periodEnd) - ) + monitorResult = + monitorResult.copy( + inputResults = monitorCtx.inputService!!.collectInputResultsForADMonitor(monitor, periodStart, periodEnd), + ) } val updatedAlerts = mutableListOf() @@ -79,31 +89,40 @@ object QueryLevelMonitorRunner : 
MonitorRunner() { val maxComments = monitorCtx.clusterService!!.clusterSettings.get(AlertingSettings.MAX_COMMENTS_PER_NOTIFICATION) val alertsToExecuteActionsForIds = currentAlerts.mapNotNull { it.value }.map { it.id } - val allAlertsComments = CommentsUtils.getCommentsForAlertNotification( - monitorCtx.client!!, - alertsToExecuteActionsForIds, - maxComments - ) + val allAlertsComments = + CommentsUtils.getCommentsForAlertNotification( + monitorCtx.client!!, + alertsToExecuteActionsForIds, + maxComments, + ) for (trigger in monitor.triggers) { val currentAlert = currentAlerts[trigger] - val currentAlertContext = currentAlert?.let { - AlertContext(alert = currentAlert, comments = allAlertsComments[currentAlert.id]) - } + val currentAlertContext = + currentAlert?.let { + AlertContext(alert = currentAlert, comments = allAlertsComments[currentAlert.id]) + } val triggerCtx = QueryLevelTriggerExecutionContext(monitor, trigger as QueryLevelTrigger, monitorResult, currentAlertContext) - val triggerResult = when (Monitor.MonitorType.valueOf(monitor.monitorType.uppercase(Locale.ROOT))) { - Monitor.MonitorType.QUERY_LEVEL_MONITOR -> - monitorCtx.triggerService!!.runQueryLevelTrigger(monitor, trigger, triggerCtx) - Monitor.MonitorType.CLUSTER_METRICS_MONITOR -> { - val remoteMonitoringEnabled = - monitorCtx.clusterService!!.clusterSettings.get(AlertingSettings.CROSS_CLUSTER_MONITORING_ENABLED) - logger.debug("Remote monitoring enabled: {}", remoteMonitoringEnabled) - if (remoteMonitoringEnabled) - monitorCtx.triggerService!!.runClusterMetricsTrigger(monitor, trigger, triggerCtx, monitorCtx.clusterService!!) 
- else monitorCtx.triggerService!!.runQueryLevelTrigger(monitor, trigger, triggerCtx) + val triggerResult = + when (Monitor.MonitorType.valueOf(monitor.monitorType.uppercase(Locale.ROOT))) { + Monitor.MonitorType.QUERY_LEVEL_MONITOR -> { + monitorCtx.triggerService!!.runQueryLevelTrigger(monitor, trigger, triggerCtx) + } + + Monitor.MonitorType.CLUSTER_METRICS_MONITOR -> { + val remoteMonitoringEnabled = + monitorCtx.clusterService!!.clusterSettings.get(AlertingSettings.CROSS_CLUSTER_MONITORING_ENABLED) + logger.debug("Remote monitoring enabled: {}", remoteMonitoringEnabled) + if (remoteMonitoringEnabled) { + monitorCtx.triggerService!!.runClusterMetricsTrigger(monitor, trigger, triggerCtx, monitorCtx.clusterService!!) + } else { + monitorCtx.triggerService!!.runQueryLevelTrigger(monitor, trigger, triggerCtx) + } + } + + else -> { + throw IllegalArgumentException("Unsupported monitor type: ${monitor.monitorType}.") + } } - else -> - throw IllegalArgumentException("Unsupported monitor type: ${monitor.monitorType}.") - } triggerResults[trigger.id] = triggerResult @@ -114,13 +133,14 @@ object QueryLevelMonitorRunner : MonitorRunner() { } } - val updatedAlert = monitorCtx.alertService!!.composeQueryLevelAlert( - triggerCtx, - triggerResult, - monitorResult.alertError() ?: triggerResult.alertError(), - executionId, - workflowRunContext - ) + val updatedAlert = + monitorCtx.alertService!!.composeQueryLevelAlert( + triggerCtx, + triggerResult, + monitorResult.alertError() ?: triggerResult.alertError(), + executionId, + workflowRunContext, + ) if (updatedAlert != null) updatedAlerts += updatedAlert } @@ -131,7 +151,7 @@ object QueryLevelMonitorRunner : MonitorRunner() { monitor.dataSources, updatedAlerts, it, - routingId = monitor.id + routingId = monitor.id, ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/TriggerService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/TriggerService.kt index e59b48eeb..c71cc7bae 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/TriggerService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/TriggerService.kt @@ -40,10 +40,15 @@ import org.opensearch.search.aggregations.Aggregations import org.opensearch.search.aggregations.support.AggregationPath /** Service that handles executing Triggers */ -class TriggerService(val scriptService: ScriptService) { - +class TriggerService( + val scriptService: ScriptService, +) { private val logger = LogManager.getLogger(TriggerService::class.java) + + @Suppress("ktlint:standard:property-naming") private val ALWAYS_RUN = Script("return true") + + @Suppress("ktlint:standard:property-naming") private val NEVER_RUN = Script("return false") fun isQueryLevelTriggerActionable( @@ -69,25 +74,26 @@ class TriggerService(val scriptService: ScriptService) { fun runQueryLevelTrigger( monitor: Monitor, trigger: QueryLevelTrigger, - ctx: QueryLevelTriggerExecutionContext - ): QueryLevelTriggerRunResult { - return try { - val triggered = scriptService.compile(trigger.condition, TriggerScript.CONTEXT) - .newInstance(trigger.condition.params) - .execute(ctx) + ctx: QueryLevelTriggerExecutionContext, + ): QueryLevelTriggerRunResult = + try { + val triggered = + scriptService + .compile(trigger.condition, TriggerScript.CONTEXT) + .newInstance(trigger.condition.params) + .execute(ctx) QueryLevelTriggerRunResult(trigger.name, triggered, null) } catch (e: Exception) { logger.info("Error running script for monitor ${monitor.id}, trigger: ${trigger.id}", e) // if the script fails we need to send an alert so set triggered = true QueryLevelTriggerRunResult(trigger.name, true, e) } - } fun runClusterMetricsTrigger( monitor: Monitor, trigger: QueryLevelTrigger, ctx: QueryLevelTriggerExecutionContext, - clusterService: ClusterService + clusterService: ClusterService, ): ClusterMetricsTriggerRunResult { var runResult: ClusterMetricsTriggerRunResult? 
try { @@ -99,9 +105,11 @@ class TriggerService(val scriptService: ScriptService) { // Reducing the inputResults to only include results from 1 cluster at a time val clusterTriggerCtx = ctx.copy(results = listOf(mapOf(clusterResult.toPair()))) - val clusterTriggered = scriptService.compile(trigger.condition, TriggerScript.CONTEXT) - .newInstance(trigger.condition.params) - .execute(clusterTriggerCtx) + val clusterTriggered = + scriptService + .compile(trigger.condition, TriggerScript.CONTEXT) + .newInstance(trigger.condition.params) + .execute(clusterTriggerCtx) if (clusterTriggered) { triggered = clusterTriggered @@ -109,18 +117,23 @@ class TriggerService(val scriptService: ScriptService) { } } } else { - triggered = scriptService.compile(trigger.condition, TriggerScript.CONTEXT) - .newInstance(trigger.condition.params) - .execute(ctx) - if (triggered) clusterTriggerResults - .add(ClusterTriggerResult(cluster = clusterService.clusterName.value(), triggered = triggered)) + triggered = + scriptService + .compile(trigger.condition, TriggerScript.CONTEXT) + .newInstance(trigger.condition.params) + .execute(ctx) + if (triggered) { + clusterTriggerResults + .add(ClusterTriggerResult(cluster = clusterService.clusterName.value(), triggered = triggered)) + } } - runResult = ClusterMetricsTriggerRunResult( - triggerName = trigger.name, - triggered = triggered, - error = null, - clusterTriggerResults = clusterTriggerResults - ) + runResult = + ClusterMetricsTriggerRunResult( + triggerName = trigger.name, + triggered = triggered, + error = null, + clusterTriggerResults = clusterTriggerResults, + ) } catch (e: Exception) { logger.info("Error running script for monitor ${monitor.id}, trigger: ${trigger.id}", e) // if the script fails we need to send an alert so set triggered = true @@ -133,9 +146,9 @@ class TriggerService(val scriptService: ScriptService) { fun runDocLevelTrigger( monitor: Monitor, trigger: DocumentLevelTrigger, - queryToDocIds: Map> - ): 
DocumentLevelTriggerRunResult { - return try { + queryToDocIds: Map>, + ): DocumentLevelTriggerRunResult = + try { var triggeredDocs = mutableListOf() if (trigger.condition.idOrCode.equals(ALWAYS_RUN.idOrCode)) { @@ -143,8 +156,11 @@ class TriggerService(val scriptService: ScriptService) { triggeredDocs.addAll(value) } } else if (!trigger.condition.idOrCode.equals(NEVER_RUN.idOrCode)) { - triggeredDocs = TriggerExpressionParser(trigger.condition.idOrCode).parse() - .evaluate(queryToDocIds).toMutableList() + triggeredDocs = + TriggerExpressionParser(trigger.condition.idOrCode) + .parse() + .evaluate(queryToDocIds) + .toMutableList() } DocumentLevelTriggerRunResult(trigger.name, triggeredDocs, null) @@ -153,7 +169,6 @@ class TriggerService(val scriptService: ScriptService) { // if the script fails we need to send an alert so set triggered = true DocumentLevelTriggerRunResult(trigger.name, emptyList(), e) } - } fun runChainedAlertTrigger( workflow: Workflow, @@ -176,7 +191,7 @@ class TriggerService(val scriptService: ScriptService) { triggerName = trigger.name, triggered = false, error = e, - associatedAlertIds = emptySet() + associatedAlertIds = emptySet(), ) } } @@ -185,14 +200,15 @@ class TriggerService(val scriptService: ScriptService) { fun runBucketLevelTrigger( monitor: Monitor, trigger: BucketLevelTrigger, - ctx: BucketLevelTriggerExecutionContext - ): BucketLevelTriggerRunResult { - return try { + ctx: BucketLevelTriggerExecutionContext, + ): BucketLevelTriggerRunResult = + try { val bucketIndices = ((ctx.results[0][Aggregations.AGGREGATIONS_FIELD] as HashMap<*, *>)[trigger.id] as HashMap<*, *>)[BUCKET_INDICES] as List<*> - val parentBucketPath = ( - (ctx.results[0][Aggregations.AGGREGATIONS_FIELD] as HashMap<*, *>) - .get(trigger.id) as HashMap<*, *> + val parentBucketPath = + ( + (ctx.results[0][Aggregations.AGGREGATIONS_FIELD] as HashMap<*, *>) + .get(trigger.id) as HashMap<*, *> )[PARENT_BUCKET_PATH] as String val aggregationPath = 
AggregationPath.parse(parentBucketPath) // TODO test this part by passing sub-aggregation path @@ -213,22 +229,34 @@ class TriggerService(val scriptService: ScriptService) { logger.info("Error running trigger [${trigger.id}] for monitor [${monitor.id}]", e) BucketLevelTriggerRunResult(trigger.name, e, emptyMap()) } - } @Suppress("UNCHECKED_CAST") private fun getBucketKeyValuesList(bucket: Map): List { val keyField = Aggregation.CommonFields.KEY.preferredName val keyValuesList = mutableListOf() when { - bucket[keyField] is List<*> && bucket.containsKey(Aggregation.CommonFields.KEY_AS_STRING.preferredName) -> + bucket[keyField] is List<*> && bucket.containsKey(Aggregation.CommonFields.KEY_AS_STRING.preferredName) -> { keyValuesList.add(bucket[Aggregation.CommonFields.KEY_AS_STRING.preferredName] as String) - bucket[keyField] is String -> keyValuesList.add(bucket[keyField] as String) + } + + bucket[keyField] is String -> { + keyValuesList.add(bucket[keyField] as String) + } + // In the case where the key field is an Int - bucket[keyField] is Int -> keyValuesList.add(bucket[keyField].toString()) + bucket[keyField] is Int -> { + keyValuesList.add(bucket[keyField].toString()) + } + // In the case where the key field is an object with multiple values (such as a composite aggregation with more than one source) // the values will be iterated through and converted into a string - bucket[keyField] is Map<*, *> -> (bucket[keyField] as Map).values.map { keyValuesList.add(it.toString()) } - else -> throw IllegalArgumentException("Unexpected format for key in bucket [$bucket]") + bucket[keyField] is Map<*, *> -> { + (bucket[keyField] as Map).values.map { keyValuesList.add(it.toString()) } + } + + else -> { + throw IllegalArgumentException("Unexpected format for key in bucket [$bucket]") + } } return keyValuesList diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowMetadataService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowMetadataService.kt 
index 6b35fd917..ef44aab1a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowMetadataService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowMetadataService.kt @@ -58,7 +58,7 @@ object WorkflowMetadataService : client: Client, clusterService: ClusterService, xContentRegistry: NamedXContentRegistry, - settings: Settings + settings: Settings, ) { this.clusterService = clusterService this.client = client @@ -69,14 +69,18 @@ object WorkflowMetadataService : } @Suppress("ComplexMethod", "ReturnCount") - suspend fun upsertWorkflowMetadata(metadata: WorkflowMetadata, updating: Boolean): WorkflowMetadata { + suspend fun upsertWorkflowMetadata( + metadata: WorkflowMetadata, + updating: Boolean, + ): WorkflowMetadata { try { - val indexRequest = IndexRequest(ScheduledJob.SCHEDULED_JOBS_INDEX) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(metadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) - .id(metadata.id) - .routing(metadata.workflowId) - .timeout(indexTimeout) + val indexRequest = + IndexRequest(ScheduledJob.SCHEDULED_JOBS_INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(metadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) + .id(metadata.id) + .routing(metadata.workflowId) + .timeout(indexTimeout) if (updating) { indexRequest.id(metadata.id) @@ -90,6 +94,7 @@ object WorkflowMetadataService : log.error(failureReason) throw AlertingException(failureReason, RestStatus.INTERNAL_SERVER_ERROR, IllegalStateException(failureReason)) } + DocWriteResponse.Result.CREATED, DocWriteResponse.Result.UPDATED -> { log.debug("Successfully upserted WorkflowMetadata:${metadata.id} ") } @@ -100,7 +105,7 @@ object WorkflowMetadataService : if (e is OpenSearchException && e.status() == RestStatus.CONFLICT && !updating) { log.debug( "Metadata with ${metadata.id} for workflow ${metadata.workflowId} already 
exist." + - " Instead of creating new, updating existing metadata will be performed" + " Instead of creating new, updating existing metadata will be performed", ) return upsertWorkflowMetadata(metadata, true) } @@ -112,7 +117,7 @@ object WorkflowMetadataService : suspend fun getOrCreateWorkflowMetadata( workflow: Workflow, skipIndex: Boolean = false, - executionId: String + executionId: String, ): Pair { try { val created = true @@ -139,12 +144,13 @@ object WorkflowMetadataService : val getResponse: GetResponse = client.suspendUntil { get(getRequest, it) } return if (getResponse.isExists) { - val xcp = XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, - XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, + XContentType.JSON, + ) XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) WorkflowMetadata.parse(xcp) } else { @@ -159,7 +165,11 @@ object WorkflowMetadataService : } } - private fun createNewWorkflowMetadata(workflow: Workflow, executionId: String, isTempWorkflow: Boolean): WorkflowMetadata { + private fun createNewWorkflowMetadata( + workflow: Workflow, + executionId: String, + isTempWorkflow: Boolean, + ): WorkflowMetadata { // In the case of temp workflow (ie. 
workflow is in dry-run) use timestampWithUUID-metadata format // In the case of regular workflow execution, use the workflowId-metadata format val id = if (isTempWorkflow) "${LocalDateTime.now(ZoneOffset.UTC)}${UUID.randomUUID()}" else workflow.id @@ -168,7 +178,7 @@ object WorkflowMetadataService : workflowId = workflow.id, monitorIds = (workflow.inputs[0] as CompositeInput).getMonitorIds(), latestRunTime = Instant.now(), - latestExecutionId = executionId + latestExecutionId = executionId, ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt index 83f8b56e0..eefcb38a2 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/WorkflowService.kt @@ -42,40 +42,47 @@ class WorkflowService( * @param chainedMonitors Monitors that have previously executed * @param workflowExecutionId Execution id of the current workflow */ - suspend fun getFindingDocIdsByExecutionId(chainedMonitors: List, workflowExecutionId: String): - Pair>, List> { - if (chainedMonitors.isEmpty()) + suspend fun getFindingDocIdsByExecutionId( + chainedMonitors: List, + workflowExecutionId: String, + ): Pair>, List> { + if (chainedMonitors.isEmpty()) { return Pair(emptyMap(), listOf()) + } val dataSources = chainedMonitors[0].dataSources try { - val existsResponse: IndicesExistsResponse = client.admin().indices().suspendUntil { - exists(IndicesExistsRequest(dataSources.findingsIndex).local(true), it) - } + val existsResponse: IndicesExistsResponse = + client.admin().indices().suspendUntil { + exists(IndicesExistsRequest(dataSources.findingsIndex).local(true), it) + } if (existsResponse.isExists == false) return Pair(emptyMap(), listOf()) // Search findings index to match id of monitors and workflow execution id - val bqb = QueryBuilders.boolQuery() - .filter( - QueryBuilders.termsQuery( - Finding.MONITOR_ID_FIELD, - 
chainedMonitors.map { it.id } - ) - ) - .filter(QueryBuilders.termQuery(Finding.EXECUTION_ID_FIELD, workflowExecutionId)) - val searchRequest = SearchRequest() - .source( - SearchSourceBuilder() - .query(bqb) - .version(true) - .seqNoAndPrimaryTerm(true) - ) - .indices(dataSources.findingsIndex) + val bqb = + QueryBuilders + .boolQuery() + .filter( + QueryBuilders.termsQuery( + Finding.MONITOR_ID_FIELD, + chainedMonitors.map { it.id }, + ), + ).filter(QueryBuilders.termQuery(Finding.EXECUTION_ID_FIELD, workflowExecutionId)) + val searchRequest = + SearchRequest() + .source( + SearchSourceBuilder() + .query(bqb) + .version(true) + .seqNoAndPrimaryTerm(true), + ).indices(dataSources.findingsIndex) val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } // Get the findings docs val findings = mutableListOf() for (hit in searchResponse.hits) { - val xcp = XContentType.JSON.xContent() - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) + val xcp = + XContentType.JSON + .xContent() + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) val finding = Finding.parse(xcp) findings.add(finding) @@ -99,19 +106,22 @@ class WorkflowService( * @param monitors List of monitor ids * @param size Expected number of monitors */ - suspend fun getMonitorsById(monitors: List, size: Int): List { + suspend fun getMonitorsById( + monitors: List, + size: Int, + ): List { try { val bqb = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", monitors)) - val searchRequest = SearchRequest() - .source( - SearchSourceBuilder() - .query(bqb) - .version(true) - .seqNoAndPrimaryTerm(true) - .size(size) - ) - .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + val searchRequest = + SearchRequest() + .source( + SearchSourceBuilder() + .query(bqb) + .version(true) + .seqNoAndPrimaryTerm(true) 
+ .size(size), + ).indices(ScheduledJob.SCHEDULED_JOBS_INDEX) val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } return parseMonitors(searchResponse) @@ -129,18 +139,21 @@ class WorkflowService( val monitors = mutableListOf() try { for (hit in response.hits) { - XContentType.JSON.xContent().createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, hit.sourceAsString - ).use { hitsParser -> - val scheduledJob = ScheduledJob.parse(hitsParser, hit.id, hit.version) - validateMonitorV1(scheduledJob)?.let { - throw OpenSearchException(it) - } + XContentType.JSON + .xContent() + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceAsString, + ).use { hitsParser -> + val scheduledJob = ScheduledJob.parse(hitsParser, hit.id, hit.version) + validateMonitorV1(scheduledJob)?.let { + throw OpenSearchException(it) + } - val monitor = scheduledJob as Monitor - monitors.add(monitor) - } + val monitor = scheduledJob as Monitor + monitors.add(monitor) + } } } catch (e: Exception) { log.error("Error parsing monitors: ${e.message}", e) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequest.kt index c7b699dfc..b510718cf 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequest.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequest.kt @@ -25,7 +25,7 @@ class ExecuteMonitorRequest : ActionRequest { requestEnd: TimeValue, monitorId: String?, monitor: Monitor?, - requestStart: TimeValue? = null + requestStart: TimeValue? 
= null, ) : super() { this.dryrun = dryrun this.requestEnd = requestEnd @@ -41,13 +41,13 @@ class ExecuteMonitorRequest : ActionRequest { sin.readOptionalString(), // monitorId if (sin.readBoolean()) { Monitor.readFrom(sin) // monitor - } else null, - sin.readOptionalTimeValue() + } else { + null + }, + sin.readOptionalTimeValue(), ) - override fun validate(): ActionRequestValidationException? { - return null - } + override fun validate(): ActionRequestValidationException? = null @Throws(IOException::class) override fun writeTo(out: StreamOutput) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorResponse.kt index 96b15a278..b7bc1ce9f 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteMonitorResponse.kt @@ -14,8 +14,9 @@ import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder import java.io.IOException -class ExecuteMonitorResponse : ActionResponse, ToXContentObject { - +class ExecuteMonitorResponse : + ActionResponse, + ToXContentObject { val monitorRunResult: MonitorRunResult<*> constructor(monitorRunResult: MonitorRunResult<*>) : super() { @@ -24,7 +25,7 @@ class ExecuteMonitorResponse : ActionResponse, ToXContentObject { @Throws(IOException::class) constructor(sin: StreamInput) : this( - MonitorRunResult.readFrom(sin) // monitorRunResult + MonitorRunResult.readFrom(sin), // monitorRunResult ) @Throws(IOException::class) @@ -33,7 +34,8 @@ class ExecuteMonitorResponse : ActionResponse, ToXContentObject { } @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return monitorRunResult.toXContent(builder, ToXContent.EMPTY_PARAMS) - } + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + 
): XContentBuilder = monitorRunResult.toXContent(builder, ToXContent.EMPTY_PARAMS) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt index 104448cce..fdf1cd0b9 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowRequest.kt @@ -45,16 +45,20 @@ class ExecuteWorkflowRequest : ActionRequest { sin.readOptionalString(), if (sin.readBoolean()) { Workflow.readFrom(sin) - } else null, - sin.readOptionalTimeValue() + } else { + null + }, + sin.readOptionalTimeValue(), ) override fun validate(): ActionRequestValidationException? { var validationException: ActionRequestValidationException? = null if (workflowId == null && workflow == null) { - validationException = ValidateActions.addValidationError( - "Both workflow and workflow id are missing", validationException - ) + validationException = + ValidateActions.addValidationError( + "Both workflow and workflow id are missing", + validationException, + ) } return validationException } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt index 6875960c7..96b4e8c19 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/ExecuteWorkflowResponse.kt @@ -14,17 +14,19 @@ import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder import java.io.IOException -class ExecuteWorkflowResponse : ActionResponse, ToXContentObject { +class ExecuteWorkflowResponse : + ActionResponse, + ToXContentObject { val workflowRunResult: WorkflowRunResult constructor( - workflowRunResult: WorkflowRunResult + workflowRunResult: 
WorkflowRunResult, ) : super() { this.workflowRunResult = workflowRunResult } @Throws(IOException::class) constructor(sin: StreamInput) : this( - WorkflowRunResult(sin) + WorkflowRunResult(sin), ) @Throws(IOException::class) @@ -33,7 +35,8 @@ class ExecuteWorkflowResponse : ActionResponse, ToXContentObject { } @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return workflowRunResult.toXContent(builder, params) - } + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = workflowRunResult.toXContent(builder, params) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsRequest.kt index 92fae8247..b6640c36c 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsRequest.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsRequest.kt @@ -25,7 +25,7 @@ class GetDestinationsRequest : ActionRequest { version: Long, srcContext: FetchSourceContext?, table: Table, - destinationType: String + destinationType: String, ) : super() { this.destinationId = destinationId this.version = version @@ -38,16 +38,17 @@ class GetDestinationsRequest : ActionRequest { constructor(sin: StreamInput) : this( destinationId = sin.readOptionalString(), version = sin.readLong(), - srcContext = if (sin.readBoolean()) { - FetchSourceContext(sin) - } else null, + srcContext = + if (sin.readBoolean()) { + FetchSourceContext(sin) + } else { + null + }, table = Table.readFrom(sin), - destinationType = sin.readString() + destinationType = sin.readString(), ) - override fun validate(): ActionRequestValidationException? { - return null - } + override fun validate(): ActionRequestValidationException? 
= null @Throws(IOException::class) override fun writeTo(out: StreamOutput) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsResponse.kt index 5cf7d7dec..c36b8f924 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetDestinationsResponse.kt @@ -15,8 +15,11 @@ import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder import java.io.IOException -class GetDestinationsResponse : ActionResponse, ToXContentObject { +class GetDestinationsResponse : + ActionResponse, + ToXContentObject { var status: RestStatus + // totalDestinations is not the same as the size of destinations because there can be 30 destinations from the request, but // the request only asked for 5 destinations, so totalDestinations will be 30, but alerts will only contain 5 destinations var totalDestinations: Int? 
@@ -25,7 +28,7 @@ class GetDestinationsResponse : ActionResponse, ToXContentObject { constructor( status: RestStatus, totalDestinations: Int?, - destinations: List + destinations: List, ) : super() { this.status = status this.totalDestinations = totalDestinations @@ -55,8 +58,12 @@ class GetDestinationsResponse : ActionResponse, ToXContentObject { } @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { + builder + .startObject() .field("totalDestinations", totalDestinations) .field("destinations", destinations) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountRequest.kt index 94b79726e..dd7b638ae 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountRequest.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountRequest.kt @@ -23,7 +23,7 @@ class GetEmailAccountRequest : ActionRequest { emailAccountID: String, version: Long, method: RestRequest.Method, - srcContext: FetchSourceContext? + srcContext: FetchSourceContext?, ) : super() { this.emailAccountID = emailAccountID this.version = version @@ -38,12 +38,12 @@ class GetEmailAccountRequest : ActionRequest { sin.readEnum(RestRequest.Method::class.java), // method if (sin.readBoolean()) { FetchSourceContext(sin) // srcContext - } else null + } else { + null + }, ) - override fun validate(): ActionRequestValidationException? { - return null - } + override fun validate(): ActionRequestValidationException? 
= null @Throws(IOException::class) override fun writeTo(out: StreamOutput) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountResponse.kt index a83cdbba1..9daf3e227 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailAccountResponse.kt @@ -19,7 +19,9 @@ import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder import java.io.IOException -class GetEmailAccountResponse : ActionResponse, ToXContentObject { +class GetEmailAccountResponse : + ActionResponse, + ToXContentObject { var id: String var version: Long var seqNo: Long @@ -33,7 +35,7 @@ class GetEmailAccountResponse : ActionResponse, ToXContentObject { seqNo: Long, primaryTerm: Long, status: RestStatus, - emailAccount: EmailAccount? + emailAccount: EmailAccount?, ) : super() { this.id = id this.version = version @@ -52,7 +54,9 @@ class GetEmailAccountResponse : ActionResponse, ToXContentObject { sin.readEnum(RestStatus::class.java), // RestStatus if (sin.readBoolean()) { EmailAccount.readFrom(sin) // emailAccount - } else null + } else { + null + }, ) @Throws(IOException::class) @@ -71,14 +75,19 @@ class GetEmailAccountResponse : ActionResponse, ToXContentObject { } @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { + builder + .startObject() .field(_ID, id) .field(_VERSION, version) .field(_SEQ_NO, seqNo) .field(_PRIMARY_TERM, primaryTerm) - if (emailAccount != null) + if (emailAccount != null) { builder.field("email_account", emailAccount) + } return builder.endObject() } diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupRequest.kt index bb245b075..3b8da7080 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupRequest.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupRequest.kt @@ -23,7 +23,7 @@ class GetEmailGroupRequest : ActionRequest { emailGroupID: String, version: Long, method: RestRequest.Method, - srcContext: FetchSourceContext? + srcContext: FetchSourceContext?, ) : super() { this.emailGroupID = emailGroupID this.version = version @@ -38,12 +38,12 @@ class GetEmailGroupRequest : ActionRequest { sin.readEnum(RestRequest.Method::class.java), // method if (sin.readBoolean()) { FetchSourceContext(sin) // srcContext - } else null + } else { + null + }, ) - override fun validate(): ActionRequestValidationException? { - return null - } + override fun validate(): ActionRequestValidationException? = null @Throws(IOException::class) override fun writeTo(out: StreamOutput) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupResponse.kt index d83941ffd..d1e69b095 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetEmailGroupResponse.kt @@ -19,7 +19,9 @@ import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder import java.io.IOException -class GetEmailGroupResponse : ActionResponse, ToXContentObject { +class GetEmailGroupResponse : + ActionResponse, + ToXContentObject { var id: String var version: Long var seqNo: Long @@ -33,7 +35,7 @@ class GetEmailGroupResponse : ActionResponse, ToXContentObject { seqNo: Long, primaryTerm: Long, status: RestStatus, - emailGroup: EmailGroup? 
+ emailGroup: EmailGroup?, ) : super() { this.id = id this.version = version @@ -52,7 +54,9 @@ class GetEmailGroupResponse : ActionResponse, ToXContentObject { sin.readEnum(RestStatus::class.java), // RestStatus if (sin.readBoolean()) { EmailGroup.readFrom(sin) // emailGroup - } else null + } else { + null + }, ) @Throws(IOException::class) @@ -71,14 +75,19 @@ class GetEmailGroupResponse : ActionResponse, ToXContentObject { } @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { + builder + .startObject() .field(_ID, id) .field(_VERSION, version) .field(_SEQ_NO, seqNo) .field(_PRIMARY_TERM, primaryTerm) - if (emailGroup != null) + if (emailGroup != null) { builder.field("email_group", emailGroup) + } return builder.endObject() } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetRemoteIndexesRequest.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetRemoteIndexesRequest.kt index 8b371ba26..aee8f29b4 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetRemoteIndexesRequest.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetRemoteIndexesRequest.kt @@ -25,12 +25,10 @@ class GetRemoteIndexesRequest : ActionRequest { @Throws(IOException::class) constructor(sin: StreamInput) : this( sin.readStringList(), - sin.readBoolean() + sin.readBoolean(), ) - override fun validate(): ActionRequestValidationException? { - return null - } + override fun validate(): ActionRequestValidationException? = null @Throws(IOException::class) override fun writeTo(out: StreamOutput) { @@ -42,9 +40,7 @@ class GetRemoteIndexesRequest : ActionRequest { * Validates the request [indexes]. * @return TRUE if all entries are valid; else FALSE. 
*/ - fun isValid(): Boolean { - return indexes.isNotEmpty() && indexes.all { validPattern(it) } - } + fun isValid(): Boolean = indexes.isNotEmpty() && indexes.all { validPattern(it) } /** * Validates individual entries in the request [indexes]. diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetRemoteIndexesResponse.kt b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetRemoteIndexesResponse.kt index 0f694bbf5..7ce2cba06 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/action/GetRemoteIndexesResponse.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/action/GetRemoteIndexesResponse.kt @@ -16,7 +16,9 @@ import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder import java.io.IOException -class GetRemoteIndexesResponse : ActionResponse, ToXContentObject { +class GetRemoteIndexesResponse : + ActionResponse, + ToXContentObject { var clusterIndexes: List = emptyList() constructor(clusterIndexes: List) : super() { @@ -25,10 +27,13 @@ class GetRemoteIndexesResponse : ActionResponse, ToXContentObject { @Throws(IOException::class) constructor(sin: StreamInput) : this( - clusterIndexes = sin.readList((ClusterIndexes.Companion)::readFrom) + clusterIndexes = sin.readList((ClusterIndexes.Companion)::readFrom), ) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.startObject() clusterIndexes.forEach { it.toXContent(builder, params) @@ -45,19 +50,22 @@ class GetRemoteIndexesResponse : ActionResponse, ToXContentObject { val clusterHealth: ClusterHealthStatus?, val hubCluster: Boolean, val indexes: List = listOf(), - val latency: Long - ) : ToXContentObject, Writeable { - + val latency: Long, + ) : ToXContentObject, + Writeable { @Throws(IOException::class) constructor(sin: StreamInput) : this( clusterName = sin.readString(), 
clusterHealth = sin.readOptionalWriteable(ClusterHealthStatus::readFrom), hubCluster = sin.readBoolean(), indexes = sin.readList((ClusterIndex.Companion)::readFrom), - latency = sin.readLong() + latency = sin.readLong(), ) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.startObject(clusterName) builder.field(CLUSTER_NAME_FIELD, clusterName) builder.field(CLUSTER_HEALTH_FIELD, clusterHealth) @@ -86,30 +94,34 @@ class GetRemoteIndexesResponse : ActionResponse, ToXContentObject { @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): ClusterIndexes { - return ClusterIndexes(sin) - } + fun readFrom(sin: StreamInput): ClusterIndexes = ClusterIndexes(sin) } data class ClusterIndex( val indexName: String, val indexHealth: ClusterHealthStatus?, - val mappings: MappingMetadata? - ) : ToXContentObject, Writeable { - + val mappings: MappingMetadata?, + ) : ToXContentObject, + Writeable { @Throws(IOException::class) constructor(sin: StreamInput) : this( indexName = sin.readString(), indexHealth = sin.readOptionalWriteable(ClusterHealthStatus::readFrom), - mappings = sin.readOptionalWriteable(::MappingMetadata) + mappings = sin.readOptionalWriteable(::MappingMetadata), ) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.startObject(indexName) builder.field(INDEX_NAME_FIELD, indexName) builder.field(INDEX_HEALTH_FIELD, indexHealth) - if (mappings == null) builder.startObject(MAPPINGS_FIELD).endObject() - else builder.field(MAPPINGS_FIELD, mappings.sourceAsMap()) + if (mappings == null) { + builder.startObject(MAPPINGS_FIELD).endObject() + } else { + builder.field(MAPPINGS_FIELD, mappings.sourceAsMap()) + } return builder.endObject() } @@ 
-126,9 +138,7 @@ class GetRemoteIndexesResponse : ActionResponse, ToXContentObject { @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): ClusterIndex { - return ClusterIndex(sin) - } + fun readFrom(sin: StreamInput): ClusterIndex = ClusterIndex(sin) } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2Request.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2Request.kt index 7024842ac..ef97de4c1 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2Request.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2Request.kt @@ -24,12 +24,10 @@ class DeleteMonitorV2Request : ActionRequest { @Throws(IOException::class) constructor(sin: StreamInput) : this( monitorV2Id = sin.readString(), - refreshPolicy = WriteRequest.RefreshPolicy.readFrom(sin) + refreshPolicy = WriteRequest.RefreshPolicy.readFrom(sin), ) - override fun validate(): ActionRequestValidationException? { - return null - } + override fun validate(): ActionRequestValidationException? 
= null @Throws(IOException::class) override fun writeTo(out: StreamOutput) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2Response.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2Response.kt index b4850b662..cd4fee879 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2Response.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2Response.kt @@ -18,7 +18,7 @@ class DeleteMonitorV2Response : BaseResponse { constructor( id: String, - version: Long + version: Long, ) : super() { this.id = id this.version = version @@ -26,7 +26,7 @@ class DeleteMonitorV2Response : BaseResponse { constructor(sin: StreamInput) : this( sin.readString(), // id - sin.readLong() // version + sin.readLong(), // version ) override fun writeTo(out: StreamOutput) { @@ -34,10 +34,13 @@ class DeleteMonitorV2Response : BaseResponse { out.writeLong(version) } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject() + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = + builder + .startObject() .field(IndexUtils._ID, id) .field(IndexUtils._VERSION, version) .endObject() - } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2Request.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2Request.kt index 99a01667c..0e026af8a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2Request.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2Request.kt @@ -26,7 +26,7 @@ class ExecuteMonitorV2Request : ActionRequest { manual: Boolean, // if execute was called by user or by scheduled job monitorV2Id: String?, monitorV2: MonitorV2?, - requestEnd: TimeValue + requestEnd: TimeValue, ) : super() { this.dryrun = dryrun this.manual = 
manual @@ -45,7 +45,7 @@ class ExecuteMonitorV2Request : ActionRequest { } else { null }, - sin.readTimeValue() // requestEnd + sin.readTimeValue(), // requestEnd ) override fun validate(): ActionRequestValidationException? = diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2Response.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2Response.kt index 7d6eb8b1f..4f6b053cc 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2Response.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2Response.kt @@ -14,7 +14,9 @@ import org.opensearch.core.xcontent.ToXContentObject import org.opensearch.core.xcontent.XContentBuilder import java.io.IOException -class ExecuteMonitorV2Response : ActionResponse, ToXContentObject { +class ExecuteMonitorV2Response : + ActionResponse, + ToXContentObject { val monitorV2RunResult: MonitorV2RunResult<*> constructor(monitorV2RunResult: MonitorV2RunResult<*>) : super() { @@ -23,7 +25,7 @@ class ExecuteMonitorV2Response : ActionResponse, ToXContentObject { @Throws(IOException::class) constructor(sin: StreamInput) : this( - MonitorV2RunResult.readFrom(sin) // monitorRunResult + MonitorV2RunResult.readFrom(sin), // monitorRunResult ) @Throws(IOException::class) @@ -32,7 +34,8 @@ class ExecuteMonitorV2Response : ActionResponse, ToXContentObject { } @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return monitorV2RunResult.toXContent(builder, ToXContent.EMPTY_PARAMS) - } + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = monitorV2RunResult.toXContent(builder, ToXContent.EMPTY_PARAMS) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2Request.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2Request.kt index 008057aa4..2a11518b2 
100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2Request.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2Request.kt @@ -34,9 +34,7 @@ class GetAlertsV2Request : ActionRequest { monitorV2Ids = sin.readOptionalStringList(), ) - override fun validate(): ActionRequestValidationException? { - return null - } + override fun validate(): ActionRequestValidationException? = null @Throws(IOException::class) override fun writeTo(out: StreamOutput) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2Response.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2Response.kt index 0de492496..ac2d2820c 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2Response.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2Response.kt @@ -23,7 +23,7 @@ class GetAlertsV2Response : BaseResponse { constructor( alertV2s: List, - totalAlertV2s: Int? 
+ totalAlertV2s: Int?, ) : super() { this.alertV2s = alertV2s this.totalAlertV2s = totalAlertV2s @@ -32,7 +32,7 @@ class GetAlertsV2Response : BaseResponse { @Throws(IOException::class) constructor(sin: StreamInput) : this( alertV2s = Collections.unmodifiableList(sin.readList(::AlertV2)), - totalAlertV2s = sin.readOptionalInt() + totalAlertV2s = sin.readOptionalInt(), ) @Throws(IOException::class) @@ -42,8 +42,12 @@ class GetAlertsV2Response : BaseResponse { } @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { + builder + .startObject() .field("alerts_v2", alertV2s) .field("total_alerts_v2", totalAlertV2s) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2Request.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2Request.kt index 7f051ff1e..ed7f0e10d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2Request.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2Request.kt @@ -20,7 +20,7 @@ class GetMonitorV2Request : ActionRequest { constructor( monitorV2Id: String, version: Long, - srcContext: FetchSourceContext? + srcContext: FetchSourceContext?, ) : super() { this.monitorV2Id = monitorV2Id this.version = version @@ -35,12 +35,10 @@ class GetMonitorV2Request : ActionRequest { FetchSourceContext(sin) // srcContext } else { null - } + }, ) - override fun validate(): ActionRequestValidationException? { - return null - } + override fun validate(): ActionRequestValidationException? 
= null @Throws(IOException::class) override fun writeTo(out: StreamOutput) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2Response.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2Response.kt index 5b6df334f..7f47362d5 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2Response.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2Response.kt @@ -29,7 +29,7 @@ class GetMonitorV2Response : BaseResponse { version: Long, seqNo: Long, primaryTerm: Long, - monitorV2: MonitorV2? + monitorV2: MonitorV2?, ) : super() { this.id = id this.version = version @@ -44,11 +44,12 @@ class GetMonitorV2Response : BaseResponse { version = sin.readLong(), // version seqNo = sin.readLong(), // seqNo primaryTerm = sin.readLong(), // primaryTerm - monitorV2 = if (sin.readBoolean()) { - MonitorV2.readFrom(sin) // monitorV2 - } else { - null - } + monitorV2 = + if (sin.readBoolean()) { + MonitorV2.readFrom(sin) // monitorV2 + } else { + null + }, ) @Throws(IOException::class) @@ -66,8 +67,12 @@ class GetMonitorV2Response : BaseResponse { } @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { + builder + .startObject() .field(_ID, id) .field(_VERSION, version) .field(_SEQ_NO, seqNo) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2Request.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2Request.kt index 105408d07..a6e28695b 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2Request.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2Request.kt @@ -30,7 +30,7 @@ class IndexMonitorV2Request : ActionRequest { refreshPolicy: WriteRequest.RefreshPolicy, 
method: RestRequest.Method, monitorV2: MonitorV2, - rbacRoles: List? = null + rbacRoles: List? = null, ) : super() { this.monitorId = monitorId this.seqNo = seqNo @@ -49,12 +49,10 @@ class IndexMonitorV2Request : ActionRequest { refreshPolicy = WriteRequest.RefreshPolicy.readFrom(sin), method = sin.readEnum(RestRequest.Method::class.java), monitorV2 = MonitorV2.readFrom(sin), - rbacRoles = sin.readOptionalStringList() + rbacRoles = sin.readOptionalStringList(), ) - override fun validate(): ActionRequestValidationException? { - return null - } + override fun validate(): ActionRequestValidationException? = null @Throws(IOException::class) override fun writeTo(out: StreamOutput) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2Response.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2Response.kt index 99d076334..63f49fb66 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2Response.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2Response.kt @@ -29,7 +29,7 @@ class IndexMonitorV2Response : BaseResponse { version: Long, seqNo: Long, primaryTerm: Long, - monitorV2: MonitorV2 + monitorV2: MonitorV2, ) : super() { this.id = id this.version = version @@ -44,7 +44,7 @@ class IndexMonitorV2Response : BaseResponse { sin.readLong(), // version sin.readLong(), // seqNo sin.readLong(), // primaryTerm - MonitorV2.readFrom(sin) // monitorV2 + MonitorV2.readFrom(sin), // monitorV2 ) @Throws(IOException::class) @@ -57,15 +57,18 @@ class IndexMonitorV2Response : BaseResponse { } @Throws(IOException::class) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject() + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = + builder + .startObject() .field(_ID, id) .field(_VERSION, version) .field(_SEQ_NO, seqNo) .field(_PRIMARY_TERM, 
primaryTerm) .field(MONITOR_V2_FIELD, monitorV2) .endObject() - } companion object { const val MONITOR_V2_FIELD = "monitor_v2" diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/SearchMonitorV2Request.kt b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/SearchMonitorV2Request.kt index c16b62fd8..13f76f6ca 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/SearchMonitorV2Request.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/actionv2/SearchMonitorV2Request.kt @@ -16,19 +16,17 @@ class SearchMonitorV2Request : ActionRequest { val searchRequest: SearchRequest constructor( - searchRequest: SearchRequest + searchRequest: SearchRequest, ) : super() { this.searchRequest = searchRequest } @Throws(IOException::class) constructor(sin: StreamInput) : this( - searchRequest = SearchRequest(sin) + searchRequest = SearchRequest(sin), ) - override fun validate(): ActionRequestValidationException? { - return null - } + override fun validate(): ActionRequestValidationException? = null @Throws(IOException::class) override fun writeTo(out: StreamOutput) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt index 11671ce5b..fc96cef75 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertIndices.kt @@ -78,14 +78,13 @@ private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) * initiated on the cluster manager node to ensure only a single node tries to roll it over. Once we have a curator functionality * in Scheduled Jobs we can migrate to using that to rollover the index. 
*/ -// TODO: reafactor to make a generic version of this class for finding and alerts class AlertIndices( settings: Settings, private val client: Client, private val threadPool: ThreadPool, - private val clusterService: ClusterService + private val clusterService: ClusterService, ) : ClusterStateListener { - + // TODO: refactor to make a generic version of this class for finding and alerts init { clusterService.addListener(this) clusterService.clusterSettings.addSettingsUpdateConsumer(ALERT_HISTORY_ENABLED) { alertHistoryEnabled = it } @@ -113,7 +112,6 @@ class AlertIndices( } companion object { - /** The in progress alert history index. */ const val ALERT_INDEX = ".opendistro-alerting-alerts" @@ -142,12 +140,10 @@ class AlertIndices( const val ALL_FINDING_INDEX_PATTERN = ".opensearch-alerting-finding*" @JvmStatic - fun alertMapping() = - AlertIndices::class.java.getResource("alert_mapping.json").readText() + fun alertMapping() = AlertIndices::class.java.getResource("alert_mapping.json").readText() @JvmStatic - fun findingMapping() = - AlertIndices::class.java.getResource("finding_mapping.json").readText() + fun findingMapping() = AlertIndices::class.java.getResource("finding_mapping.json").readText() private val logger = LogManager.getLogger(AlertIndices::class.java) } @@ -195,16 +191,18 @@ class AlertIndices( rolloverAlertHistoryIndex() rolloverFindingHistoryIndex() // schedule the next rollover for approx MAX_AGE later - scheduledAlertRollover = threadPool - .scheduleWithFixedDelay({ rolloverAndDeleteAlertHistoryIndices() }, alertHistoryRolloverPeriod, executorName()) - scheduledFindingRollover = threadPool - .scheduleWithFixedDelay({ rolloverAndDeleteFindingHistoryIndices() }, findingHistoryRolloverPeriod, executorName()) + scheduledAlertRollover = + threadPool + .scheduleWithFixedDelay({ rolloverAndDeleteAlertHistoryIndices() }, alertHistoryRolloverPeriod, executorName()) + scheduledFindingRollover = + threadPool + .scheduleWithFixedDelay({ 
rolloverAndDeleteFindingHistoryIndices() }, findingHistoryRolloverPeriod, executorName()) } catch (e: Exception) { // This should be run on cluster startup logger.error( "Error creating alert/finding indices. " + "Alerts/Findings can't be recorded until clustermanager node is restarted.", - e + e, ) } } @@ -214,9 +212,7 @@ class AlertIndices( scheduledFindingRollover?.cancel() } - private fun executorName(): String { - return ThreadPool.Names.MANAGEMENT - } + private fun executorName(): String = ThreadPool.Names.MANAGEMENT override fun clusterChanged(event: ClusterChangedEvent) { // Instead of using a LocalNodeClusterManagerListener to track clustermanager changes, this service will @@ -240,22 +236,22 @@ class AlertIndices( private fun rescheduleAlertRollover() { if (clusterService.state().nodes.isLocalNodeElectedClusterManager) { scheduledAlertRollover?.cancel() - scheduledAlertRollover = threadPool - .scheduleWithFixedDelay({ rolloverAndDeleteAlertHistoryIndices() }, alertHistoryRolloverPeriod, executorName()) + scheduledAlertRollover = + threadPool + .scheduleWithFixedDelay({ rolloverAndDeleteAlertHistoryIndices() }, alertHistoryRolloverPeriod, executorName()) } } private fun rescheduleFindingRollover() { if (clusterService.state().nodes.isLocalNodeElectedClusterManager) { scheduledFindingRollover?.cancel() - scheduledFindingRollover = threadPool - .scheduleWithFixedDelay({ rolloverAndDeleteFindingHistoryIndices() }, findingHistoryRolloverPeriod, executorName()) + scheduledFindingRollover = + threadPool + .scheduleWithFixedDelay({ rolloverAndDeleteFindingHistoryIndices() }, findingHistoryRolloverPeriod, executorName()) } } - fun isAlertInitialized(): Boolean { - return alertIndexInitialized && alertHistoryIndexInitialized - } + fun isAlertInitialized(): Boolean = alertIndexInitialized && alertHistoryIndexInitialized fun isAlertInitialized(dataSources: DataSources): Boolean { val alertsIndex = dataSources.alertsIndex @@ -264,7 +260,10 @@ class AlertIndices( 
return alertIndexInitialized && alertHistoryIndexInitialized } if ( - clusterService.state().metadata.indices.containsKey(alertsIndex) && + clusterService + .state() + .metadata.indices + .containsKey(alertsIndex) && clusterService.state().metadata.hasAlias(alertsHistoryIndex) ) { return true @@ -272,9 +271,7 @@ class AlertIndices( return false } - fun isAlertHistoryEnabled(): Boolean { - return alertHistoryEnabled - } + fun isAlertHistoryEnabled(): Boolean = alertHistoryEnabled fun isFindingHistoryEnabled(): Boolean = findingHistoryEnabled @@ -287,6 +284,7 @@ class AlertIndices( } alertIndexInitialized } + suspend fun createOrUpdateAlertIndex(dataSources: DataSources) { if (dataSources.alertsIndex == ALERT_INDEX) { return createOrUpdateAlertIndex() @@ -307,24 +305,27 @@ class AlertIndices( createIndex( dataSources.alertsHistoryIndexPattern ?: ALERT_HISTORY_INDEX_PATTERN, alertMapping(), - dataSources.alertsHistoryIndex + dataSources.alertsHistoryIndex, ) } else { updateIndexMapping( dataSources.alertsHistoryIndex ?: ALERT_HISTORY_WRITE_INDEX, alertMapping(), - true + true, ) } } + suspend fun createOrUpdateInitialAlertHistoryIndex() { if (!alertHistoryIndexInitialized) { alertHistoryIndexInitialized = createIndex(ALERT_HISTORY_INDEX_PATTERN, alertMapping(), ALERT_HISTORY_WRITE_INDEX) - if (alertHistoryIndexInitialized) - IndexUtils.lastUpdatedAlertHistoryIndex = IndexUtils.getIndexNameWithAlias( - clusterService.state(), - ALERT_HISTORY_WRITE_INDEX - ) + if (alertHistoryIndexInitialized) { + IndexUtils.lastUpdatedAlertHistoryIndex = + IndexUtils.getIndexNameWithAlias( + clusterService.state(), + ALERT_HISTORY_WRITE_INDEX, + ) + } } else { updateIndexMapping(ALERT_HISTORY_WRITE_INDEX, alertMapping(), true) } @@ -335,10 +336,11 @@ class AlertIndices( if (!findingHistoryIndexInitialized) { findingHistoryIndexInitialized = createIndex(FINDING_HISTORY_INDEX_PATTERN, findingMapping(), FINDING_HISTORY_WRITE_INDEX) if (findingHistoryIndexInitialized) { - 
IndexUtils.lastUpdatedFindingHistoryIndex = IndexUtils.getIndexNameWithAlias( - clusterService.state(), - FINDING_HISTORY_WRITE_INDEX - ) + IndexUtils.lastUpdatedFindingHistoryIndex = + IndexUtils.getIndexNameWithAlias( + clusterService.state(), + FINDING_HISTORY_WRITE_INDEX, + ) } } else { updateIndexMapping(FINDING_HISTORY_WRITE_INDEX, findingMapping(), true) @@ -356,26 +358,32 @@ class AlertIndices( createIndex( findingsIndexPattern, findingMapping(), - findingsIndex + findingsIndex, ) } else { updateIndexMapping(findingsIndex, findingMapping(), true) } } - private suspend fun createIndex(index: String, schemaMapping: String, alias: String? = null): Boolean { + private suspend fun createIndex( + index: String, + schemaMapping: String, + alias: String? = null, + ): Boolean { // This should be a fast check of local cluster state. Should be exceedingly rare that the local cluster // state does not contain the index and multiple nodes concurrently try to create the index. // If it does happen that error is handled we catch the ResourceAlreadyExistsException - val existsResponse: IndicesExistsResponse = client.admin().indices().suspendUntil { - exists(IndicesExistsRequest(index).local(true), it) - } + val existsResponse: IndicesExistsResponse = + client.admin().indices().suspendUntil { + exists(IndicesExistsRequest(index).local(true), it) + } if (existsResponse.isExists) return true logger.debug("index: [$index] schema mappings: [$schemaMapping]") - val request = CreateIndexRequest(index) - .mapping(schemaMapping) - .settings(Settings.builder().put("index.hidden", true).build()) + val request = + CreateIndexRequest(index) + .mapping(schemaMapping) + .settings(Settings.builder().put("index.hidden", true).build()) if (alias != null) request.alias(Alias(alias)) return try { @@ -390,7 +398,11 @@ class AlertIndices( } } - private suspend fun updateIndexMapping(index: String, mapping: String, alias: Boolean = false) { + private suspend fun updateIndexMapping( + index: 
String, + mapping: String, + alias: Boolean = false, + ) { val clusterState = clusterService.state() var targetIndex = index if (alias) { @@ -402,8 +414,9 @@ class AlertIndices( return } - val putMappingRequest: PutMappingRequest = PutMappingRequest(targetIndex) - .source(mapping, XContentType.JSON) + val putMappingRequest: PutMappingRequest = + PutMappingRequest(targetIndex) + .source(mapping, XContentType.JSON) val updateResponse: AcknowledgedResponse = client.admin().indices().suspendUntil { putMapping(putMappingRequest, it) } if (updateResponse.isAcknowledged) { logger.info("Index mapping of $targetIndex is updated") @@ -413,7 +426,10 @@ class AlertIndices( } } - private fun setIndexUpdateFlag(index: String, targetIndex: String) { + private fun setIndexUpdateFlag( + index: String, + targetIndex: String, + ) { when (index) { ALERT_INDEX -> IndexUtils.alertIndexUpdated() ALERT_HISTORY_WRITE_INDEX -> IndexUtils.lastUpdatedAlertHistoryIndex = targetIndex @@ -438,7 +454,7 @@ class AlertIndices( map: String, docsCondition: Long, ageCondition: TimeValue, - writeIndex: String + writeIndex: String, ) { if (!initialized) { return @@ -446,7 +462,8 @@ class AlertIndices( // We have to pass null for newIndexName in order to get Elastic to increment the index count. 
val request = RolloverRequest(index, null) - request.createIndexRequest.index(pattern) + request.createIndexRequest + .index(pattern) .mapping(map) .settings(Settings.builder().put("index.hidden", true).build()) request.addMaxIndexDocsCondition(docsCondition) @@ -461,10 +478,11 @@ class AlertIndices( lastRolloverTime = TimeValue.timeValueMillis(threadPool.absoluteTimeInMillis()) } } + override fun onFailure(e: Exception) { logger.error("$writeIndex not roll over failed.") } - } + }, ) } @@ -476,7 +494,7 @@ class AlertIndices( alertMapping(), alertHistoryMaxDocs, alertHistoryMaxAge, - ALERT_HISTORY_WRITE_INDEX + ALERT_HISTORY_WRITE_INDEX, ) } @@ -488,23 +506,29 @@ class AlertIndices( findingMapping(), findingHistoryMaxDocs, findingHistoryMaxAge, - FINDING_HISTORY_WRITE_INDEX + FINDING_HISTORY_WRITE_INDEX, ) } - private fun deleteOldIndices(tag: String, indices: String) { + private fun deleteOldIndices( + tag: String, + indices: String, + ) { logger.info("info deleteOldIndices") - val clusterStateRequest = ClusterStateRequest() - .clear() - .indices(indices) - .metadata(true) - .local(true) - .indicesOptions(IndicesOptions.strictExpand()) + val clusterStateRequest = + ClusterStateRequest() + .clear() + .indices(indices) + .metadata(true) + .local(true) + .indicesOptions(IndicesOptions.strictExpand()) client.admin().cluster().state( clusterStateRequest, object : ActionListener { override fun onResponse(clusterStateResponse: ClusterStateResponse) { - if (clusterStateResponse.state.metadata.indices.isNotEmpty()) { + if (clusterStateResponse.state.metadata.indices + .isNotEmpty() + ) { scope.launch { val indicesToDelete = getIndicesToDelete(clusterStateResponse) logger.info("Deleting old $tag indices viz $indicesToDelete") @@ -517,10 +541,11 @@ class AlertIndices( logger.info("No Old $tag Indices to delete") } } + override fun onFailure(e: Exception) { logger.error("Error fetching cluster state") } - } + }, ) } @@ -540,7 +565,7 @@ class AlertIndices( indexMetadata: 
IndexMetadata, retentionPeriodMillis: Long, writeIndex: String, - historyEnabled: Boolean + historyEnabled: Boolean, ): String? { val creationTime = indexMetadata.creationDate if ((Instant.now().toEpochMilli() - creationTime) > retentionPeriodMillis) { @@ -572,16 +597,17 @@ class AlertIndices( override fun onResponse(deleteIndicesResponse: AcknowledgedResponse) { if (!deleteIndicesResponse.isAcknowledged) { logger.error( - "Could not delete one or more Alerting/Finding history indices: $indicesToDelete. Retrying one by one." + "Could not delete one or more Alerting/Finding history indices: $indicesToDelete. Retrying one by one.", ) deleteOldHistoryIndex(indicesToDelete) } } + override fun onFailure(e: Exception) { logger.error("Delete for Alerting/Finding History Indices $indicesToDelete Failed. Retrying one By one.") deleteOldHistoryIndex(indicesToDelete) } - } + }, ) } } @@ -599,10 +625,11 @@ class AlertIndices( } } } + override fun onFailure(e: Exception) { logger.debug("Exception ${e.message} while deleting the index $index") } - } + }, ) } } @@ -617,26 +644,30 @@ class AlertIndices( private suspend fun getAlertIDsFromAlertHistoryIndex(indexName: String): List { val queryBuilder = QueryBuilders.matchAllQuery() - val searchSourceBuilder = SearchSourceBuilder() - .query(queryBuilder) - .version(true) + val searchSourceBuilder = + SearchSourceBuilder() + .query(queryBuilder) + .version(true) - val searchRequest = SearchRequest() - .indices(indexName) - .source(searchSourceBuilder) + val searchRequest = + SearchRequest() + .indices(indexName) + .source(searchSourceBuilder) val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } - val alertIDs = searchResponse.hits.map { hit -> - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val alert 
= Alert.parse(xcp, hit.id, hit.version) - alert.id - } + val alertIDs = + searchResponse.hits.map { hit -> + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val alert = Alert.parse(xcp, hit.id, hit.version) + alert.id + } return alertIDs.distinct() } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertMover.kt b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertMover.kt index 08d1e884f..c970ff0b2 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertMover.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/alerts/AlertMover.kt @@ -54,78 +54,98 @@ class AlertMover { * 3. Delete alerts from monitor's DataSources.alertsIndex * 4. Schedule a retry if there were any failures */ - suspend fun moveAlerts(client: Client, monitorId: String, monitor: Monitor?) 
{ + suspend fun moveAlerts( + client: Client, + monitorId: String, + monitor: Monitor?, + ) { var alertIndex = monitor?.dataSources?.alertsIndex ?: ALERT_INDEX var alertHistoryIndex = monitor?.dataSources?.alertsHistoryIndex ?: ALERT_HISTORY_WRITE_INDEX - val boolQuery = QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitorId)) + val boolQuery = + QueryBuilders + .boolQuery() + .filter(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, monitorId)) if (monitor != null) { boolQuery.mustNot(QueryBuilders.termsQuery(Alert.TRIGGER_ID_FIELD, monitor.triggers.map { it.id })) } - val activeAlertsQuery = SearchSourceBuilder.searchSource() - .query(boolQuery) - .version(true) + val activeAlertsQuery = + SearchSourceBuilder + .searchSource() + .query(boolQuery) + .version(true) - val activeAlertsRequest = SearchRequest(alertIndex) - .routing(monitorId) - .source(activeAlertsQuery) + val activeAlertsRequest = + SearchRequest(alertIndex) + .routing(monitorId) + .source(activeAlertsQuery) val response: SearchResponse = client.suspendUntil { search(activeAlertsRequest, it) } // If no alerts are found, simply return if (response.hits.totalHits?.value == 0L) return - val indexRequests = response.hits.map { hit -> - IndexRequest(alertHistoryIndex) - .routing(monitorId) - .source( - Alert.parse(alertContentParser(hit.sourceRef), hit.id, hit.version) - .copy(state = Alert.State.DELETED) - .toXContentWithUser(XContentFactory.jsonBuilder()) - ) - .version(hit.version) - .versionType(VersionType.EXTERNAL_GTE) - .id(hit.id) - } + val indexRequests = + response.hits.map { hit -> + IndexRequest(alertHistoryIndex) + .routing(monitorId) + .source( + Alert + .parse(alertContentParser(hit.sourceRef), hit.id, hit.version) + .copy(state = Alert.State.DELETED) + .toXContentWithUser(XContentFactory.jsonBuilder()), + ).version(hit.version) + .versionType(VersionType.EXTERNAL_GTE) + .id(hit.id) + } val copyRequest = BulkRequest().add(indexRequests) val copyResponse: 
BulkResponse = client.suspendUntil { bulk(copyRequest, it) } - val deleteRequests = copyResponse.items.filterNot { it.isFailed }.map { - DeleteRequest(alertIndex, it.id) - .routing(monitorId) - .version(it.version) - .versionType(VersionType.EXTERNAL_GTE) - } + val deleteRequests = + copyResponse.items.filterNot { it.isFailed }.map { + DeleteRequest(alertIndex, it.id) + .routing(monitorId) + .version(it.version) + .versionType(VersionType.EXTERNAL_GTE) + } val deleteResponse: BulkResponse = client.suspendUntil { bulk(BulkRequest().add(deleteRequests), it) } if (copyResponse.hasFailures()) { - val retryCause = copyResponse.items.filter { it.isFailed } - .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } - ?.failure?.cause + val retryCause = + copyResponse.items + .filter { it.isFailed } + .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } + ?.failure + ?.cause throw RuntimeException( "Failed to copy alerts for [$monitorId, ${monitor?.triggers?.map { it.id }}]: " + copyResponse.buildFailureMessage(), - retryCause + retryCause, ) } if (deleteResponse.hasFailures()) { - val retryCause = deleteResponse.items.filter { it.isFailed } - .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } - ?.failure?.cause + val retryCause = + deleteResponse.items + .filter { it.isFailed } + .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } + ?.failure + ?.cause throw RuntimeException( "Failed to delete alerts for [$monitorId, ${monitor?.triggers?.map { it.id }}]: " + deleteResponse.buildFailureMessage(), - retryCause + retryCause, ) } } private fun alertContentParser(bytesReference: BytesReference): XContentParser { - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - bytesReference, XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + bytesReference, + XContentType.JSON, + ) 
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) return xcp } @@ -141,7 +161,12 @@ class AlertMover { * 3. Delete alerts from monitor's DataSources.alertsIndex * 4. Schedule a retry if there were any failures */ - suspend fun moveAlerts(client: Client, workflowId: String, workflow: Workflow?, monitorCtx: MonitorRunnerExecutionContext) { + suspend fun moveAlerts( + client: Client, + workflowId: String, + workflow: Workflow?, + monitorCtx: MonitorRunnerExecutionContext, + ) { var alertIndex = ALERT_INDEX var alertHistoryIndex = ALERT_HISTORY_WRITE_INDEX if (workflow != null) { @@ -158,20 +183,23 @@ class AlertMover { client.suspendUntil { client.get( GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, delegates[i].monitorId), - it + it, ) } if (getResponse!!.isExists) { val monitor = ScheduledJobUtils.parseMonitorFromScheduledJobDocSource( monitorCtx.xContentRegistry!!, - response = getResponse + response = getResponse, ) alertIndex = monitor.dataSources.alertsIndex alertHistoryIndex = - if (monitor.dataSources.alertsHistoryIndex == null) alertHistoryIndex - else monitor.dataSources.alertsHistoryIndex!! + if (monitor.dataSources.alertsHistoryIndex == null) { + alertHistoryIndex + } else { + monitor.dataSources.alertsHistoryIndex!! 
+ } } i++ } @@ -181,68 +209,82 @@ class AlertMover { } } val dataSources = DataSources().copy(alertsHistoryIndex = alertHistoryIndex, alertsIndex = alertIndex) - /** check if alert index is initialized **/ - if (monitorCtx.alertIndices!!.isAlertInitialized(dataSources) == false) + // check if alert index is initialized * + if (monitorCtx.alertIndices!!.isAlertInitialized(dataSources) == false) { return - val boolQuery = QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery(Alert.WORKFLOW_ID_FIELD, workflowId)) + } + val boolQuery = + QueryBuilders + .boolQuery() + .must(QueryBuilders.termQuery(Alert.WORKFLOW_ID_FIELD, workflowId)) if (workflow != null) { boolQuery.mustNot(QueryBuilders.termsQuery(Alert.TRIGGER_ID_FIELD, workflow.triggers.map { it.id })) } - val activeAlertsQuery = SearchSourceBuilder.searchSource() - .query(boolQuery) - .version(true) + val activeAlertsQuery = + SearchSourceBuilder + .searchSource() + .query(boolQuery) + .version(true) - val activeAlertsRequest = SearchRequest(alertIndex) - .routing(workflowId) - .source(activeAlertsQuery) + val activeAlertsRequest = + SearchRequest(alertIndex) + .routing(workflowId) + .source(activeAlertsQuery) val response: SearchResponse = client.suspendUntil { search(activeAlertsRequest, it) } // If no alerts are found, simply return if (response.hits.totalHits?.value == 0L) return - val indexRequests = response.hits.map { hit -> - IndexRequest(alertHistoryIndex) - .routing(workflowId) - .source( - Alert.parse(alertContentParser(hit.sourceRef), hit.id, hit.version) - .copy(state = Alert.State.DELETED) - .toXContentWithUser(XContentFactory.jsonBuilder()) - ) - .version(hit.version) - .versionType(VersionType.EXTERNAL_GTE) - .id(hit.id) - } + val indexRequests = + response.hits.map { hit -> + IndexRequest(alertHistoryIndex) + .routing(workflowId) + .source( + Alert + .parse(alertContentParser(hit.sourceRef), hit.id, hit.version) + .copy(state = Alert.State.DELETED) + 
.toXContentWithUser(XContentFactory.jsonBuilder()), + ).version(hit.version) + .versionType(VersionType.EXTERNAL_GTE) + .id(hit.id) + } val copyRequest = BulkRequest().add(indexRequests) val copyResponse: BulkResponse = client.suspendUntil { bulk(copyRequest, it) } - val deleteRequests = copyResponse.items.filterNot { it.isFailed }.map { - DeleteRequest(alertIndex, it.id) - .routing(workflowId) - .version(it.version) - .versionType(VersionType.EXTERNAL_GTE) - } + val deleteRequests = + copyResponse.items.filterNot { it.isFailed }.map { + DeleteRequest(alertIndex, it.id) + .routing(workflowId) + .version(it.version) + .versionType(VersionType.EXTERNAL_GTE) + } val deleteResponse: BulkResponse = client.suspendUntil { bulk(BulkRequest().add(deleteRequests), it) } if (copyResponse.hasFailures()) { - val retryCause = copyResponse.items.filter { it.isFailed } - .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } - ?.failure?.cause + val retryCause = + copyResponse.items + .filter { it.isFailed } + .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } + ?.failure + ?.cause throw RuntimeException( "Failed to copy alerts for [$workflowId, ${workflow?.triggers?.map { it.id }}]: " + copyResponse.buildFailureMessage(), - retryCause + retryCause, ) } if (deleteResponse.hasFailures()) { - val retryCause = deleteResponse.items.filter { it.isFailed } - .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } - ?.failure?.cause + val retryCause = + deleteResponse.items + .filter { it.isFailed } + .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } + ?.failure + ?.cause throw RuntimeException( "Failed to delete alerts for [$workflowId, ${workflow?.triggers?.map { it.id }}]: " + deleteResponse.buildFailureMessage(), - retryCause + retryCause, ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/alertsv2/AlertV2Indices.kt b/alerting/src/main/kotlin/org/opensearch/alerting/alertsv2/AlertV2Indices.kt index 75257fc25..402b284a2 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/alertsv2/AlertV2Indices.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/alertsv2/AlertV2Indices.kt @@ -58,9 +58,8 @@ class AlertV2Indices( settings: Settings, private val client: Client, private val threadPool: ThreadPool, - private val clusterService: ClusterService + private val clusterService: ClusterService, ) : ClusterStateListener { - init { clusterService.addListener(this) clusterService.clusterSettings.addSettingsUpdateConsumer(ALERT_V2_HISTORY_ENABLED) { alertV2HistoryEnabled = it } @@ -77,7 +76,6 @@ class AlertV2Indices( } companion object { - /** The in progress alert history index. */ const val ALERT_V2_INDEX = ".opensearch-alerting-v2-alerts" @@ -94,8 +92,7 @@ class AlertV2Indices( const val ALL_ALERT_V2_INDEX_PATTERN = ".opensearch-alerting-v2-alert*" @JvmStatic - fun alertV2Mapping() = - AlertV2Indices::class.java.getResource("alert_v2_mapping.json").readText() + fun alertV2Mapping() = AlertV2Indices::class.java.getResource("alert_v2_mapping.json").readText() } @Volatile private var alertV2HistoryEnabled = ALERT_V2_HISTORY_ENABLED.get(settings) @@ -127,8 +124,9 @@ class AlertV2Indices( rolloverAlertV2HistoryIndex() // schedule the next rollover for approx MAX_AGE later - scheduledAlertV2Rollover = threadPool - .scheduleWithFixedDelay({ rolloverAndDeleteAlertV2HistoryIndices() }, alertV2HistoryRolloverPeriod, executorName()) + scheduledAlertV2Rollover = + threadPool + .scheduleWithFixedDelay({ rolloverAndDeleteAlertV2HistoryIndices() }, alertV2HistoryRolloverPeriod, executorName()) } catch (e: Exception) { logger.error("Error rolling over alerts v2 history index.", e) } @@ -138,9 +136,7 @@ class AlertV2Indices( scheduledAlertV2Rollover?.cancel() } - private fun executorName(): String { - return ThreadPool.Names.MANAGEMENT - } + private fun executorName(): String = ThreadPool.Names.MANAGEMENT override fun clusterChanged(event: ClusterChangedEvent) { // Instead of using a 
LocalNodeClusterManagerListener to track clustermanager changes, this service will @@ -163,8 +159,9 @@ class AlertV2Indices( private fun rescheduleAlertRollover() { if (clusterService.state().nodes.isLocalNodeElectedClusterManager) { scheduledAlertV2Rollover?.cancel() - scheduledAlertV2Rollover = threadPool - .scheduleWithFixedDelay({ rolloverAndDeleteAlertV2HistoryIndices() }, alertV2HistoryRolloverPeriod, executorName()) + scheduledAlertV2Rollover = + threadPool + .scheduleWithFixedDelay({ rolloverAndDeleteAlertV2HistoryIndices() }, alertV2HistoryRolloverPeriod, executorName()) } } @@ -181,39 +178,45 @@ class AlertV2Indices( suspend fun createOrUpdateInitialAlertV2HistoryIndex() { if (!alertV2HistoryIndexInitialized) { alertV2HistoryIndexInitialized = createIndex(ALERT_V2_HISTORY_INDEX_PATTERN, alertV2Mapping(), ALERT_V2_HISTORY_WRITE_INDEX) - if (alertV2HistoryIndexInitialized) - IndexUtils.lastUpdatedAlertV2HistoryIndex = IndexUtils.getIndexNameWithAlias( - clusterService.state(), - ALERT_V2_HISTORY_WRITE_INDEX - ) + if (alertV2HistoryIndexInitialized) { + IndexUtils.lastUpdatedAlertV2HistoryIndex = + IndexUtils.getIndexNameWithAlias( + clusterService.state(), + ALERT_V2_HISTORY_WRITE_INDEX, + ) + } } else { updateIndexMapping(ALERT_V2_HISTORY_WRITE_INDEX, alertV2Mapping(), true) } alertV2HistoryIndexInitialized } - fun isAlertV2Initialized(): Boolean { - return alertV2IndexInitialized && alertV2HistoryIndexInitialized - } + fun isAlertV2Initialized(): Boolean = alertV2IndexInitialized && alertV2HistoryIndexInitialized private fun rolloverAndDeleteAlertV2HistoryIndices() { if (alertV2HistoryEnabled) rolloverAlertV2HistoryIndex() deleteOldIndices("History", ALERT_V2_HISTORY_ALL) } - private suspend fun createIndex(index: String, schemaMapping: String, alias: String? = null): Boolean { + private suspend fun createIndex( + index: String, + schemaMapping: String, + alias: String? = null, + ): Boolean { // This should be a fast check of local cluster state. 
Should be exceedingly rare that the local cluster // state does not contain the index and multiple nodes concurrently try to create the index. // If it does happen that error is handled we catch the ResourceAlreadyExistsException - val existsResponse: IndicesExistsResponse = client.admin().indices().suspendUntil { - exists(IndicesExistsRequest(index).local(true), it) - } + val existsResponse: IndicesExistsResponse = + client.admin().indices().suspendUntil { + exists(IndicesExistsRequest(index).local(true), it) + } if (existsResponse.isExists) return true logger.debug("index: [$index] schema mappings: [$schemaMapping]") - val request = CreateIndexRequest(index) - .mapping(schemaMapping) - .settings(Settings.builder().put("index.hidden", true).build()) + val request = + CreateIndexRequest(index) + .mapping(schemaMapping) + .settings(Settings.builder().put("index.hidden", true).build()) if (alias != null) request.alias(Alias(alias)) return try { @@ -228,7 +231,11 @@ class AlertV2Indices( } } - private suspend fun updateIndexMapping(index: String, mapping: String, alias: Boolean = false) { + private suspend fun updateIndexMapping( + index: String, + mapping: String, + alias: Boolean = false, + ) { val clusterState = clusterService.state() var targetIndex = index if (alias) { @@ -239,8 +246,9 @@ class AlertV2Indices( return } - val putMappingRequest: PutMappingRequest = PutMappingRequest(targetIndex) - .source(mapping, XContentType.JSON) + val putMappingRequest: PutMappingRequest = + PutMappingRequest(targetIndex) + .source(mapping, XContentType.JSON) val updateResponse: AcknowledgedResponse = client.admin().indices().suspendUntil { putMapping(putMappingRequest, it) } if (updateResponse.isAcknowledged) { logger.info("Index mapping of $targetIndex is updated") @@ -250,7 +258,10 @@ class AlertV2Indices( } } - private fun setIndexUpdateFlag(index: String, targetIndex: String) { + private fun setIndexUpdateFlag( + index: String, + targetIndex: String, + ) { when (index) { 
ALERT_V2_INDEX -> IndexUtils.alertV2IndexUpdated() ALERT_V2_HISTORY_WRITE_INDEX -> IndexUtils.lastUpdatedAlertV2HistoryIndex = targetIndex @@ -264,7 +275,7 @@ class AlertV2Indices( map: String, docsCondition: Long, ageCondition: TimeValue, - writeIndex: String + writeIndex: String, ) { if (!initialized) { return @@ -272,7 +283,8 @@ class AlertV2Indices( // We have to pass null for newIndexName in order to get Elastic to increment the index count. val request = RolloverRequest(index, null) - request.createIndexRequest.index(pattern) + request.createIndexRequest + .index(pattern) .mapping(map) .settings(Settings.builder().put("index.hidden", true).build()) request.addMaxIndexDocsCondition(docsCondition) @@ -287,10 +299,11 @@ class AlertV2Indices( lastRolloverTime = TimeValue.timeValueMillis(threadPool.absoluteTimeInMillis()) } } + override fun onFailure(e: Exception) { logger.error("$writeIndex not roll over failed.") } - } + }, ) } @@ -302,22 +315,28 @@ class AlertV2Indices( alertV2Mapping(), alertV2HistoryMaxDocs, alertV2HistoryMaxAge, - ALERT_V2_HISTORY_WRITE_INDEX + ALERT_V2_HISTORY_WRITE_INDEX, ) } - private fun deleteOldIndices(tag: String, indices: String) { - val clusterStateRequest = ClusterStateRequest() - .clear() - .indices(indices) - .metadata(true) - .local(true) - .indicesOptions(IndicesOptions.strictExpand()) + private fun deleteOldIndices( + tag: String, + indices: String, + ) { + val clusterStateRequest = + ClusterStateRequest() + .clear() + .indices(indices) + .metadata(true) + .local(true) + .indicesOptions(IndicesOptions.strictExpand()) client.admin().cluster().state( clusterStateRequest, object : ActionListener { override fun onResponse(clusterStateResponse: ClusterStateResponse) { - if (clusterStateResponse.state.metadata.indices.isNotEmpty()) { + if (clusterStateResponse.state.metadata.indices + .isNotEmpty() + ) { scope.launch { val indicesToDelete = getIndicesToDelete(clusterStateResponse) logger.info("Deleting old $tag indices viz 
$indicesToDelete") @@ -327,10 +346,11 @@ class AlertV2Indices( logger.info("No Old $tag Indices to delete") } } + override fun onFailure(e: Exception) { logger.error("Error fetching cluster state") } - } + }, ) } @@ -342,7 +362,7 @@ class AlertV2Indices( indexMetaData, alertV2HistoryRetentionPeriod.millis, ALERT_V2_HISTORY_WRITE_INDEX, - alertV2HistoryEnabled + alertV2HistoryEnabled, )?.let { indicesToDelete.add(it) } } return indicesToDelete @@ -352,7 +372,7 @@ class AlertV2Indices( indexMetadata: IndexMetadata, retentionPeriodMillis: Long, writeIndex: String, - historyEnabled: Boolean + historyEnabled: Boolean, ): String? { val creationTime = indexMetadata.creationDate if ((Instant.now().toEpochMilli() - creationTime) > retentionPeriodMillis) { @@ -381,16 +401,17 @@ class AlertV2Indices( override fun onResponse(deleteIndicesResponse: AcknowledgedResponse) { if (!deleteIndicesResponse.isAcknowledged) { logger.error( - "Could not delete one or more Alerting V2 history indices: $indicesToDelete. Retrying one by one." + "Could not delete one or more Alerting V2 history indices: $indicesToDelete. Retrying one by one.", ) deleteOldHistoryIndex(indicesToDelete) } } + override fun onFailure(e: Exception) { logger.error("Delete for Alerting V2 History Indices $indicesToDelete Failed. 
Retrying one by one.") deleteOldHistoryIndex(indicesToDelete) } - } + }, ) } } @@ -408,10 +429,11 @@ class AlertV2Indices( } } } + override fun onFailure(e: Exception) { logger.error("Exception ${e.message} while deleting the index $index") } - } + }, ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/alertsv2/AlertV2Mover.kt b/alerting/src/main/kotlin/org/opensearch/alerting/alertsv2/AlertV2Mover.kt index bf5aa9fad..b9d67225f 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/alertsv2/AlertV2Mover.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/alertsv2/AlertV2Mover.kt @@ -110,13 +110,14 @@ class AlertV2Mover( // try to sweep current AlertV2s for expiration immediately as we might be restarting the cluster moveOrDeleteAlertV2s() // schedule expiration checks and expirations to happen repeatedly at some interval - scheduledAlertsV2CheckAndExpire = threadPool - .scheduleWithFixedDelay({ moveOrDeleteAlertV2s() }, checkForExpirationInterval, executorName) + scheduledAlertsV2CheckAndExpire = + threadPool + .scheduleWithFixedDelay({ moveOrDeleteAlertV2s() }, checkForExpirationInterval, executorName) } catch (e: Exception) { // This should be run on cluster startup logger.error( "Error sweeping AlertV2s for expiration. 
This cannot be done until clustermanager node is restarted.", - e + e, ) } } @@ -150,21 +151,23 @@ class AlertV2Mover( private suspend fun searchForExpiredAlerts(): List { logger.debug("beginning search for expired alerts") - /* first collect all triggers and their expire durations */ + // first collect all triggers and their expire durations // when searching the alerting-config index, only trigger IDs and their expire durations are needed - val monitorV2sSearchQuery = SearchSourceBuilder.searchSource() - .query(QueryBuilders.existsQuery(MONITOR_V2_TYPE)) - .fetchSource( - arrayOf( - "$MONITOR_V2_TYPE.$PPL_SQL_MONITOR_TYPE.$TRIGGERS_FIELD.$ID_FIELD", - "$MONITOR_V2_TYPE.$PPL_SQL_MONITOR_TYPE.$TRIGGERS_FIELD.$EXPIRE_FIELD" - ), - null - ) - .size(MAX_SEARCH_SIZE) - .version(true) - val monitorV2sRequest = SearchRequest(SCHEDULED_JOBS_INDEX) - .source(monitorV2sSearchQuery) + val monitorV2sSearchQuery = + SearchSourceBuilder + .searchSource() + .query(QueryBuilders.existsQuery(MONITOR_V2_TYPE)) + .fetchSource( + arrayOf( + "$MONITOR_V2_TYPE.$PPL_SQL_MONITOR_TYPE.$TRIGGERS_FIELD.$ID_FIELD", + "$MONITOR_V2_TYPE.$PPL_SQL_MONITOR_TYPE.$TRIGGERS_FIELD.$EXPIRE_FIELD", + ), + null, + ).size(MAX_SEARCH_SIZE) + .version(true) + val monitorV2sRequest = + SearchRequest(SCHEDULED_JOBS_INDEX) + .source(monitorV2sSearchQuery) val searchMonitorV2sResponse: SearchResponse = client.suspendUntil { search(monitorV2sRequest, it) } logger.debug("searching triggers for their expire durations") @@ -186,7 +189,7 @@ class AlertV2Mover( logger.debug("trigger to expire duration map: $triggerToExpireDuration") - /* now collect all expired alerts */ + // now collect all expired alerts logger.debug("searching active alerts index for expired alerts") val now = Instant.now().toEpochMilli() @@ -201,9 +204,10 @@ class AlertV2Mover( val maxValidTime = now - expireDurationMillis expiredAlertsBoolQuery.should( - QueryBuilders.boolQuery() + QueryBuilders + .boolQuery() 
.must(QueryBuilders.termQuery(TRIGGER_V2_ID_FIELD, triggerId)) - .must(QueryBuilders.rangeQuery(TRIGGERED_TIME_FIELD).lte(maxValidTime)) + .must(QueryBuilders.rangeQuery(TRIGGERED_TIME_FIELD).lte(maxValidTime)), ) } @@ -215,20 +219,24 @@ class AlertV2Mover( // that even with those measures in place, an alert that came from a // now nonexistent trigger was somehow found expiredAlertsBoolQuery.should( - QueryBuilders.boolQuery() - .mustNot(QueryBuilders.termsQuery(TRIGGER_V2_ID_FIELD, triggerToExpireDuration.keys.toList())) + QueryBuilders + .boolQuery() + .mustNot(QueryBuilders.termsQuery(TRIGGER_V2_ID_FIELD, triggerToExpireDuration.keys.toList())), ) // Explicitly specify that at least one should clause must match expiredAlertsBoolQuery.minimumShouldMatch(1) // search for the expired alerts - val expiredAlertsSearchQuery = SearchSourceBuilder.searchSource() - .query(expiredAlertsBoolQuery) - .size(MAX_SEARCH_SIZE) - .version(true) - val expiredAlertsRequest = SearchRequest(ALERT_V2_INDEX) - .source(expiredAlertsSearchQuery) + val expiredAlertsSearchQuery = + SearchSourceBuilder + .searchSource() + .query(expiredAlertsBoolQuery) + .size(MAX_SEARCH_SIZE) + .version(true) + val expiredAlertsRequest = + SearchRequest(ALERT_V2_INDEX) + .source(expiredAlertsSearchQuery) val expiredAlertsResponse: SearchResponse = client.suspendUntil { search(expiredAlertsRequest, it) } // parse the search results into full alert docs, as they will need to be @@ -236,7 +244,7 @@ class AlertV2Mover( val expiredAlertV2s = mutableListOf() expiredAlertsResponse.hits.forEach { hit -> expiredAlertV2s.add( - AlertV2.parse(alertV2ContentParser(hit.sourceRef), hit.id, hit.version) + AlertV2.parse(alertV2ContentParser(hit.sourceRef), hit.id, hit.version), ) } @@ -252,12 +260,13 @@ class AlertV2Mover( return null } - val deleteRequests = expiredAlerts.map { - DeleteRequest(ALERT_V2_INDEX, it.id) - .routing(it.monitorId) - .version(it.version) - .versionType(VersionType.EXTERNAL_GTE) - } + val 
deleteRequests = + expiredAlerts.map { + DeleteRequest(ALERT_V2_INDEX, it.id) + .routing(it.monitorId) + .version(it.version) + .versionType(VersionType.EXTERNAL_GTE) + } val deleteRequest = BulkRequest().add(deleteRequests) val deleteResponse: BulkResponse = client.suspendUntil { bulk(deleteRequest, it) } @@ -272,14 +281,15 @@ class AlertV2Mover( return null } - val indexRequests = expiredAlerts.map { - IndexRequest(ALERT_V2_HISTORY_WRITE_INDEX) - .routing(it.monitorId) - .source(it.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) - .version(it.version) - .versionType(VersionType.EXTERNAL_GTE) - .id(it.id) - } + val indexRequests = + expiredAlerts.map { + IndexRequest(ALERT_V2_HISTORY_WRITE_INDEX) + .routing(it.monitorId) + .source(it.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .version(it.version) + .versionType(VersionType.EXTERNAL_GTE) + .id(it.id) + } val copyRequest = BulkRequest().add(indexRequests) val copyResponse: BulkResponse = client.suspendUntil { bulk(copyRequest, it) } @@ -287,7 +297,10 @@ class AlertV2Mover( return copyResponse } - private suspend fun deleteExpiredAlertsThatWereCopied(copyResponse: BulkResponse?, expiredAlerts: List): BulkResponse? { + private suspend fun deleteExpiredAlertsThatWereCopied( + copyResponse: BulkResponse?, + expiredAlerts: List, + ): BulkResponse? 
{ logger.debug("beginning to delete expired alerts that were copied to history write index") // if there were no expired alerts to copy, skip deleting anything if (copyResponse == null) { @@ -298,12 +311,13 @@ class AlertV2Mover( // monitor IDs for routing is easier val alertsById: Map = expiredAlerts.associateBy { it.id } - val deleteRequests = copyResponse.items.filterNot { it.isFailed }.map { - DeleteRequest(ALERT_V2_INDEX, it.id) - .routing(alertsById[it.id]!!.monitorId) - .version(it.version) - .versionType(VersionType.EXTERNAL_GTE) - } + val deleteRequests = + copyResponse.items.filterNot { it.isFailed }.map { + DeleteRequest(ALERT_V2_INDEX, it.id) + .routing(alertsById[it.id]!!.monitorId) + .version(it.version) + .versionType(VersionType.EXTERNAL_GTE) + } val deleteRequest = BulkRequest().add(deleteRequests) val deleteResponse: BulkResponse = client.suspendUntil { bulk(deleteRequest, it) } @@ -313,27 +327,29 @@ class AlertV2Mover( private fun checkForFailures(bulkResponse: BulkResponse?) 
{ bulkResponse?.let { if (bulkResponse.hasFailures()) { - val retryCause = bulkResponse.items.filter { it.isFailed } - .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } - ?.failure?.cause + val retryCause = + bulkResponse.items + .filter { it.isFailed } + .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } + ?.failure + ?.cause logger.error( "Failed to move or delete alert v2s: ${bulkResponse.buildFailureMessage()}", - retryCause + retryCause, ) } } } - private fun alertV2ContentParser(bytesReference: BytesReference): XContentParser { - return XContentHelper.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - bytesReference, XContentType.JSON + private fun alertV2ContentParser(bytesReference: BytesReference): XContentParser = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + bytesReference, + XContentType.JSON, ) - } - private fun areAlertV2IndicesPresent(): Boolean { - return alertV2IndexInitialized && alertV2HistoryIndexInitialized - } + private fun areAlertV2IndicesPresent(): Boolean = alertV2IndexInitialized && alertV2HistoryIndexInitialized companion object { // this method is used by MonitorRunnerService's postIndex and postDelete @@ -341,13 +357,19 @@ class AlertV2Mover( // (in the case of alert v2 history disabled) the alerts generated by // a monitor in response to the event that the monitor gets updated // or deleted - suspend fun moveAlertV2s(monitorV2Id: String, monitorV2: MonitorV2?, monitorCtx: MonitorRunnerExecutionContext) { + suspend fun moveAlertV2s( + monitorV2Id: String, + monitorV2: MonitorV2?, + monitorCtx: MonitorRunnerExecutionContext, + ) { logger.debug("beginning to move alerts for postIndex or postDelete of monitor: $monitorV2Id") val client = monitorCtx.client!! 
// first collect all alerts that came from this updated or deleted monitor - val boolQuery = QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery(AlertV2.MONITOR_V2_ID_FIELD, monitorV2Id)) + val boolQuery = + QueryBuilders + .boolQuery() + .filter(QueryBuilders.termQuery(AlertV2.MONITOR_V2_ID_FIELD, monitorV2Id)) /* this monitorV2 != null case happens when this function is called by postIndex. if the monitor is updated, @@ -363,17 +385,20 @@ class AlertV2Mover( likely won't explicitly pass in trigger IDs for their updated triggers that exactly match the IDs of the old triggers. this means Alerting will generate a new ID for the updated triggers by default, meaning this logic will pick up those updated triggers and correctly move/delete the alerts - */ + */ if (monitorV2 != null) { boolQuery.mustNot(QueryBuilders.termsQuery(TRIGGER_V2_ID_FIELD, monitorV2.triggers.map { it.id })) } - val alertsSearchQuery = SearchSourceBuilder.searchSource() - .query(boolQuery) - .size(MAX_SEARCH_SIZE) - .version(true) - val activeAlertsRequest = SearchRequest(ALERT_V2_INDEX) - .source(alertsSearchQuery) + val alertsSearchQuery = + SearchSourceBuilder + .searchSource() + .query(boolQuery) + .size(MAX_SEARCH_SIZE) + .version(true) + val activeAlertsRequest = + SearchRequest(ALERT_V2_INDEX) + .source(alertsSearchQuery) val searchAlertsResponse: SearchResponse = client.suspendUntil { search(activeAlertsRequest, it) } // If no alerts are found, simply return @@ -384,12 +409,14 @@ class AlertV2Mover( activeAlerts.add( AlertV2.parse( XContentHelper.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, XContentType.JSON + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, ), hit.id, - hit.version - ) + hit.version, + ), ) } @@ -404,33 +431,40 @@ class AlertV2Mover( var copyResponse: BulkResponse? 
= null if (alertV2HistoryEnabled) { logger.debug("alert v2 history enabled, copying alerts to history write index") - val indexRequests = searchAlertsResponse.hits.map { hit -> - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, XContentType.JSON - ) - - IndexRequest(ALERT_V2_HISTORY_WRITE_INDEX) - .routing(monitorV2Id) - .source( - AlertV2.parse(xcp, hit.id, hit.version) - .toXContentWithUser(XContentFactory.jsonBuilder()) - ) - .version(hit.version) - .versionType(VersionType.EXTERNAL_GTE) - .id(hit.id) - } + val indexRequests = + searchAlertsResponse.hits.map { hit -> + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + + IndexRequest(ALERT_V2_HISTORY_WRITE_INDEX) + .routing(monitorV2Id) + .source( + AlertV2 + .parse(xcp, hit.id, hit.version) + .toXContentWithUser(XContentFactory.jsonBuilder()), + ).version(hit.version) + .versionType(VersionType.EXTERNAL_GTE) + .id(hit.id) + } val copyRequest = BulkRequest().add(indexRequests) copyResponse = client.suspendUntil { bulk(copyRequest, it) } if (copyResponse!!.hasFailures()) { - val retryCause = copyResponse.items.filter { it.isFailed } - .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } - ?.failure?.cause + val retryCause = + copyResponse.items + .filter { it.isFailed } + .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } + ?.failure + ?.cause throw RuntimeException( "Failed to copy alertV2s for [$monitorV2Id, ${monitorV2?.triggers?.map { it.id }}]: " + copyResponse.buildFailureMessage(), - retryCause + retryCause, ) } } @@ -438,38 +472,42 @@ class AlertV2Mover( logger.debug("deleting alerts related to monitor: $monitorV2Id") // prepare deletion request - val deleteRequests = if (alertV2HistoryEnabled) { - // if alerts were to be migrated, delete only the ones - // that were successfully copied over - 
copyResponse!!.items.filterNot { it.isFailed }.map { - DeleteRequest(ALERT_V2_INDEX, it.id) - .routing(alertsById[it.id]!!.monitorId) - .version(it.version) - .versionType(VersionType.EXTERNAL_GTE) - } - } else { - // otherwise just directly get the original - // set of alerts - searchAlertsResponse.hits.map { hit -> - DeleteRequest(ALERT_V2_INDEX, hit.id) - .routing(alertsById[hit.id]!!.monitorId) - .version(hit.version) - .versionType(VersionType.EXTERNAL_GTE) + val deleteRequests = + if (alertV2HistoryEnabled) { + // if alerts were to be migrated, delete only the ones + // that were successfully copied over + copyResponse!!.items.filterNot { it.isFailed }.map { + DeleteRequest(ALERT_V2_INDEX, it.id) + .routing(alertsById[it.id]!!.monitorId) + .version(it.version) + .versionType(VersionType.EXTERNAL_GTE) + } + } else { + // otherwise just directly get the original + // set of alerts + searchAlertsResponse.hits.map { hit -> + DeleteRequest(ALERT_V2_INDEX, hit.id) + .routing(alertsById[hit.id]!!.monitorId) + .version(hit.version) + .versionType(VersionType.EXTERNAL_GTE) + } } - } // execute delete request val deleteRequest = BulkRequest().add(deleteRequests) val deleteResponse: BulkResponse = client.suspendUntil { bulk(deleteRequest, it) } if (deleteResponse.hasFailures()) { - val retryCause = deleteResponse.items.filter { it.isFailed } - .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } - ?.failure?.cause + val retryCause = + deleteResponse.items + .filter { it.isFailed } + .firstOrNull { it.status() == RestStatus.TOO_MANY_REQUESTS } + ?.failure + ?.cause throw RuntimeException( "Failed to delete alertV2s for [$monitorV2Id, ${monitorV2?.triggers?.map { it.id }}]: " + deleteResponse.buildFailureMessage(), - retryCause + retryCause, ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/parsers/ChainedAlertExpressionParser.kt 
b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/parsers/ChainedAlertExpressionParser.kt index 999b9a977..03d1f3e10 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/parsers/ChainedAlertExpressionParser.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/parsers/ChainedAlertExpressionParser.kt @@ -14,9 +14,8 @@ import org.opensearch.alerting.chainedAlertCondition.tokens.CAExpressionOperator * @param triggerExpression String containing the trigger expression for the monitor */ class ChainedAlertExpressionParser( - triggerExpression: String + triggerExpression: String, ) : ChainedAlertExpressionRPNBaseParser(triggerExpression) { - override fun parse(): ChainedAlertRPNResolver { val expression = expressionToParse.replace(" ", "") @@ -30,7 +29,9 @@ class ChainedAlertExpressionParser( breaks[ind].let { if (it.length > 1) { a.addAll(breakString(breaks[ind], s)) - } else a.add(it) + } else { + a.add(it) + } } } breaks.clear() @@ -40,7 +41,10 @@ class ChainedAlertExpressionParser( return ChainedAlertRPNResolver(convertInfixToPostfix(breaks)) } - private fun breakString(input: String, delimeter: String): ArrayList { + private fun breakString( + input: String, + delimeter: String, + ): ArrayList { val tokens = input.split(delimeter) val array = ArrayList() for (t in tokens) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/parsers/ChainedAlertExpressionRPNBaseParser.kt b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/parsers/ChainedAlertExpressionRPNBaseParser.kt index ff3c29db7..904422fc7 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/parsers/ChainedAlertExpressionRPNBaseParser.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/parsers/ChainedAlertExpressionRPNBaseParser.kt @@ -19,7 +19,7 @@ import java.util.Stack * @param expressionToParse Complete string 
containing the trigger expression */ abstract class ChainedAlertExpressionRPNBaseParser( - protected val expressionToParse: String + protected val expressionToParse: String, ) : ExpressionParser { /** * To perform the Infix-to-postfix conversion of the trigger expression @@ -31,24 +31,32 @@ abstract class ChainedAlertExpressionRPNBaseParser( for (tokenString in expTokens) { if (tokenString.isEmpty()) continue when (val expToken = assignToken(tokenString)) { - is CAExpressionToken -> outputExpTokens.add(expToken) + is CAExpressionToken -> { + outputExpTokens.add(expToken) + } + is CAExpressionOperator -> { when (expToken) { - CAExpressionOperator.PAR_LEFT -> expTokenStack.push(expToken) + CAExpressionOperator.PAR_LEFT -> { + expTokenStack.push(expToken) + } + CAExpressionOperator.PAR_RIGHT -> { var topExpToken = expTokenStack.popExpTokenOrNull() while (topExpToken != null && topExpToken != CAExpressionOperator.PAR_LEFT) { outputExpTokens.add(topExpToken) topExpToken = expTokenStack.popExpTokenOrNull() } - if (topExpToken != CAExpressionOperator.PAR_LEFT) + if (topExpToken != CAExpressionOperator.PAR_LEFT) { throw java.lang.IllegalArgumentException("No matching left parenthesis.") + } } + else -> { var op2 = expTokenStack.peekExpTokenOrNull() while (op2 != null) { val c = expToken.precedence.compareTo(op2.precedence) - if (c < 0 || !expToken.rightAssociative && c <= 0) { + if (c < 0 || (!expToken.rightAssociative && c <= 0)) { outputExpTokens.add(expTokenStack.pop()) } else { break @@ -64,8 +72,9 @@ abstract class ChainedAlertExpressionRPNBaseParser( while (!expTokenStack.isEmpty()) { expTokenStack.peekExpTokenOrNull()?.let { - if (it == CAExpressionOperator.PAR_LEFT) + if (it == CAExpressionOperator.PAR_LEFT) { throw java.lang.IllegalArgumentException("No matching right parenthesis.") + } } val top = expTokenStack.pop() outputExpTokens.add(top) @@ -78,10 +87,10 @@ abstract class ChainedAlertExpressionRPNBaseParser( * Looks up and maps the expression token that 
matches the string version of that expression unit */ private fun assignToken(tokenString: String): ExpressionToken { - // Check "query" string in trigger expression such as in 'query[name="abc"]' - if (tokenString.startsWith(ChainedAlertExpressionConstant.ConstantType.MONITOR.ident)) + if (tokenString.startsWith(ChainedAlertExpressionConstant.ConstantType.MONITOR.ident)) { return CAExpressionToken(tokenString) + } // Check operators in trigger expression such as in [&&, ||, !] for (op in CAExpressionOperator.values()) { @@ -96,19 +105,17 @@ abstract class ChainedAlertExpressionRPNBaseParser( throw IllegalArgumentException("Error while processing the trigger expression '$tokenString'") } - private inline fun Stack.popExpTokenOrNull(): T? { - return try { + private inline fun Stack.popExpTokenOrNull(): T? = + try { pop() as T } catch (e: java.lang.Exception) { null } - } - private inline fun Stack.peekExpTokenOrNull(): T? { - return try { + private inline fun Stack.peekExpTokenOrNull(): T? 
= + try { peek() as T } catch (e: java.lang.Exception) { null } - } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertRPNResolver.kt b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertRPNResolver.kt index dfec9614f..87944cadc 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertRPNResolver.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertRPNResolver.kt @@ -18,7 +18,6 @@ import java.util.Stack class ChainedAlertRPNResolver( private val polishNotation: ArrayList, ) : ChainedAlertTriggerResolver { - private val eqString by lazy { val stringBuilder = StringBuilder() for (expToken in polishNotation) { @@ -45,15 +44,19 @@ class ChainedAlertRPNResolver( val res = true for (expToken in polishNotation) { when (expToken) { - is CAExpressionToken -> tokenStack.push(resolveMonitorExpression(expToken.value, alertGeneratingMonitors)) + is CAExpressionToken -> { + tokenStack.push(resolveMonitorExpression(expToken.value, alertGeneratingMonitors)) + } + is CAExpressionOperator -> { val right = tokenStack.pop() - val expr = when (expToken) { - CAExpressionOperator.AND -> ChainedAlertTriggerExpression.And(tokenStack.pop(), right) - CAExpressionOperator.OR -> ChainedAlertTriggerExpression.Or(tokenStack.pop(), right) - CAExpressionOperator.NOT -> ChainedAlertTriggerExpression.Not(res, right) - else -> throw IllegalArgumentException("No matching operator.") - } + val expr = + when (expToken) { + CAExpressionOperator.AND -> ChainedAlertTriggerExpression.And(tokenStack.pop(), right) + CAExpressionOperator.OR -> ChainedAlertTriggerExpression.Or(tokenStack.pop(), right) + CAExpressionOperator.NOT -> ChainedAlertTriggerExpression.Not(res, right) + else -> throw IllegalArgumentException("No matching operator.") + } tokenStack.push(expr.resolve()) } } @@ -67,10 +70,13 @@ class 
ChainedAlertRPNResolver( when (expToken) { is CAExpressionToken -> { val monitorExpString = expToken.value - if (!monitorExpString.startsWith(ChainedAlertExpressionConstant.ConstantType.MONITOR.ident)) + if (!monitorExpString.startsWith(ChainedAlertExpressionConstant.ConstantType.MONITOR.ident)) { continue - val token = monitorExpString.substringAfter(ChainedAlertExpressionConstant.ConstantType.BRACKET_LEFT.ident) - .substringBefore(ChainedAlertExpressionConstant.ConstantType.BRACKET_RIGHT.ident) + } + val token = + monitorExpString + .substringAfter(ChainedAlertExpressionConstant.ConstantType.BRACKET_LEFT.ident) + .substringBefore(ChainedAlertExpressionConstant.ConstantType.BRACKET_RIGHT.ident) if (token.isEmpty()) continue val tokens = token.split(ChainedAlertExpressionConstant.ConstantType.EQUALS.ident) if (tokens.isEmpty() || tokens.size != 2) continue @@ -82,6 +88,7 @@ class ChainedAlertRPNResolver( } } } + is CAExpressionOperator -> { continue } @@ -90,10 +97,15 @@ class ChainedAlertRPNResolver( return monitorIds } - private fun resolveMonitorExpression(monitorExpString: String, alertGeneratingMonitors: Set): Boolean { + private fun resolveMonitorExpression( + monitorExpString: String, + alertGeneratingMonitors: Set, + ): Boolean { if (!monitorExpString.startsWith(ChainedAlertExpressionConstant.ConstantType.MONITOR.ident)) return false - val token = monitorExpString.substringAfter(ChainedAlertExpressionConstant.ConstantType.BRACKET_LEFT.ident) - .substringBefore(ChainedAlertExpressionConstant.ConstantType.BRACKET_RIGHT.ident) + val token = + monitorExpString + .substringAfter(ChainedAlertExpressionConstant.ConstantType.BRACKET_LEFT.ident) + .substringBefore(ChainedAlertExpressionConstant.ConstantType.BRACKET_RIGHT.ident) if (token.isEmpty()) return false val tokens = token.split(ChainedAlertExpressionConstant.ConstantType.EQUALS.ident) diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertTriggerExpression.kt b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertTriggerExpression.kt index 4b373d853..698d4e139 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertTriggerExpression.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertTriggerExpression.kt @@ -6,27 +6,41 @@ package org.opensearch.alerting.chainedAlertCondition.resolvers sealed class ChainedAlertTriggerExpression { + fun resolve(): Boolean = + when (this) { + is And -> resolveAnd(boolean1, boolean2) + is Or -> resolveOr(boolean1, boolean2) + is Not -> resolveNot(result, boolean2) + } - fun resolve(): Boolean = when (this) { - is And -> resolveAnd(boolean1, boolean2) - is Or -> resolveOr(boolean1, boolean2) - is Not -> resolveNot(result, boolean2) - } + private fun resolveAnd( + boolean1: Boolean, + boolean2: Boolean, + ): Boolean = boolean1 && boolean2 - private fun resolveAnd(boolean1: Boolean, boolean2: Boolean): Boolean { - return boolean1 && boolean2 - } + private fun resolveOr( + boolean1: Boolean, + boolean2: Boolean, + ): Boolean = boolean1 || boolean2 - private fun resolveOr(boolean1: Boolean, boolean2: Boolean): Boolean { - return boolean1 || boolean2 - } - - private fun resolveNot(result: Boolean, boolean2: Boolean): Boolean { - return result && !boolean2 - } + private fun resolveNot( + result: Boolean, + boolean2: Boolean, + ): Boolean = result && !boolean2 // Operators implemented as operator functions - class And(val boolean1: Boolean, val boolean2: Boolean) : ChainedAlertTriggerExpression() - class Or(val boolean1: Boolean, val boolean2: Boolean) : ChainedAlertTriggerExpression() - class Not(val result: Boolean, val boolean2: Boolean) : ChainedAlertTriggerExpression() + class And( + val boolean1: Boolean, + val boolean2: Boolean, + ) : 
ChainedAlertTriggerExpression() + + class Or( + val boolean1: Boolean, + val boolean2: Boolean, + ) : ChainedAlertTriggerExpression() + + class Not( + val result: Boolean, + val boolean2: Boolean, + ) : ChainedAlertTriggerExpression() } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertTriggerResolver.kt b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertTriggerResolver.kt index 6f2ff2de0..aea8d6ca4 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertTriggerResolver.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/resolvers/ChainedAlertTriggerResolver.kt @@ -7,5 +7,6 @@ package org.opensearch.alerting.chainedAlertCondition.resolvers interface ChainedAlertTriggerResolver { fun getMonitorIds(parsedTriggerCondition: ChainedAlertRPNResolver): Set + fun evaluate(alertGeneratingMonitors: Set): Boolean } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/CAExpressionOperator.kt b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/CAExpressionOperator.kt index 084b6aa70..17e38a369 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/CAExpressionOperator.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/CAExpressionOperator.kt @@ -8,13 +8,16 @@ package org.opensearch.alerting.chainedAlertCondition.tokens /** * To define all the operators used in the trigger expression */ -enum class CAExpressionOperator(val value: String, val precedence: Int, val rightAssociative: Boolean) : ExpressionToken { - +enum class CAExpressionOperator( + val value: String, + val precedence: Int, + val rightAssociative: Boolean, +) : ExpressionToken { AND("&&", 2, false), OR("||", 2, false), NOT("!", 3, true), PAR_LEFT("(", 1, false), - PAR_RIGHT(")", 1, false) + PAR_RIGHT(")", 
1, false), } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/CAExpressionToken.kt b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/CAExpressionToken.kt index ddf439d3f..fa1732d2b 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/CAExpressionToken.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/CAExpressionToken.kt @@ -8,4 +8,6 @@ package org.opensearch.alerting.chainedAlertCondition.tokens /** * To define the tokens in Trigger expression such as monitor[id=“id1"] or monitor[id=“id2"] and monitor[id=“id3"] */ -internal data class CAExpressionToken(val value: String) : ExpressionToken +internal data class CAExpressionToken( + val value: String, +) : ExpressionToken diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/ChainedAlertExpressionConstant.kt b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/ChainedAlertExpressionConstant.kt index 4b35bc4a8..d13f7c010 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/ChainedAlertExpressionConstant.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/chainedAlertCondition/tokens/ChainedAlertExpressionConstant.kt @@ -9,9 +9,12 @@ package org.opensearch.alerting.chainedAlertCondition.tokens * To define all the tokens which could be part of expression constant such as query[id=new_id], query[name=new_name], * query[tag=new_tag] */ -class ChainedAlertExpressionConstant(val type: ConstantType) : ExpressionToken { - - enum class ConstantType(val ident: String) { +class ChainedAlertExpressionConstant( + val type: ConstantType, +) : ExpressionToken { + enum class ConstantType( + val ident: String, + ) { MONITOR("monitor"), ID("id"), @@ -19,6 +22,6 @@ class ChainedAlertExpressionConstant(val type: ConstantType) : ExpressionToken { BRACKET_LEFT("["), BRACKET_RIGHT("]"), - 
EQUALS("=") + EQUALS("="), } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/comments/CommentsIndices.kt b/alerting/src/main/kotlin/org/opensearch/alerting/comments/CommentsIndices.kt index d12e45639..e897482b0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/comments/CommentsIndices.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/comments/CommentsIndices.kt @@ -46,9 +46,8 @@ class CommentsIndices( settings: Settings, private val client: Client, private val threadPool: ThreadPool, - private val clusterService: ClusterService + private val clusterService: ClusterService, ) : ClusterStateListener { - init { clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.COMMENTS_HISTORY_MAX_DOCS) { commentsHistoryMaxDocs = it } clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.COMMENTS_HISTORY_INDEX_MAX_AGE) { @@ -77,8 +76,7 @@ class CommentsIndices( const val ALL_COMMENTS_INDEX_PATTERN = ".opensearch-alerting-comments*" @JvmStatic - fun commentsMapping() = - CommentsIndices::class.java.getResource("alerting_comments.json").readText() + fun commentsMapping() = CommentsIndices::class.java.getResource("alerting_comments.json").readText() private val logger = LogManager.getLogger(AlertIndices::class.java) } @@ -112,13 +110,14 @@ class CommentsIndices( // try to rollover immediately as we might be restarting the cluster rolloverCommentsHistoryIndex() // schedule the next rollover for approx MAX_AGE later - scheduledCommentsRollover = threadPool - .scheduleWithFixedDelay({ rolloverAndDeleteCommentsHistoryIndices() }, commentsHistoryRolloverPeriod, executorName()) + scheduledCommentsRollover = + threadPool + .scheduleWithFixedDelay({ rolloverAndDeleteCommentsHistoryIndices() }, commentsHistoryRolloverPeriod, executorName()) } catch (e: Exception) { // This should be run on cluster startup logger.error( "Error creating comments indices. 
Comments can't be recorded until clustermanager node is restarted.", - e + e, ) } } @@ -127,9 +126,7 @@ class CommentsIndices( scheduledCommentsRollover?.cancel() } - private fun executorName(): String { - return ThreadPool.Names.MANAGEMENT - } + private fun executorName(): String = ThreadPool.Names.MANAGEMENT override fun clusterChanged(event: ClusterChangedEvent) { // Instead of using a LocalNodeClusterManagerListener to track clustermanager changes, this service will @@ -151,23 +148,24 @@ class CommentsIndices( private fun rescheduleCommentsRollover() { if (clusterService.state().nodes.isLocalNodeElectedClusterManager) { scheduledCommentsRollover?.cancel() - scheduledCommentsRollover = threadPool - .scheduleWithFixedDelay({ rolloverAndDeleteCommentsHistoryIndices() }, commentsHistoryRolloverPeriod, executorName()) + scheduledCommentsRollover = + threadPool + .scheduleWithFixedDelay({ rolloverAndDeleteCommentsHistoryIndices() }, commentsHistoryRolloverPeriod, executorName()) } } - fun isCommentsHistoryInitialized(): Boolean { - return clusterService.state().metadata.hasAlias(COMMENTS_HISTORY_WRITE_INDEX) - } + fun isCommentsHistoryInitialized(): Boolean = clusterService.state().metadata.hasAlias(COMMENTS_HISTORY_WRITE_INDEX) suspend fun createOrUpdateInitialCommentsHistoryIndex() { if (!isCommentsHistoryInitialized()) { commentsHistoryIndexInitialized = createIndex(COMMENTS_HISTORY_INDEX_PATTERN, commentsMapping(), COMMENTS_HISTORY_WRITE_INDEX) - if (commentsHistoryIndexInitialized) - IndexUtils.lastUpdatedCommentsHistoryIndex = IndexUtils.getIndexNameWithAlias( - clusterService.state(), - COMMENTS_HISTORY_WRITE_INDEX - ) + if (commentsHistoryIndexInitialized) { + IndexUtils.lastUpdatedCommentsHistoryIndex = + IndexUtils.getIndexNameWithAlias( + clusterService.state(), + COMMENTS_HISTORY_WRITE_INDEX, + ) + } } else { updateIndexMapping(COMMENTS_HISTORY_WRITE_INDEX, commentsMapping(), true) } @@ -187,12 +185,12 @@ class CommentsIndices( commentsMapping(), 
commentsHistoryMaxDocs, commentsHistoryMaxAge, - COMMENTS_HISTORY_WRITE_INDEX + COMMENTS_HISTORY_WRITE_INDEX, ) } - // TODO: Everything below is boilerplate util functions straight from AlertIndices.kt /* + TODO: Everything below is boilerplate util functions straight from AlertIndices.kt Depending on whether comments system indices will be component-specific or component-agnostic, may need to either merge CommentsIndices.kt into AlertIndices.kt, or factor these out into IndexUtils.kt for both AlertIndices.kt and CommentsIndices.kt @@ -213,7 +211,7 @@ class CommentsIndices( indexMetadata: IndexMetadata, retentionPeriodMillis: Long, writeIndex: String, - historyEnabled: Boolean + historyEnabled: Boolean, ): String? { val creationTime = indexMetadata.creationDate if ((Instant.now().toEpochMilli() - creationTime) > retentionPeriodMillis) { @@ -243,16 +241,17 @@ class CommentsIndices( if (!deleteIndicesResponse.isAcknowledged) { logger.error( "Could not delete one or more comments history indices: $indicesToDelete." + - "Retrying one by one." + "Retrying one by one.", ) deleteOldHistoryIndex(indicesToDelete) } } + override fun onFailure(e: Exception) { logger.error("Delete for comments History Indices $indicesToDelete Failed. Retrying one By one.") deleteOldHistoryIndex(indicesToDelete) } - } + }, ) } } @@ -270,27 +269,34 @@ class CommentsIndices( } } } + override fun onFailure(e: Exception) { logger.debug("Exception ${e.message} while deleting the index $index") } - } + }, ) } } - private suspend fun createIndex(index: String, schemaMapping: String, alias: String? = null): Boolean { + private suspend fun createIndex( + index: String, + schemaMapping: String, + alias: String? = null, + ): Boolean { // This should be a fast check of local cluster state. Should be exceedingly rare that the local cluster // state does not contain the index and multiple nodes concurrently try to create the index. 
// If it does happen that error is handled we catch the ResourceAlreadyExistsException - val existsResponse: IndicesExistsResponse = client.admin().indices().suspendUntil { - exists(IndicesExistsRequest(index).local(true), it) - } + val existsResponse: IndicesExistsResponse = + client.admin().indices().suspendUntil { + exists(IndicesExistsRequest(index).local(true), it) + } if (existsResponse.isExists) return true logger.debug("index: [$index] schema mappings: [$schemaMapping]") - val request = CreateIndexRequest(index) - .mapping(schemaMapping) - .settings(Settings.builder().put("index.hidden", true).build()) + val request = + CreateIndexRequest(index) + .mapping(schemaMapping) + .settings(Settings.builder().put("index.hidden", true).build()) if (alias != null) request.alias(Alias(alias)) return try { @@ -305,20 +311,24 @@ class CommentsIndices( } } - private suspend fun updateIndexMapping(index: String, mapping: String, alias: Boolean = false) { + private suspend fun updateIndexMapping( + index: String, + mapping: String, + alias: Boolean = false, + ) { val clusterState = clusterService.state() var targetIndex = index if (alias) { targetIndex = IndexUtils.getIndexNameWithAlias(clusterState, index) } - if (targetIndex == IndexUtils.lastUpdatedCommentsHistoryIndex - ) { + if (targetIndex == IndexUtils.lastUpdatedCommentsHistoryIndex) { return } - val putMappingRequest: PutMappingRequest = PutMappingRequest(targetIndex) - .source(mapping, XContentType.JSON) + val putMappingRequest: PutMappingRequest = + PutMappingRequest(targetIndex) + .source(mapping, XContentType.JSON) val updateResponse: AcknowledgedResponse = client.admin().indices().suspendUntil { putMapping(putMappingRequest, it) } if (updateResponse.isAcknowledged) { logger.info("Index mapping of $targetIndex is updated") @@ -328,7 +338,10 @@ class CommentsIndices( } } - private fun setIndexUpdateFlag(index: String, targetIndex: String) { + private fun setIndexUpdateFlag( + index: String, + targetIndex: 
String, + ) { when (index) { COMMENTS_HISTORY_WRITE_INDEX -> IndexUtils.lastUpdatedCommentsHistoryIndex = targetIndex } @@ -341,7 +354,7 @@ class CommentsIndices( map: String, docsCondition: Long, ageCondition: TimeValue, - writeIndex: String + writeIndex: String, ) { logger.info("in rolloverIndex, initialize: $initialized") if (!initialized) { @@ -351,7 +364,8 @@ class CommentsIndices( logger.info("sending rollover request") // We have to pass null for newIndexName in order to get Elastic to increment the index count. val request = RolloverRequest(index, null) - request.createIndexRequest.index(pattern) + request.createIndexRequest + .index(pattern) .mapping(map) .settings(Settings.builder().put("index.hidden", true).build()) request.addMaxIndexDocsCondition(docsCondition) @@ -367,25 +381,32 @@ class CommentsIndices( lastRolloverTime = TimeValue.timeValueMillis(threadPool.absoluteTimeInMillis()) } } + override fun onFailure(e: Exception) { logger.error("$writeIndex not roll over failed.") } - } + }, ) } - private fun deleteOldIndices(tag: String, indices: String) { - val clusterStateRequest = ClusterStateRequest() - .clear() - .indices(indices) - .metadata(true) - .local(true) - .indicesOptions(IndicesOptions.strictExpand()) + private fun deleteOldIndices( + tag: String, + indices: String, + ) { + val clusterStateRequest = + ClusterStateRequest() + .clear() + .indices(indices) + .metadata(true) + .local(true) + .indicesOptions(IndicesOptions.strictExpand()) client.admin().cluster().state( clusterStateRequest, object : ActionListener { override fun onResponse(clusterStateResponse: ClusterStateResponse) { - if (clusterStateResponse.state.metadata.indices.isNotEmpty()) { + if (clusterStateResponse.state.metadata.indices + .isNotEmpty() + ) { val indicesToDelete = getIndicesToDelete(clusterStateResponse) logger.info("Deleting old $tag indices viz $indicesToDelete") deleteAllOldHistoryIndices(indicesToDelete) @@ -393,10 +414,11 @@ class CommentsIndices( logger.info("No 
Old $tag Indices to delete") } } + override fun onFailure(e: Exception) { logger.error("Error fetching cluster state") } - } + }, ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertContext.kt index 12fa1e0e0..380926cf1 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertContext.kt @@ -17,32 +17,35 @@ data class AlertContext( val alert: Alert, val associatedQueries: List? = null, val sampleDocs: List>? = null, - val comments: List? = null + val comments: List? = null, ) { fun asTemplateArg(): Map { - val queriesContext = associatedQueries?.map { - mapOf( - DocLevelQuery.QUERY_ID_FIELD to it.id, - DocLevelQuery.NAME_FIELD to it.name, - DocLevelQuery.TAGS_FIELD to it.tags - ) - } + val queriesContext = + associatedQueries?.map { + mapOf( + DocLevelQuery.QUERY_ID_FIELD to it.id, + DocLevelQuery.NAME_FIELD to it.name, + DocLevelQuery.TAGS_FIELD to it.tags, + ) + } - val commentsContext = comments?.map { - mapOf( - Comment.COMMENT_CREATED_TIME_FIELD to it.createdTime, - Comment.COMMENT_LAST_UPDATED_TIME_FIELD to it.lastUpdatedTime, - Comment.COMMENT_CONTENT_FIELD to it.content, - Comment.COMMENT_USER_FIELD to it.user?.name - ) - } + val commentsContext = + comments?.map { + mapOf( + Comment.COMMENT_CREATED_TIME_FIELD to it.createdTime, + Comment.COMMENT_LAST_UPDATED_TIME_FIELD to it.lastUpdatedTime, + Comment.COMMENT_CONTENT_FIELD to it.content, + Comment.COMMENT_USER_FIELD to it.user?.name, + ) + } // Compile the custom context fields. 
- val customContextFields = mapOf( - ASSOCIATED_QUERIES_FIELD to queriesContext, - SAMPLE_DOCS_FIELD to sampleDocs, - COMMENTS_FIELD to commentsContext - ) + val customContextFields = + mapOf( + ASSOCIATED_QUERIES_FIELD to queriesContext, + SAMPLE_DOCS_FIELD to sampleDocs, + COMMENTS_FIELD to commentsContext, + ) // Get the alert template args val templateArgs = alert.asTemplateArg().toMutableMap() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt index 89430b9d1..89ae60653 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/AlertingConfigAccessor.kt @@ -25,8 +25,11 @@ import org.opensearch.transport.client.Client */ class AlertingConfigAccessor { companion object { - - suspend fun getEmailAccountInfo(client: Client, xContentRegistry: NamedXContentRegistry, emailAccountId: String): EmailAccount { + suspend fun getEmailAccountInfo( + client: Client, + xContentRegistry: NamedXContentRegistry, + emailAccountId: String, + ): EmailAccount { val source = getAlertingConfigDocumentSource(client, "Email account", emailAccountId) return withContext(Dispatchers.IO) { val xcp = XContentHelper.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, source, XContentType.JSON) @@ -35,7 +38,11 @@ class AlertingConfigAccessor { } } - suspend fun getEmailGroupInfo(client: Client, xContentRegistry: NamedXContentRegistry, emailGroupId: String): EmailGroup { + suspend fun getEmailGroupInfo( + client: Client, + xContentRegistry: NamedXContentRegistry, + emailGroupId: String, + ): EmailGroup { val source = getAlertingConfigDocumentSource(client, "Email group", emailGroupId) return withContext(Dispatchers.IO) { val xcp = XContentHelper.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, source, XContentType.JSON) @@ -47,7 +54,7 @@ class 
AlertingConfigAccessor { private suspend fun getAlertingConfigDocumentSource( client: Client, type: String, - docId: String + docId: String, ): BytesReference { val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, docId).routing(docId) val getResponse: GetResponse = client.suspendUntil { client.get(getRequest, it) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Chime.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Chime.kt index 06d066ded..3d889f906 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Chime.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Chime.kt @@ -18,17 +18,21 @@ import java.io.IOException * A value object that represents a Chime message. Chime message will be * submitted to the Chime destination */ -data class Chime(val url: String) : ToXContent { - +data class Chime( + val url: String, +) : ToXContent { init { require(!Strings.isNullOrEmpty(url)) { "URL is null or empty" } } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject(TYPE) + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = + builder + .startObject(TYPE) .field(URL, url) .endObject() - } @Throws(IOException::class) fun writeTo(out: StreamOutput) { @@ -49,7 +53,10 @@ data class Chime(val url: String) : ToXContent { val fieldName = xcp.currentName() xcp.nextToken() when (fieldName) { - URL -> url = xcp.text() + URL -> { + url = xcp.text() + } + else -> { throw IllegalStateException("Unexpected field: $fieldName, while parsing Chime destination") } @@ -60,15 +67,17 @@ data class Chime(val url: String) : ToXContent { @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): Chime? { - return if (sin.readBoolean()) { + fun readFrom(sin: StreamInput): Chime? 
= + if (sin.readBoolean()) { Chime(sin.readString()) - } else null - } + } else { + null + } } // Complete JSON structure is now constructed in the notification plugin - fun constructMessageContent(subject: String?, message: String): String { - return if (Strings.isNullOrEmpty(subject)) message else "$subject \n\n $message" - } + fun constructMessageContent( + subject: String?, + message: String, + ): String = if (Strings.isNullOrEmpty(subject)) message else "$subject \n\n $message" } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/CustomWebhook.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/CustomWebhook.kt index 5758576d8..fb4b7c917 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/CustomWebhook.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/CustomWebhook.kt @@ -28,17 +28,20 @@ data class CustomWebhook( val queryParams: Map, val headerParams: Map, val username: String?, - val password: String? + val password: String?, ) : ToXContent { - init { require(!(Strings.isNullOrEmpty(url) && Strings.isNullOrEmpty(host))) { "Url or Host name must be provided." 
} } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject(TYPE) + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = + builder + .startObject(TYPE) .field(URL, url) .field(SCHEME_FIELD, scheme) .field(HOST_FIELD, host) @@ -50,7 +53,6 @@ data class CustomWebhook( .field(USERNAME_FIELD, username) .field(PASSWORD_FIELD, password) .endObject() - } @Throws(IOException::class) fun writeTo(out: StreamOutput) { @@ -98,16 +100,46 @@ data class CustomWebhook( val fieldName = xcp.currentName() xcp.nextToken() when (fieldName) { - URL -> url = xcp.textOrNull() - SCHEME_FIELD -> scheme = xcp.textOrNull() - HOST_FIELD -> host = xcp.textOrNull() - PORT_FIELD -> port = xcp.intValue() - PATH_FIELD -> path = xcp.textOrNull() - METHOD_FIELD -> method = xcp.textOrNull() - QUERY_PARAMS_FIELD -> queryParams = xcp.mapStrings() - HEADER_PARAMS_FIELD -> headerParams = xcp.mapStrings() - USERNAME_FIELD -> username = xcp.textOrNull() - PASSWORD_FIELD -> password = xcp.textOrNull() + URL -> { + url = xcp.textOrNull() + } + + SCHEME_FIELD -> { + scheme = xcp.textOrNull() + } + + HOST_FIELD -> { + host = xcp.textOrNull() + } + + PORT_FIELD -> { + port = xcp.intValue() + } + + PATH_FIELD -> { + path = xcp.textOrNull() + } + + METHOD_FIELD -> { + method = xcp.textOrNull() + } + + QUERY_PARAMS_FIELD -> { + queryParams = xcp.mapStrings() + } + + HEADER_PARAMS_FIELD -> { + headerParams = xcp.mapStrings() + } + + USERNAME_FIELD -> { + username = xcp.textOrNull() + } + + PASSWORD_FIELD -> { + password = xcp.textOrNull() + } + else -> { throw IllegalStateException("Unexpected field: $fieldName, while parsing custom webhook destination") } @@ -117,14 +149,12 @@ data class CustomWebhook( } @Suppress("UNCHECKED_CAST") - fun suppressWarning(map: MutableMap?): Map { - return map as Map - } + fun suppressWarning(map: MutableMap?): Map = map as Map @JvmStatic 
@Throws(IOException::class) - fun readFrom(sin: StreamInput): CustomWebhook? { - return if (sin.readBoolean()) { + fun readFrom(sin: StreamInput): CustomWebhook? = + if (sin.readBoolean()) { CustomWebhook( sin.readString(), // url sin.readOptionalString(), // scheme @@ -135,9 +165,10 @@ data class CustomWebhook( suppressWarning(sin.readMap()), // queryParams) suppressWarning(sin.readMap()), // headerParams) sin.readOptionalString(), // username - sin.readOptionalString() // password + sin.readOptionalString(), // password ) - } else null - } + } else { + null + } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Destination.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Destination.kt index a5043b3cf..8147ce467 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Destination.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Destination.kt @@ -46,20 +46,27 @@ data class Destination( val chime: Chime?, val slack: Slack?, val customWebhook: CustomWebhook?, - val email: Email? 
+ val email: Email?, ) : ToXContent { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = createXContentBuilder(builder, params, true) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return createXContentBuilder(builder, params, true) - } + fun toXContentWithUser( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = createXContentBuilder(builder, params, false) - fun toXContentWithUser(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return createXContentBuilder(builder, params, false) - } - private fun createXContentBuilder(builder: XContentBuilder, params: ToXContent.Params, secure: Boolean): XContentBuilder { + private fun createXContentBuilder( + builder: XContentBuilder, + params: ToXContent.Params, + secure: Boolean, + ): XContentBuilder { builder.startObject() if (params.paramAsBoolean("with_type", false)) builder.startObject(DESTINATION) - builder.field(ID_FIELD, id) + builder + .field(ID_FIELD, id) .field(TYPE_FIELD, type.value) .field(NAME_FIELD, name) @@ -67,7 +74,8 @@ data class Destination( builder.optionalUserField(USER_FIELD, user) } - builder.field(SCHEMA_VERSION, schemaVersion) + builder + .field(SCHEMA_VERSION, schemaVersion) .field(SEQ_NO_FIELD, seqNo) .field(PRIMARY_TERM_FIELD, primaryTerm) .optionalTimeField(LAST_UPDATE_TIME_FIELD, lastUpdateTime) @@ -75,9 +83,8 @@ data class Destination( if (params.paramAsBoolean("with_type", false)) builder.endObject() return builder.endObject() } - fun toXContent(builder: XContentBuilder): XContentBuilder { - return toXContent(builder, ToXContent.EMPTY_PARAMS) - } + + fun toXContent(builder: XContentBuilder): XContentBuilder = toXContent(builder, ToXContent.EMPTY_PARAMS) @Throws(IOException::class) fun writeTo(out: StreamOutput) { @@ -133,9 +140,8 @@ data class Destination( id: String = NO_ID, version: Long = NO_VERSION, seqNo: Int = NO_SEQ_NO, - 
primaryTerm: Int = NO_PRIMARY_TERM + primaryTerm: Int = NO_PRIMARY_TERM, ): Destination { - lateinit var name: String var user: User? = null lateinit var type: String @@ -152,8 +158,14 @@ data class Destination( xcp.nextToken() when (fieldName) { - NAME_FIELD -> name = xcp.text() - USER_FIELD -> user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else User.parse(xcp) + NAME_FIELD -> { + name = xcp.text() + } + + USER_FIELD -> { + user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else User.parse(xcp) + } + TYPE_FIELD -> { type = xcp.text() val allowedTypes = DestinationType.values().map { it.value } @@ -161,25 +173,35 @@ data class Destination( throw IllegalStateException("Type should be one of the $allowedTypes") } } - LAST_UPDATE_TIME_FIELD -> lastUpdateTime = xcp.instant() + + LAST_UPDATE_TIME_FIELD -> { + lastUpdateTime = xcp.instant() + } + CHIME -> { chime = Chime.parse(xcp) } + SLACK -> { slack = Slack.parse(xcp) } + CUSTOMWEBHOOK -> { customWebhook = CustomWebhook.parse(xcp) } + EMAIL -> { email = Email.parse(xcp) } + TEST_ACTION -> { // This condition is for integ tests to avoid parsing } + SCHEMA_VERSION -> { schemaVersion = xcp.intValue() } + else -> { xcp.skipChildren() } @@ -198,13 +220,17 @@ data class Destination( chime, slack, customWebhook, - email + email, ) } @JvmStatic @Throws(IOException::class) - fun parseWithType(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): Destination { + fun parseWithType( + xcp: XContentParser, + id: String = NO_ID, + version: Long = NO_VERSION, + ): Destination { ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) ensureExpectedToken(XContentParser.Token.FIELD_NAME, xcp.nextToken(), xcp) ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) @@ -215,8 +241,8 @@ data class Destination( @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): Destination { - return Destination( + fun readFrom(sin: 
StreamInput): Destination = + Destination( id = sin.readString(), version = sin.readLong(), schemaVersion = sin.readInt(), @@ -224,59 +250,76 @@ data class Destination( primaryTerm = sin.readInt(), type = sin.readEnum(DestinationType::class.java), name = sin.readString(), - user = if (sin.readBoolean()) { - User(sin) - } else null, + user = + if (sin.readBoolean()) { + User(sin) + } else { + null + }, lastUpdateTime = sin.readInstant(), chime = Chime.readFrom(sin), slack = Slack.readFrom(sin), customWebhook = CustomWebhook.readFrom(sin), - email = Email.readFrom(sin) + email = Email.readFrom(sin), ) - } } fun buildLegacyBaseMessage( compiledSubject: String?, compiledMessage: String, - destinationCtx: DestinationContext + destinationCtx: DestinationContext, ): LegacyBaseMessage { - val destinationMessage: LegacyBaseMessage when (type) { DestinationType.CHIME -> { val messageContent = chime?.constructMessageContent(compiledSubject, compiledMessage) - destinationMessage = LegacyChimeMessage.Builder(name) - .withUrl(chime?.url) - .withMessage(messageContent) - .build() + destinationMessage = + LegacyChimeMessage + .Builder(name) + .withUrl(chime?.url) + .withMessage(messageContent) + .build() } + DestinationType.SLACK -> { val messageContent = slack?.constructMessageContent(compiledSubject, compiledMessage) - destinationMessage = LegacySlackMessage.Builder(name) - .withUrl(slack?.url) - .withMessage(messageContent) - .build() + destinationMessage = + LegacySlackMessage + .Builder(name) + .withUrl(slack?.url) + .withMessage(messageContent) + .build() } + DestinationType.CUSTOM_WEBHOOK -> { - destinationMessage = LegacyCustomWebhookMessage.Builder(name) - .withUrl(getLegacyCustomWebhookMessageURL(customWebhook, compiledMessage)) - .withHeaderParams(customWebhook?.headerParams) - .withMessage(compiledMessage).build() + destinationMessage = + LegacyCustomWebhookMessage + .Builder(name) + .withUrl(getLegacyCustomWebhookMessageURL(customWebhook, compiledMessage)) + 
.withHeaderParams(customWebhook?.headerParams) + .withMessage(compiledMessage) + .build() } + DestinationType.EMAIL -> { val emailAccount = destinationCtx.emailAccount - destinationMessage = LegacyEmailMessage.Builder(name) - .withAccountName(emailAccount?.name) - .withHost(emailAccount?.host) - .withPort(emailAccount?.port) - .withMethod(emailAccount?.method?.let { convertAlertingToNotificationMethodType(it).toString() }) - .withFrom(emailAccount?.email) - .withRecipients(destinationCtx.recipients) - .withSubject(compiledSubject) - .withMessage(compiledMessage).build() + destinationMessage = + LegacyEmailMessage + .Builder(name) + .withAccountName(emailAccount?.name) + .withHost(emailAccount?.host) + .withPort(emailAccount?.port) + .withMethod(emailAccount?.method?.let { convertAlertingToNotificationMethodType(it).toString() }) + .withFrom(emailAccount?.email) + .withRecipients(destinationCtx.recipients) + .withSubject(compiledSubject) + .withMessage(compiledMessage) + .build() + } + + else -> { + throw IllegalArgumentException("Unsupported Destination type [$type] for building legacy message") } - else -> throw IllegalArgumentException("Unsupported Destination type [$type] for building legacy message") } return destinationMessage } @@ -296,8 +339,12 @@ data class Destination( return content } - private fun getLegacyCustomWebhookMessageURL(customWebhook: CustomWebhook?, message: String): String { - return LegacyCustomWebhookMessage.Builder(name) + private fun getLegacyCustomWebhookMessageURL( + customWebhook: CustomWebhook?, + message: String, + ): String = + LegacyCustomWebhookMessage + .Builder(name) .withUrl(customWebhook?.url) .withScheme(customWebhook?.scheme) .withHost(customWebhook?.host) @@ -305,6 +352,7 @@ data class Destination( .withPath(customWebhook?.path) .withQueryParams(customWebhook?.queryParams) .withMessage(message) - .build().uri.toString() - } + .build() + .uri + .toString() } diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/DestinationContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/DestinationContext.kt index 5b3febc87..cc2128064 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/DestinationContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/DestinationContext.kt @@ -13,5 +13,5 @@ import org.opensearch.alerting.model.destination.email.EmailAccount */ data class DestinationContext( val emailAccount: EmailAccount? = null, - val recipients: List = emptyList() + val recipients: List = emptyList(), ) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/DestinationContextFactory.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/DestinationContextFactory.kt index b72711606..a285427cc 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/DestinationContextFactory.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/DestinationContextFactory.kt @@ -21,9 +21,8 @@ import org.opensearch.transport.client.Client class DestinationContextFactory( val client: Client, val xContentRegistry: NamedXContentRegistry, - private var destinationSettings: Map + private var destinationSettings: Map, ) { - fun updateDestinationSettings(destinationSettings: Map) { this.destinationSettings = destinationSettings } @@ -67,7 +66,10 @@ class DestinationContextFactory( when (recipient.type) { // Recipient attributes are checked for being non-null based on type during initialization // so non-null assertion calls are made here - Recipient.RecipientType.EMAIL -> uniqueRecipients.add(recipient.email!!) + Recipient.RecipientType.EMAIL -> { + uniqueRecipients.add(recipient.email!!) + } + Recipient.RecipientType.EMAIL_GROUP -> { val emailGroup = AlertingConfigAccessor.getEmailGroupInfo(client, xContentRegistry, recipient.emailGroupID!!) 
emailGroup.getEmailsAsListOfString().map { uniqueRecipients.add(it) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/SNS.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/SNS.kt index f9c6ec59f..c4ae094d2 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/SNS.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/SNS.kt @@ -13,22 +13,26 @@ import java.io.IOException import java.lang.IllegalStateException import java.util.regex.Pattern -data class SNS(val topicARN: String, val roleARN: String) : ToXContent { - +data class SNS( + val topicARN: String, + val roleARN: String, +) : ToXContent { init { require(SNS_ARN_REGEX.matcher(topicARN).find()) { "Invalid AWS SNS topic ARN: $topicARN" } require(IAM_ARN_REGEX.matcher(roleARN).find()) { "Invalid AWS role ARN: $roleARN " } } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject(SNS_TYPE) + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = + builder + .startObject(SNS_TYPE) .field(TOPIC_ARN_FIELD, topicARN) .field(ROLE_ARN_FIELD, roleARN) .endObject() - } companion object { - private val SNS_ARN_REGEX = Pattern.compile("^arn:aws(-[^:]+)?:sns:([a-zA-Z0-9-]+):([0-9]{12}):([a-zA-Z0-9-_]+)$") private val IAM_ARN_REGEX = Pattern.compile("^arn:aws(-[^:]+)?:iam::([0-9]{12}):([a-zA-Z0-9-/_]+)$") @@ -47,8 +51,14 @@ data class SNS(val topicARN: String, val roleARN: String) : ToXContent { val fieldName = xcp.currentName() xcp.nextToken() when (fieldName) { - TOPIC_ARN_FIELD -> topicARN = xcp.textOrNull() - ROLE_ARN_FIELD -> roleARN = xcp.textOrNull() + TOPIC_ARN_FIELD -> { + topicARN = xcp.textOrNull() + } + + ROLE_ARN_FIELD -> { + roleARN = xcp.textOrNull() + } + else -> { throw IllegalStateException("Unexpected field: $fieldName, while parsing SNS destination") } @@ -56,7 +66,7 @@ data class SNS(val 
topicARN: String, val roleARN: String) : ToXContent { } return SNS( requireNotNull(topicARN) { "SNS Action topic_arn is null" }, - requireNotNull(roleARN) { "SNS Action role_arn is null" } + requireNotNull(roleARN) { "SNS Action role_arn is null" }, ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Slack.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Slack.kt index 14f623616..e325cdf22 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Slack.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/Slack.kt @@ -18,17 +18,21 @@ import java.io.IOException * A value object that represents a Slack message. Slack message will be * submitted to the Slack destination */ -data class Slack(val url: String) : ToXContent { - +data class Slack( + val url: String, +) : ToXContent { init { require(!Strings.isNullOrEmpty(url)) { "URL is null or empty" } } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject(TYPE) + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = + builder + .startObject(TYPE) .field(URL, url) .endObject() - } @Throws(IOException::class) fun writeTo(out: StreamOutput) { @@ -49,7 +53,10 @@ data class Slack(val url: String) : ToXContent { val fieldName = xcp.currentName() xcp.nextToken() when (fieldName) { - URL -> url = xcp.text() + URL -> { + url = xcp.text() + } + else -> { throw IllegalStateException("Unexpected field: $fieldName, while parsing Slack destination") } @@ -60,15 +67,17 @@ data class Slack(val url: String) : ToXContent { @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): Slack? { - return if (sin.readBoolean()) { + fun readFrom(sin: StreamInput): Slack? 
= + if (sin.readBoolean()) { Slack(sin.readString()) - } else null - } + } else { + null + } } // Complete JSON structure is now constructed in the notification plugin - fun constructMessageContent(subject: String?, message: String): String { - return if (Strings.isNullOrEmpty(subject)) message else "$subject \n\n $message" - } + fun constructMessageContent( + subject: String?, + message: String, + ): String = if (Strings.isNullOrEmpty(subject)) message else "$subject \n\n $message" } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/Email.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/Email.kt index 75635ab38..d0fceb4a3 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/Email.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/Email.kt @@ -24,19 +24,22 @@ import java.util.Locale */ data class Email( val emailAccountID: String, - val recipients: List -) : Writeable, ToXContent { - + val recipients: List, +) : Writeable, + ToXContent { init { require(recipients.isNotEmpty()) { "At least one recipient must be provided" } } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject(TYPE) + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = + builder + .startObject(TYPE) .field(EMAIL_ACCOUNT_ID_FIELD, emailAccountID) .field(RECIPIENTS_FIELD, recipients.toTypedArray()) .endObject() - } @Throws(IOException::class) override fun writeTo(out: StreamOutput) { @@ -61,13 +64,17 @@ data class Email( xcp.nextToken() when (fieldName) { - EMAIL_ACCOUNT_ID_FIELD -> emailAccountID = xcp.text() + EMAIL_ACCOUNT_ID_FIELD -> { + emailAccountID = xcp.text() + } + RECIPIENTS_FIELD -> { ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) while (xcp.nextToken() != Token.END_ARRAY) { recipients.add(Recipient.parse(xcp)) } } + else 
-> { throw IllegalStateException("Unexpected field: $fieldName, while parsing email destination") } @@ -76,20 +83,21 @@ data class Email( return Email( requireNotNull(emailAccountID) { "Email account ID is null" }, - recipients + recipients, ) } @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): Email? { - return if (sin.readBoolean()) { + fun readFrom(sin: StreamInput): Email? = + if (sin.readBoolean()) { Email( sin.readString(), // emailAccountID - sin.readList(::Recipient) // recipients + sin.readList(::Recipient), // recipients ) - } else null - } + } else { + null + } } } @@ -99,12 +107,15 @@ data class Email( data class Recipient( val type: RecipientType, val emailGroupID: String?, - val email: String? -) : Writeable, ToXContent { - + val email: String?, +) : Writeable, + ToXContent { init { when (type) { - RecipientType.EMAIL_GROUP -> requireNotNull(emailGroupID) { "Email group ID is null" } + RecipientType.EMAIL_GROUP -> { + requireNotNull(emailGroupID) { "Email group ID is null" } + } + RecipientType.EMAIL -> { requireNotNull(email) { "Email is null" } require(isValidEmail(email)) { "Invalid email" } @@ -116,10 +127,13 @@ data class Recipient( constructor(sin: StreamInput) : this( sin.readEnum(Recipient.RecipientType::class.java), // type sin.readOptionalString(), // emailGroupId - sin.readOptionalString() // email + sin.readOptionalString(), // email ) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.startObject().field(TYPE_FIELD, type.value) when (type) { @@ -137,9 +151,11 @@ data class Recipient( out.writeOptionalString(email) } - enum class RecipientType(val value: String) { + enum class RecipientType( + val value: String, + ) { EMAIL("email"), - EMAIL_GROUP("email_group") + EMAIL_GROUP("email_group"), } companion object { @@ -167,22 +183,26 @@ data class Recipient( 
throw IllegalStateException("Type should be one of $allowedTypes") } } - EMAIL_GROUP_ID_FIELD -> emailGroupID = xcp.text() - EMAIL_FIELD -> email = xcp.text() + + EMAIL_GROUP_ID_FIELD -> { + emailGroupID = xcp.text() + } + + EMAIL_FIELD -> { + email = xcp.text() + } } } return Recipient( RecipientType.valueOf(type.uppercase(Locale.ROOT)), emailGroupID, - email + email, ) } @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): Recipient { - return Recipient(sin) - } + fun readFrom(sin: StreamInput): Recipient = Recipient(sin) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailAccount.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailAccount.kt index 9d0bb7f00..d1497ccca 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailAccount.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailAccount.kt @@ -32,9 +32,9 @@ data class EmailAccount( val port: Int, val method: MethodType, val username: SecureString? = null, - val password: SecureString? = null -) : Writeable, ToXContent { - + val password: SecureString? = null, +) : Writeable, + ToXContent { init { // Excluding dashes (-) from valid names for EmailAccount since the name is used // to namespace the associated OpenSearch keystore settings and dashes do not work for those settings. 
@@ -46,10 +46,14 @@ data class EmailAccount( require(isValidEmail(email)) { "Invalid email" } } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.startObject() if (params.paramAsBoolean("with_type", false)) builder.startObject(EMAIL_ACCOUNT_TYPE) - builder.field(SCHEMA_VERSION, schemaVersion) + builder + .field(SCHEMA_VERSION, schemaVersion) .field(NAME_FIELD, name) .field(EMAIL_FIELD, email) .field(HOST_FIELD, host) @@ -59,9 +63,7 @@ data class EmailAccount( return builder.endObject() } - fun toXContent(builder: XContentBuilder): XContentBuilder { - return toXContent(builder, ToXContent.EMPTY_PARAMS) - } + fun toXContent(builder: XContentBuilder): XContentBuilder = toXContent(builder, ToXContent.EMPTY_PARAMS) @Throws(IOException::class) override fun writeTo(out: StreamOutput) { @@ -77,13 +79,17 @@ data class EmailAccount( out.writeOptionalSecureString(password) } - enum class MethodType(val value: String) { + enum class MethodType( + val value: String, + ) { NONE("none"), SSL("ssl"), - TLS("starttls"); + TLS("starttls"), + ; companion object { private val values = values() + // Created this method since MethodType value does not necessarily match enum name fun getByValue(value: String) = values.firstOrNull { it.value == value } } @@ -102,7 +108,11 @@ data class EmailAccount( @JvmStatic @Throws(IOException::class) - fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): EmailAccount { + fun parse( + xcp: XContentParser, + id: String = NO_ID, + version: Long = NO_VERSION, + ): EmailAccount { var schemaVersion = NO_SCHEMA_VERSION lateinit var name: String lateinit var email: String @@ -116,11 +126,26 @@ data class EmailAccount( xcp.nextToken() when (fieldName) { - SCHEMA_VERSION -> schemaVersion = xcp.intValue() - NAME_FIELD -> name = xcp.text() - EMAIL_FIELD -> email = xcp.text() - 
HOST_FIELD -> host = xcp.text() - PORT_FIELD -> port = xcp.intValue() + SCHEMA_VERSION -> { + schemaVersion = xcp.intValue() + } + + NAME_FIELD -> { + name = xcp.text() + } + + EMAIL_FIELD -> { + email = xcp.text() + } + + HOST_FIELD -> { + host = xcp.text() + } + + PORT_FIELD -> { + port = xcp.intValue() + } + METHOD_FIELD -> { method = xcp.text() val allowedMethods = MethodType.values().map { it.value } @@ -139,13 +164,17 @@ data class EmailAccount( email, host, port, - requireNotNull(MethodType.getByValue(method)) { "Method type was null" } + requireNotNull(MethodType.getByValue(method)) { "Method type was null" }, ) } @JvmStatic @Throws(IOException::class) - fun parseWithType(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): EmailAccount { + fun parseWithType( + xcp: XContentParser, + id: String = NO_ID, + version: Long = NO_VERSION, + ): EmailAccount { ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) ensureExpectedToken(Token.FIELD_NAME, xcp.nextToken(), xcp) ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) @@ -156,8 +185,8 @@ data class EmailAccount( @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): EmailAccount { - return EmailAccount( + fun readFrom(sin: StreamInput): EmailAccount = + EmailAccount( sin.readString(), // id sin.readLong(), // version sin.readInt(), // schemaVersion @@ -167,8 +196,7 @@ data class EmailAccount( sin.readInt(), // port sin.readEnum(MethodType::class.java), // method sin.readOptionalSecureString(), // username - sin.readOptionalSecureString() // password + sin.readOptionalSecureString(), // password ) - } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailGroup.kt b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailGroup.kt index a960da5f5..aba8d0d31 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailGroup.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/model/destination/email/EmailGroup.kt @@ -26,9 +26,9 @@ data class EmailGroup( val version: Long = NO_VERSION, val schemaVersion: Int = NO_SCHEMA_VERSION, val name: String, - val emails: List -) : Writeable, ToXContent { - + val emails: List, +) : Writeable, + ToXContent { init { val validNamePattern = Regex("[A-Z0-9_-]+", RegexOption.IGNORE_CASE) require(validNamePattern.matches(name)) { @@ -36,19 +36,21 @@ data class EmailGroup( } } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.startObject() if (params.paramAsBoolean("with_type", false)) builder.startObject(EMAIL_GROUP_TYPE) - builder.field(SCHEMA_VERSION, schemaVersion) + builder + .field(SCHEMA_VERSION, schemaVersion) .field(NAME_FIELD, name) .field(EMAILS_FIELD, emails.toTypedArray()) if (params.paramAsBoolean("with_type", false)) builder.endObject() return builder.endObject() } - fun toXContent(builder: XContentBuilder): XContentBuilder { - return toXContent(builder, ToXContent.EMPTY_PARAMS) - } + fun toXContent(builder: XContentBuilder): XContentBuilder = toXContent(builder, ToXContent.EMPTY_PARAMS) @Throws(IOException::class) override fun writeTo(out: StreamOutput) { @@ -75,7 +77,11 @@ data class EmailGroup( @JvmStatic @Throws(IOException::class) - fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): EmailGroup { + fun parse( + xcp: XContentParser, + id: String = NO_ID, + version: Long = NO_VERSION, + ): EmailGroup { var schemaVersion = NO_SCHEMA_VERSION lateinit var name: String val emails: MutableList = mutableListOf() @@ -86,14 +92,21 @@ data class EmailGroup( xcp.nextToken() when (fieldName) { - SCHEMA_VERSION -> schemaVersion = xcp.intValue() - NAME_FIELD -> name = xcp.text() + SCHEMA_VERSION -> { + schemaVersion = xcp.intValue() + } + + NAME_FIELD -> { + name = 
xcp.text() + } + EMAILS_FIELD -> { ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) while (xcp.nextToken() != Token.END_ARRAY) { emails.add(EmailEntry.parse(xcp)) } } + else -> { throw IllegalStateException("Unexpected field: $fieldName, while parsing email group") } @@ -105,13 +118,17 @@ data class EmailGroup( version, schemaVersion, requireNotNull(name) { "Email group name is null" }, - emails + emails, ) } @JvmStatic @Throws(IOException::class) - fun parseWithType(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): EmailGroup { + fun parseWithType( + xcp: XContentParser, + id: String = NO_ID, + version: Long = NO_VERSION, + ): EmailGroup { ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) ensureExpectedToken(Token.FIELD_NAME, xcp.nextToken(), xcp) ensureExpectedToken(Token.START_OBJECT, xcp.nextToken(), xcp) @@ -122,20 +139,21 @@ data class EmailGroup( @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): EmailGroup { - return EmailGroup( + fun readFrom(sin: StreamInput): EmailGroup = + EmailGroup( sin.readString(), // id sin.readLong(), // version sin.readInt(), // schemaVersion sin.readString(), // name - sin.readList(::EmailEntry) // emails + sin.readList(::EmailEntry), // emails ) - } } } -data class EmailEntry(val email: String) : Writeable, ToXContent { - +data class EmailEntry( + val email: String, +) : Writeable, + ToXContent { init { require(!Strings.isEmpty(email)) { "Email entry must have a non-empty email" } require(isValidEmail(email)) { "Invalid email" } @@ -143,14 +161,17 @@ data class EmailEntry(val email: String) : Writeable, ToXContent { @Throws(IOException::class) constructor(sin: StreamInput) : this( - sin.readString() // email + sin.readString(), // email ) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return builder.startObject() + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): 
XContentBuilder = + builder + .startObject() .field(EMAIL_FIELD, email) .endObject() - } @Throws(IOException::class) override fun writeTo(out: StreamOutput) { @@ -171,7 +192,10 @@ data class EmailEntry(val email: String) : Writeable, ToXContent { xcp.nextToken() when (fieldName) { - EMAIL_FIELD -> email = xcp.text() + EMAIL_FIELD -> { + email = xcp.text() + } + else -> { throw IllegalStateException("Unexpected field: $fieldName, while parsing email entry") } @@ -183,8 +207,6 @@ data class EmailEntry(val email: String) : Writeable, ToXContent { @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): EmailEntry { - return EmailEntry(sin) - } + fun readFrom(sin: StreamInput): EmailEntry = EmailEntry(sin) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/AlertV2.kt b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/AlertV2.kt index af188f180..9318abce7 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/AlertV2.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/AlertV2.kt @@ -66,8 +66,9 @@ data class AlertV2( val triggeredTime: Instant, val errorMessage: String? = null, val severity: Severity, - val executionId: String? = null -) : Writeable, ToXContent { + val executionId: String? 
= null, +) : Writeable, + ToXContent { @Throws(IOException::class) constructor(sin: StreamInput) : this( id = sin.readString(), @@ -76,11 +77,12 @@ data class AlertV2( monitorId = sin.readString(), monitorName = sin.readString(), monitorVersion = sin.readLong(), - monitorUser = if (sin.readBoolean()) { - User(sin) - } else { - null - }, + monitorUser = + if (sin.readBoolean()) { + User(sin) + } else { + null + }, triggerId = sin.readString(), triggerName = sin.readString(), query = sin.readString(), @@ -88,7 +90,7 @@ data class AlertV2( triggeredTime = sin.readInstant(), errorMessage = sin.readOptionalString(), severity = sin.readEnum(Severity::class.java), - executionId = sin.readOptionalString() + executionId = sin.readOptionalString(), ) @Throws(IOException::class) @@ -111,16 +113,19 @@ data class AlertV2( out.writeOptionalString(executionId) } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return createXContentBuilder(builder, false) - } + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = createXContentBuilder(builder, false) - fun toXContentWithUser(builder: XContentBuilder): XContentBuilder { - return createXContentBuilder(builder, true) - } + fun toXContentWithUser(builder: XContentBuilder): XContentBuilder = createXContentBuilder(builder, true) - private fun createXContentBuilder(builder: XContentBuilder, withUser: Boolean): XContentBuilder { - builder.startObject() + private fun createXContentBuilder( + builder: XContentBuilder, + withUser: Boolean, + ): XContentBuilder { + builder + .startObject() .field(ALERT_V2_ID_FIELD, id) .field(ALERT_V2_VERSION_FIELD, version) .field(MONITOR_V2_ID_FIELD, monitorId) @@ -145,15 +150,14 @@ data class AlertV2( return builder } - fun asTemplateArg(): Map { - return mapOf( + fun asTemplateArg(): Map = + mapOf( ALERT_V2_ID_FIELD to id, ALERT_V2_VERSION_FIELD to version, ERROR_MESSAGE_FIELD to errorMessage, 
EXECUTION_ID_FIELD to executionId, - SEVERITY_FIELD to severity.value + SEVERITY_FIELD to severity.value, ) - } companion object { const val ALERT_V2_ID_FIELD = "id" @@ -179,7 +183,11 @@ data class AlertV2( @JvmStatic @JvmOverloads @Throws(IOException::class) - fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): AlertV2 { + fun parse( + xcp: XContentParser, + id: String = NO_ID, + version: Long = NO_VERSION, + ): AlertV2 { var schemaVersion = NO_SCHEMA_VERSION lateinit var monitorId: String lateinit var monitorName: String @@ -200,30 +208,67 @@ data class AlertV2( xcp.nextToken() when (fieldName) { - MONITOR_V2_ID_FIELD -> monitorId = xcp.text() - SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() - MONITOR_V2_NAME_FIELD -> monitorName = xcp.text() - MONITOR_V2_VERSION_FIELD -> monitorVersion = xcp.longValue() - MONITOR_V2_USER_FIELD -> - monitorUser = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { - null - } else { - User.parse(xcp) - } - TRIGGER_V2_ID_FIELD -> triggerId = xcp.text() - TRIGGER_V2_NAME_FIELD -> triggerName = xcp.text() - QUERY_FIELD -> query = xcp.text() - QUERY_RESULTS_FIELD -> queryResults = xcp.map() - TRIGGERED_TIME_FIELD -> triggeredTime = xcp.instant() - ERROR_MESSAGE_FIELD -> errorMessage = xcp.textOrNull() - EXECUTION_ID_FIELD -> executionId = xcp.textOrNull() + MONITOR_V2_ID_FIELD -> { + monitorId = xcp.text() + } + + SCHEMA_VERSION_FIELD -> { + schemaVersion = xcp.intValue() + } + + MONITOR_V2_NAME_FIELD -> { + monitorName = xcp.text() + } + + MONITOR_V2_VERSION_FIELD -> { + monitorVersion = xcp.longValue() + } + + MONITOR_V2_USER_FIELD -> { + monitorUser = + if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { + null + } else { + User.parse(xcp) + } + } + + TRIGGER_V2_ID_FIELD -> { + triggerId = xcp.text() + } + + TRIGGER_V2_NAME_FIELD -> { + triggerName = xcp.text() + } + + QUERY_FIELD -> { + query = xcp.text() + } + + QUERY_RESULTS_FIELD -> { + queryResults = xcp.map() + } + + 
TRIGGERED_TIME_FIELD -> { + triggeredTime = xcp.instant() + } + + ERROR_MESSAGE_FIELD -> { + errorMessage = xcp.textOrNull() + } + + EXECUTION_ID_FIELD -> { + executionId = xcp.textOrNull() + } + TriggerV2.SEVERITY_FIELD -> { val input = xcp.text() - val enumMatchResult = Severity.enumFromString(input) - ?: throw IllegalArgumentException( - "Invalid value for ${TriggerV2.SEVERITY_FIELD}: $input. " + - "Supported values are ${Severity.entries.map { it.value }}" - ) + val enumMatchResult = + Severity.enumFromString(input) + ?: throw IllegalArgumentException( + "Invalid value for ${TriggerV2.SEVERITY_FIELD}: $input. " + + "Supported values are ${Severity.entries.map { it.value }}", + ) severity = enumMatchResult } } @@ -244,14 +289,12 @@ data class AlertV2( triggeredTime = requireNotNull(triggeredTime), errorMessage = errorMessage, severity = severity, - executionId = executionId + executionId = executionId, ) } @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): AlertV2 { - return AlertV2(sin) - } + fun readFrom(sin: StreamInput): AlertV2 = AlertV2(sin) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/MonitorV2.kt b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/MonitorV2.kt index d7b1ef16c..208c9d03c 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/MonitorV2.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/MonitorV2.kt @@ -43,7 +43,10 @@ interface MonitorV2 : ScheduledJob { fun asTemplateArg(): Map - fun toXContentWithUser(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder + fun toXContentWithUser( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder fun makeCopy( id: String = this.id, @@ -59,20 +62,19 @@ interface MonitorV2 : ScheduledJob { // triggers can be copied at instance-level data class copy() schemaVersion: Int = this.schemaVersion, lookBackWindow: Long? = this.lookBackWindow, - timestampField: String? 
= this.timestampField + timestampField: String? = this.timestampField, ): MonitorV2 - enum class MonitorV2Type(val value: String) { - PPL_SQL_MONITOR(PPL_SQL_MONITOR_TYPE); + enum class MonitorV2Type( + val value: String, + ) { + PPL_SQL_MONITOR(PPL_SQL_MONITOR_TYPE), + ; - override fun toString(): String { - return value - } + override fun toString(): String = value companion object { - fun enumFromString(value: String): MonitorV2Type? { - return MonitorV2Type.entries.find { it.value == value } - } + fun enumFromString(value: String): MonitorV2Type? = MonitorV2Type.entries.find { it.value == value } } } @@ -104,31 +106,37 @@ interface MonitorV2 : ScheduledJob { const val UUID_LENGTH = 20 // the length of a UUID generated by UUIDs.base64UUID() const val DESCRIPTION_MAX_LENGTH = 2000 - val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( - ScheduledJob::class.java, - ParseField(MONITOR_V2_TYPE), - CheckedFunction { parse(it) } - ) + val XCONTENT_REGISTRY = + NamedXContentRegistry.Entry( + ScheduledJob::class.java, + ParseField(MONITOR_V2_TYPE), + CheckedFunction { parse(it) }, + ) @JvmStatic @Throws(IOException::class) - fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): MonitorV2 { - /* parse outer object for monitorV2 type, then delegate to correct monitorV2 parser */ + fun parse( + xcp: XContentParser, + id: String = NO_ID, + version: Long = NO_VERSION, + ): MonitorV2 { + // parse outer object for monitorV2 type, then delegate to correct monitorV2 parser XContentParserUtils.ensureExpectedToken( // outer monitor object start XContentParser.Token.START_OBJECT, xcp.currentToken(), - xcp + xcp, ) // monitor type field name XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, xcp.nextToken(), xcp) val monitorTypeText = xcp.currentName() - val monitorType = MonitorV2Type.enumFromString(monitorTypeText) - ?: throw IllegalStateException( - "when parsing MonitorV2, received invalid monitor type: $monitorTypeText. 
" + - "Please ensure monitor object is wrapped in an outer $PPL_SQL_MONITOR_TYPE object" - ) + val monitorType = + MonitorV2Type.enumFromString(monitorTypeText) + ?: throw IllegalStateException( + "when parsing MonitorV2, received invalid monitor type: $monitorTypeText. " + + "Please ensure monitor object is wrapped in an outer $PPL_SQL_MONITOR_TYPE object", + ) // inner monitor object start XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) @@ -138,14 +146,16 @@ interface MonitorV2 : ScheduledJob { } } - fun readFrom(sin: StreamInput): MonitorV2 { - return when (val monitorType = sin.readEnum(MonitorV2Type::class.java)) { + fun readFrom(sin: StreamInput): MonitorV2 = + when (val monitorType = sin.readEnum(MonitorV2Type::class.java)) { MonitorV2Type.PPL_SQL_MONITOR -> PPLSQLMonitor(sin) else -> throw IllegalStateException("Unexpected input \"$monitorType\" when reading MonitorV2") } - } - fun writeTo(out: StreamOutput, monitorV2: MonitorV2) { + fun writeTo( + out: StreamOutput, + monitorV2: MonitorV2, + ) { when (monitorV2) { is PPLSQLMonitor -> { out.writeEnum(MonitorV2Type.PPL_SQL_MONITOR) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/MonitorV2RunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/MonitorV2RunResult.kt index 28d91b297..79088eb49 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/MonitorV2RunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/MonitorV2RunResult.kt @@ -16,13 +16,15 @@ import org.opensearch.core.xcontent.ToXContent * * @opensearch.experimental */ -interface MonitorV2RunResult : Writeable, ToXContent { +interface MonitorV2RunResult : + Writeable, + ToXContent { val monitorName: String val error: Exception? 
val triggerResults: Map enum class MonitorV2RunResultType { - PPL_SQL_MONITOR_RUN_RESULT; + PPL_SQL_MONITOR_RUN_RESULT, } companion object { @@ -37,7 +39,10 @@ interface MonitorV2RunResult : Writeable, } } - fun writeTo(out: StreamOutput, monitorV2RunResult: MonitorV2RunResult<*>) { + fun writeTo( + out: StreamOutput, + monitorV2RunResult: MonitorV2RunResult<*>, + ) { when (monitorV2RunResult) { is PPLSQLMonitorRunResult -> { out.writeEnum(MonitorV2RunResultType.PPL_SQL_MONITOR_RUN_RESULT) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLMonitor.kt b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLMonitor.kt index 8b39069ae..051fa1cf7 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLMonitor.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLMonitor.kt @@ -74,13 +74,15 @@ data class PPLSQLMonitor( override val triggers: List, override val schemaVersion: Int = IndexUtils.NO_SCHEMA_VERSION, val queryLanguage: QueryLanguage = QueryLanguage.PPL, // default to PPL, SQL not currently supported - val query: String + val query: String, ) : MonitorV2 { - // specify scheduled job type override val type = MonitorV2.MONITOR_V2_TYPE - override fun fromDocument(id: String, version: Long): PPLSQLMonitor = copy(id = id, version = version) + override fun fromDocument( + id: String, + version: Long, + ): PPLSQLMonitor = copy(id = id, version = version) init { // SQL monitors are not yet supported @@ -136,26 +138,33 @@ data class PPLSQLMonitor( lastUpdateTime = sin.readInstant(), enabledTime = sin.readOptionalInstant(), description = sin.readOptionalString(), - user = if (sin.readBoolean()) { - User(sin) - } else { - null - }, + user = + if (sin.readBoolean()) { + User(sin) + } else { + null + }, triggers = sin.readList(PPLSQLTrigger.Companion::readFrom), schemaVersion = sin.readInt(), queryLanguage = sin.readEnum(QueryLanguage::class.java), - query = sin.readString() + query = sin.readString(), 
) - override fun toXContentWithUser(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return createXContentBuilder(builder, params, true) - } - - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - return createXContentBuilder(builder, params, false) - } - - private fun createXContentBuilder(builder: XContentBuilder, params: ToXContent.Params, withUser: Boolean): XContentBuilder { + override fun toXContentWithUser( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = createXContentBuilder(builder, params, true) + + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder = createXContentBuilder(builder, params, false) + + private fun createXContentBuilder( + builder: XContentBuilder, + params: ToXContent.Params, + withUser: Boolean, + ): XContentBuilder { builder.startObject() // overall start object // if this is being written as ScheduledJob, add extra object layer and add ScheduledJob @@ -228,8 +237,8 @@ data class PPLSQLMonitor( out.writeString(query) } - override fun asTemplateArg(): Map { - return mapOf( + override fun asTemplateArg(): Map = + mapOf( IndexUtils._ID to id, IndexUtils._VERSION to version, NAME_FIELD to name, @@ -238,9 +247,8 @@ data class PPLSQLMonitor( LOOK_BACK_WINDOW_FIELD to lookBackWindow, LAST_UPDATE_TIME_FIELD to lastUpdateTime.toEpochMilli(), ENABLED_TIME_FIELD to enabledTime?.toEpochMilli(), - QUERY_FIELD to query + QUERY_FIELD to query, ) - } override fun makeCopy( id: String, @@ -254,9 +262,9 @@ data class PPLSQLMonitor( user: User?, schemaVersion: Int, lookBackWindow: Long?, - timestampField: String? 
- ): PPLSQLMonitor { - return copy( + timestampField: String?, + ): PPLSQLMonitor = + copy( id = id, version = version, name = name, @@ -268,13 +276,15 @@ data class PPLSQLMonitor( user = user, schemaVersion = schemaVersion, lookBackWindow = lookBackWindow, - timestampField = timestampField + timestampField = timestampField, ) - } - enum class QueryLanguage(val value: String) { + enum class QueryLanguage( + val value: String, + ) { PPL(PPL_QUERY_LANGUAGE), - SQL(SQL_QUERY_LANGUAGE); + SQL(SQL_QUERY_LANGUAGE), + ; companion object { fun enumFromString(value: String): QueryLanguage? = QueryLanguage.entries.firstOrNull { it.value == value } @@ -296,7 +306,11 @@ data class PPLSQLMonitor( @JvmStatic @JvmOverloads @Throws(IOException::class) - fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): PPLSQLMonitor { + fun parse( + xcp: XContentParser, + id: String = NO_ID, + version: Long = NO_VERSION, + ): PPLSQLMonitor { var name: String? = null var enabled = true var schedule: Schedule? = null @@ -311,66 +325,96 @@ data class PPLSQLMonitor( var queryLanguage: QueryLanguage = QueryLanguage.PPL // default to PPL var query: String? 
= null - /* parse */ + // parse XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { val fieldName = xcp.currentName() xcp.nextToken() when (fieldName) { - NAME_FIELD -> name = xcp.text() - ENABLED_FIELD -> enabled = xcp.booleanValue() - SCHEDULE_FIELD -> schedule = Schedule.parse(xcp) + NAME_FIELD -> { + name = xcp.text() + } + + ENABLED_FIELD -> { + enabled = xcp.booleanValue() + } + + SCHEDULE_FIELD -> { + schedule = Schedule.parse(xcp) + } + LOOK_BACK_WINDOW_FIELD -> { if (xcp.currentToken() != XContentParser.Token.VALUE_NULL) { lookBackWindow = xcp.longValue() } } + TIMESTAMP_FIELD -> { if (xcp.currentToken() != XContentParser.Token.VALUE_NULL) { timestampField = xcp.text() } } - LAST_UPDATE_TIME_FIELD -> lastUpdateTime = xcp.instant() - ENABLED_TIME_FIELD -> enabledTime = xcp.instant() + + LAST_UPDATE_TIME_FIELD -> { + lastUpdateTime = xcp.instant() + } + + ENABLED_TIME_FIELD -> { + enabledTime = xcp.instant() + } + DESCRIPTION_FIELD -> { if (xcp.currentToken() != XContentParser.Token.VALUE_NULL) { description = xcp.text() } } + USER_FIELD -> { if (xcp.currentToken() != XContentParser.Token.VALUE_NULL) { user = User.parse(xcp) } } + TRIGGERS_FIELD -> { XContentParserUtils.ensureExpectedToken( XContentParser.Token.START_ARRAY, xcp.currentToken(), - xcp + xcp, ) while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { triggers.add(PPLSQLTrigger.parseInner(xcp)) } } - SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() + + SCHEMA_VERSION_FIELD -> { + schemaVersion = xcp.intValue() + } + QUERY_LANGUAGE_FIELD -> { val input = xcp.text() - val enumMatchResult = QueryLanguage.enumFromString(input) - ?: throw AlertingException.wrap( - IllegalArgumentException( - "Invalid value for $QUERY_LANGUAGE_FIELD: $input. 
" + - "Supported values are ${QueryLanguage.entries.map { it.value }}" + val enumMatchResult = + QueryLanguage.enumFromString(input) + ?: throw AlertingException.wrap( + IllegalArgumentException( + "Invalid value for $QUERY_LANGUAGE_FIELD: $input. " + + "Supported values are ${QueryLanguage.entries.map { it.value }}", + ), ) - ) queryLanguage = enumMatchResult } - QUERY_FIELD -> query = xcp.text() - else -> throw IllegalArgumentException("Unexpected field when parsing PPL/SQL Monitor: $fieldName") + + QUERY_FIELD -> { + query = xcp.text() + } + + else -> { + throw IllegalArgumentException("Unexpected field when parsing PPL/SQL Monitor: $fieldName") + } } } - /* validations */ + // validations // if enabled, set time of MonitorV2 creation/update is set as enable time if (enabled && enabledTime == null) { @@ -387,7 +431,7 @@ data class PPLSQLMonitor( requireNotNull(query) { "Query is null" } requireNotNull(lastUpdateTime) { "Last update time is null" } - /* return PPLSQLMonitor */ + // return PPLSQLMonitor return PPLSQLMonitor( id, version, @@ -403,7 +447,7 @@ data class PPLSQLMonitor( triggers, schemaVersion, queryLanguage, - query + query, ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLMonitorRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLMonitorRunResult.kt index 12a34c560..1a187cb89 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLMonitorRunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLMonitorRunResult.kt @@ -26,19 +26,21 @@ data class PPLSQLMonitorRunResult( override val monitorName: String, override val error: Exception?, override val triggerResults: Map, - val pplQueryResults: Map> // key: trigger id, value: query results + val pplQueryResults: Map>, // key: trigger id, value: query results ) : MonitorV2RunResult { - @Throws(IOException::class) @Suppress("UNCHECKED_CAST") constructor(sin: StreamInput) : this( sin.readString(), // 
monitorName sin.readException(), // error sin.readMap(STRING_READER, runResultReader()) as Map, // triggerResults - sin.readMap() as Map> // pplQueryResults + sin.readMap() as Map>, // pplQueryResults ) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.startObject() builder.field(MONITOR_V2_NAME_FIELD, monitorName) builder.field(ERROR_FIELD, error?.message) @@ -59,16 +61,14 @@ data class PPLSQLMonitorRunResult( companion object { const val PPL_QUERY_RESULTS_FIELD = "ppl_query_results" - private fun runResultReader(): Writeable.Reader { - return Writeable.Reader { + private fun runResultReader(): Writeable.Reader = + Writeable.Reader { PPLSQLTriggerRunResult.readFrom(it) } - } - private fun runResultWriter(): Writeable.Writer { - return Writeable.Writer { streamOutput: StreamOutput, runResult: PPLSQLTriggerRunResult -> + private fun runResultWriter(): Writeable.Writer = + Writeable.Writer { streamOutput: StreamOutput, runResult: PPLSQLTriggerRunResult -> runResult.writeTo(streamOutput) } - } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLTrigger.kt b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLTrigger.kt index b902d39a2..5ca776e52 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLTrigger.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLTrigger.kt @@ -80,9 +80,8 @@ data class PPLSQLTrigger( val conditionType: ConditionType, // NUMBER_OF_RESULTS or CUSTOM val numResultsCondition: NumResultsCondition?, val numResultsValue: Long?, - val customCondition: String? + val customCondition: String?, ) : TriggerV2 { - init { requireNotNull(this.name) { "Trigger name must be included." } requireNotNull(this.severity) { "Trigger severity must be included." 
} @@ -137,6 +136,7 @@ data class PPLSQLTrigger( "$CUSTOM_CONDITION_FIELD must not be included." } } + ConditionType.CUSTOM -> { requireNotNull(this.customCondition) { "if trigger condition is of type ${ConditionType.CUSTOM.value}, " + @@ -171,7 +171,7 @@ data class PPLSQLTrigger( sin.readEnum(ConditionType::class.java), // condition type if (sin.readBoolean()) sin.readEnum(NumResultsCondition::class.java) else null, // num results condition sin.readOptionalLong(), // num results value - sin.readOptionalString() // custom condition + sin.readOptionalString(), // custom condition ) @Throws(IOException::class) @@ -193,7 +193,10 @@ data class PPLSQLTrigger( out.writeOptionalString(customCondition) } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params?): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params?, + ): XContentBuilder { builder.startObject() builder.field(ID_FIELD, id) builder.field(NAME_FIELD, name) @@ -211,8 +214,8 @@ data class PPLSQLTrigger( return builder } - fun asTemplateArg(): Map { - return mapOf( + fun asTemplateArg(): Map = + mapOf( ID_FIELD to id, NAME_FIELD to name, SEVERITY_FIELD to severity.value, @@ -223,35 +226,43 @@ data class PPLSQLTrigger( CONDITION_TYPE_FIELD to conditionType.value, NUM_RESULTS_CONDITION_FIELD to numResultsCondition?.value, NUM_RESULTS_VALUE_FIELD to numResultsValue, - CUSTOM_CONDITION_FIELD to customCondition + CUSTOM_CONDITION_FIELD to customCondition, ) - } - enum class TriggerMode(val value: String) { + enum class TriggerMode( + val value: String, + ) { RESULT_SET("result_set"), - PER_RESULT("per_result"); + PER_RESULT("per_result"), + ; companion object { fun enumFromString(value: String): TriggerMode? 
= entries.firstOrNull { it.value == value } } } - enum class ConditionType(val value: String) { + enum class ConditionType( + val value: String, + ) { NUMBER_OF_RESULTS("number_of_results"), - CUSTOM("custom"); + CUSTOM("custom"), + ; companion object { fun enumFromString(value: String): ConditionType? = entries.firstOrNull { it.value == value } } } - enum class NumResultsCondition(val value: String) { + enum class NumResultsCondition( + val value: String, + ) { GREATER_THAN(">"), GREATER_THAN_EQUAL(">="), LESS_THAN("<"), LESS_THAN_EQUAL("<="), EQUAL("=="), - NOT_EQUAL("!="); + NOT_EQUAL("!="), + ; companion object { fun enumFromString(value: String): NumResultsCondition? = entries.firstOrNull { it.value == value } @@ -273,11 +284,12 @@ data class PPLSQLTrigger( // only valid chars (letters, numbers, -, _) private val validCharsRegex = """^[a-zA-Z0-9_-]+$""".toRegex() - val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( - TriggerV2::class.java, - ParseField(PPL_SQL_TRIGGER_FIELD), - CheckedFunction { parseInner(it) } - ) + val XCONTENT_REGISTRY = + NamedXContentRegistry.Entry( + TriggerV2::class.java, + ParseField(PPL_SQL_TRIGGER_FIELD), + CheckedFunction { parseInner(it) }, + ) @JvmStatic @Throws(IOException::class) @@ -295,10 +307,11 @@ data class PPLSQLTrigger( var numResultsValue: Long? = null var customCondition: String? 
= null - /* parse */ + // parse XContentParserUtils.ensureExpectedToken( // outer trigger object start XContentParser.Token.START_OBJECT, - xcp.currentToken(), xcp + xcp.currentToken(), + xcp, ) while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { @@ -306,82 +319,106 @@ data class PPLSQLTrigger( xcp.nextToken() when (fieldName) { - ID_FIELD -> id = xcp.text() - NAME_FIELD -> name = xcp.text() + ID_FIELD -> { + id = xcp.text() + } + + NAME_FIELD -> { + name = xcp.text() + } + SEVERITY_FIELD -> { val input = xcp.text() - val enumMatchResult = Severity.enumFromString(input) - ?: throw IllegalArgumentException( - "Invalid value for $SEVERITY_FIELD: $input. " + - "Supported values are ${Severity.entries.map { it.value }}" - ) + val enumMatchResult = + Severity.enumFromString(input) + ?: throw IllegalArgumentException( + "Invalid value for $SEVERITY_FIELD: $input. " + + "Supported values are ${Severity.entries.map { it.value }}", + ) severity = enumMatchResult } + MODE_FIELD -> { val input = xcp.text() - val enumMatchResult = TriggerMode.enumFromString(input) - ?: throw IllegalArgumentException( - "Invalid value for $MODE_FIELD: $input. " + - "Supported values are ${TriggerMode.entries.map { it.value }}" - ) + val enumMatchResult = + TriggerMode.enumFromString(input) + ?: throw IllegalArgumentException( + "Invalid value for $MODE_FIELD: $input. " + + "Supported values are ${TriggerMode.entries.map { it.value }}", + ) mode = enumMatchResult } + CONDITION_TYPE_FIELD -> { val input = xcp.text() - val enumMatchResult = ConditionType.enumFromString(input) - ?: throw IllegalArgumentException( - "Invalid value for $CONDITION_TYPE_FIELD: $input. " + - "Supported values are ${ConditionType.entries.map { it.value }}" - ) + val enumMatchResult = + ConditionType.enumFromString(input) + ?: throw IllegalArgumentException( + "Invalid value for $CONDITION_TYPE_FIELD: $input. 
" + + "Supported values are ${ConditionType.entries.map { it.value }}", + ) conditionType = enumMatchResult } + NUM_RESULTS_CONDITION_FIELD -> { if (xcp.currentToken() != XContentParser.Token.VALUE_NULL) { val input = xcp.text() - val enumMatchResult = NumResultsCondition.enumFromString(input) - ?: throw IllegalArgumentException( - "Invalid value for $NUM_RESULTS_CONDITION_FIELD: $input. " + - "Supported values are ${NumResultsCondition.entries.map { it.value }}" - ) + val enumMatchResult = + NumResultsCondition.enumFromString(input) + ?: throw IllegalArgumentException( + "Invalid value for $NUM_RESULTS_CONDITION_FIELD: $input. " + + "Supported values are ${NumResultsCondition.entries.map { it.value }}", + ) numResultsCondition = enumMatchResult } } + NUM_RESULTS_VALUE_FIELD -> { if (xcp.currentToken() != XContentParser.Token.VALUE_NULL) { numResultsValue = xcp.longValue() } } + CUSTOM_CONDITION_FIELD -> { if (xcp.currentToken() != XContentParser.Token.VALUE_NULL) { customCondition = xcp.text() } } + THROTTLE_FIELD -> { if (xcp.currentToken() != XContentParser.Token.VALUE_NULL) { throttleDuration = xcp.longValue() } } + EXPIRE_FIELD -> { if (xcp.currentToken() != XContentParser.Token.VALUE_NULL) { expireDuration = xcp.longValue() } } - LAST_TRIGGERED_FIELD -> lastTriggeredTime = xcp.instant() + + LAST_TRIGGERED_FIELD -> { + lastTriggeredTime = xcp.instant() + } + ACTIONS_FIELD -> { XContentParserUtils.ensureExpectedToken( XContentParser.Token.START_ARRAY, xcp.currentToken(), - xcp + xcp, ) while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { actions.add(Action.parse(xcp)) } } - else -> throw IllegalArgumentException("Unexpected field when parsing PPL Trigger: $fieldName") + + else -> { + throw IllegalArgumentException("Unexpected field when parsing PPL Trigger: $fieldName") + } } } - /* validations */ + // validations requireNotNull(name) { "Trigger name must be included" } requireNotNull(severity) { "Trigger severity must be included" } requireNotNull(mode) 
{ "Trigger mode must be included" } @@ -400,14 +437,12 @@ data class PPLSQLTrigger( conditionType, numResultsCondition, numResultsValue, - customCondition + customCondition, ) } @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): PPLSQLTrigger { - return PPLSQLTrigger(sin) - } + fun readFrom(sin: StreamInput): PPLSQLTrigger = PPLSQLTrigger(sin) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLTriggerRunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLTriggerRunResult.kt index 0c505d9e6..1d2b2bb05 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLTriggerRunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/PPLSQLTriggerRunResult.kt @@ -25,16 +25,18 @@ data class PPLSQLTriggerRunResult( override var triggered: Boolean, override var error: Exception?, ) : TriggerV2RunResult { - @Throws(IOException::class) @Suppress("UNCHECKED_CAST") constructor(sin: StreamInput) : this( triggerName = sin.readString(), triggered = sin.readBoolean(), - error = sin.readException() + error = sin.readException(), ) - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.startObject() builder.field(NAME_FIELD, triggerName) builder.field(TRIGGERED_FIELD, triggered) @@ -53,8 +55,6 @@ data class PPLSQLTriggerRunResult( companion object { @JvmStatic @Throws(IOException::class) - fun readFrom(sin: StreamInput): PPLSQLTriggerRunResult { - return PPLSQLTriggerRunResult(sin) - } + fun readFrom(sin: StreamInput): PPLSQLTriggerRunResult = PPLSQLTriggerRunResult(sin) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/TriggerV2.kt b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/TriggerV2.kt index a4fe90e14..f5d19cd0b 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/TriggerV2.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/TriggerV2.kt @@ -17,7 +17,6 @@ import java.time.Instant * @opensearch.experimental */ interface TriggerV2 : BaseModel { - val id: String val name: String val severity: Severity @@ -26,26 +25,28 @@ interface TriggerV2 : BaseModel { var lastTriggeredTime: Instant? val actions: List - enum class TriggerV2Type(val value: String) { - PPL_TRIGGER(PPL_SQL_TRIGGER_FIELD); + enum class TriggerV2Type( + val value: String, + ) { + PPL_TRIGGER(PPL_SQL_TRIGGER_FIELD), + ; - override fun toString(): String { - return value - } + override fun toString(): String = value } - enum class Severity(val value: String) { + enum class Severity( + val value: String, + ) { INFO("info"), ERROR("error"), LOW("low"), MEDIUM("medium"), HIGH("high"), - CRITICAL("critical"); + CRITICAL("critical"), + ; companion object { - fun enumFromString(value: String): Severity? { - return entries.find { it.value == value } - } + fun enumFromString(value: String): Severity? = entries.find { it.value == value } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/TriggerV2RunResult.kt b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/TriggerV2RunResult.kt index 8e3069972..7d5cbb9e7 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/TriggerV2RunResult.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/modelv2/TriggerV2RunResult.kt @@ -12,7 +12,9 @@ import org.opensearch.core.xcontent.ToXContent * Trigger V2 Run Result interface. All classes that store the run results * of an individual v2 trigger must implement this interface */ -interface TriggerV2RunResult : Writeable, ToXContent { +interface TriggerV2RunResult : + Writeable, + ToXContent { val triggerName: String val triggered: Boolean val error: Exception? 
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/remote/monitors/RemoteDocumentLevelMonitorRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/remote/monitors/RemoteDocumentLevelMonitorRunner.kt index c12356cab..4ba2536cd 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/remote/monitors/RemoteDocumentLevelMonitorRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/remote/monitors/RemoteDocumentLevelMonitorRunner.kt @@ -43,7 +43,7 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { dryRun: Boolean, workflowRunContext: WorkflowRunContext?, executionId: String, - transportService: TransportService + transportService: TransportService, ): MonitorRunResult<*> { logger.debug("Remote Document-level-monitor is running ...") val isTempMonitor = dryRun || monitor.id == Monitor.NO_ID @@ -56,15 +56,20 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { monitorResult = monitorResult.copy(error = AlertingException.wrap(e)) } - var (monitorMetadata, _) = MonitorMetadataService.getOrCreateMetadata( - monitor = monitor, - createWithRunContext = false, - skipIndex = isTempMonitor, - workflowRunContext?.workflowMetadataId - ) + var (monitorMetadata, _) = + MonitorMetadataService.getOrCreateMetadata( + monitor = monitor, + createWithRunContext = false, + skipIndex = isTempMonitor, + workflowRunContext?.workflowMetadataId, + ) logger.info(monitorMetadata.lastRunContext.toMutableMap().toString()) - val lastRunContext = if (monitorMetadata.lastRunContext.isNullOrEmpty()) mutableMapOf() - else monitorMetadata.lastRunContext.toMutableMap() as MutableMap> + val lastRunContext = + if (monitorMetadata.lastRunContext.isNullOrEmpty()) { + mutableMapOf() + } else { + monitorMetadata.lastRunContext.toMutableMap() as MutableMap> + } val updatedLastRunContext = lastRunContext.toMutableMap() val remoteDocLevelMonitorInput = monitor.inputs[0] as RemoteDocLevelMonitorInput @@ -73,11 +78,12 @@ class RemoteDocumentLevelMonitorRunner : 
MonitorRunner() { var concreteIndices = listOf() // Resolve all passed indices to concrete indices - val allConcreteIndices = IndexUtils.resolveAllIndices( - docLevelMonitorInput.indices, - monitorCtx.clusterService!!, - monitorCtx.indexNameExpressionResolver!! - ) + val allConcreteIndices = + IndexUtils.resolveAllIndices( + docLevelMonitorInput.indices, + monitorCtx.clusterService!!, + monitorCtx.indexNameExpressionResolver!!, + ) // cleanup old indices that are not monitored anymore from the same monitor val runContextKeys = updatedLastRunContext.keys.toMutableSet() for (ind in runContextKeys) { @@ -89,11 +95,12 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { try { docLevelMonitorInput.indices.forEach { indexName -> - concreteIndices = IndexUtils.resolveAllIndices( - listOf(indexName), - monitorCtx.clusterService!!, - monitorCtx.indexNameExpressionResolver!! - ) + concreteIndices = + IndexUtils.resolveAllIndices( + listOf(indexName), + monitorCtx.clusterService!!, + monitorCtx.indexNameExpressionResolver!!, + ) var lastWriteIndex: String? 
= null if (IndexUtils.isAlias(indexName, monitorCtx.clusterService!!.state()) || IndexUtils.isDataStream(indexName, monitorCtx.clusterService!!.state()) @@ -102,37 +109,45 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { if (lastWriteIndex != null) { val lastWriteIndexCreationDate = IndexUtils.getCreationDateForIndex(lastWriteIndex, monitorCtx.clusterService!!.state()) - concreteIndices = IndexUtils.getNewestIndicesByCreationDate( - concreteIndices, - monitorCtx.clusterService!!.state(), - lastWriteIndexCreationDate - ) + concreteIndices = + IndexUtils.getNewestIndicesByCreationDate( + concreteIndices, + monitorCtx.clusterService!!.state(), + lastWriteIndexCreationDate, + ) } } concreteIndices.forEach { concreteIndexName -> // Prepare lastRunContext for each index - val indexLastRunContext = lastRunContext.getOrPut(concreteIndexName) { - val isIndexCreatedRecently = createdRecently( - monitor, - periodStart, - periodEnd, - monitorCtx.clusterService!!.state().metadata.index(concreteIndexName) - ) - MonitorMetadataService.createRunContextForIndex(concreteIndexName, isIndexCreatedRecently) - } + val indexLastRunContext = + lastRunContext.getOrPut(concreteIndexName) { + val isIndexCreatedRecently = + createdRecently( + monitor, + periodStart, + periodEnd, + monitorCtx.clusterService!! 
+ .state() + .metadata + .index(concreteIndexName), + ) + MonitorMetadataService.createRunContextForIndex(concreteIndexName, isIndexCreatedRecently) + } val shardCount: Int = getShardsCount(monitorCtx.clusterService!!, concreteIndexName) - val indexUpdatedRunContext = initializeNewLastRunContext( - indexLastRunContext.toMutableMap(), - concreteIndexName, - shardCount - ) as MutableMap + val indexUpdatedRunContext = + initializeNewLastRunContext( + indexLastRunContext.toMutableMap(), + concreteIndexName, + shardCount, + ) as MutableMap if (IndexUtils.isAlias(indexName, monitorCtx.clusterService!!.state()) || IndexUtils.isDataStream(indexName, monitorCtx.clusterService!!.state()) ) { - if (concreteIndexName == IndexUtils.getWriteIndex( + if (concreteIndexName == + IndexUtils.getWriteIndex( indexName, - monitorCtx.clusterService!!.state() + monitorCtx.clusterService!!.state(), ) ) { updatedLastRunContext.remove(lastWriteIndex) @@ -152,31 +167,33 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { } val nodeMap = getNodes(monitorCtx) - val nodeShardAssignments = distributeShards( - monitorCtx, - nodeMap.keys.toList(), - shards.toList() - ) + val nodeShardAssignments = + distributeShards( + monitorCtx, + nodeMap.keys.toList(), + shards.toList(), + ) - val docLevelMonitorFanOutResponses = monitorCtx.remoteMonitors[monitor.monitorType]!!.monitorRunner.doFanOut( - monitorCtx.clusterService!!, - monitor, - monitorMetadata.copy(lastRunContext = lastRunContext), - executionId, - concreteIndices, - workflowRunContext, - dryRun, - transportService, - nodeMap, - nodeShardAssignments - ) + val docLevelMonitorFanOutResponses = + monitorCtx.remoteMonitors[monitor.monitorType]!!.monitorRunner.doFanOut( + monitorCtx.clusterService!!, + monitor, + monitorMetadata.copy(lastRunContext = lastRunContext), + executionId, + concreteIndices, + workflowRunContext, + dryRun, + transportService, + nodeMap, + nodeShardAssignments, + ) 
updateLastRunContextFromFanOutResponses(docLevelMonitorFanOutResponses, updatedLastRunContext) val triggerResults = buildTriggerResults(docLevelMonitorFanOutResponses) val inputRunResults = buildInputRunResults(docLevelMonitorFanOutResponses) if (!isTempMonitor) { MonitorMetadataService.upsertMetadata( monitorMetadata.copy(lastRunContext = updatedLastRunContext), - true + true, ) } return monitorResult.copy(triggerResults = triggerResults, inputResults = inputRunResults) @@ -184,11 +201,12 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { logger.error("Failed running Document-level-monitor ${monitor.name}", e) val errorMessage = ExceptionsHelper.detailedMessage(e) monitorCtx.alertService!!.upsertMonitorErrorAlert(monitor, errorMessage, executionId, workflowRunContext) - val alertingException = AlertingException( - errorMessage, - RestStatus.INTERNAL_SERVER_ERROR, - e - ) + val alertingException = + AlertingException( + errorMessage, + RestStatus.INTERNAL_SERVER_ERROR, + e, + ) return monitorResult.copy(error = alertingException, inputResults = InputRunResults(emptyList(), alertingException)) } } @@ -207,9 +225,10 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { } } - private fun getNodes(monitorCtx: MonitorRunnerExecutionContext): Map { - return monitorCtx.clusterService!!.state().nodes.dataNodes.filter { it.value.version >= Version.CURRENT } - } + private fun getNodes(monitorCtx: MonitorRunnerExecutionContext): Map = + monitorCtx.clusterService!!.state().nodes.dataNodes.filter { + it.value.version >= Version.CURRENT + } private fun distributeShards( monitorCtx: MonitorRunnerExecutionContext, @@ -222,11 +241,19 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { val shardsPerNode = totalShards / totalNodes var shardsRemaining = totalShards % totalNodes - val shardIdList = shards.map { - val index = it.split(":")[0] - val shardId = it.split(":")[1] - ShardId(monitorCtx.clusterService!!.state().metadata.index(index).index, 
shardId.toInt()) - } + val shardIdList = + shards.map { + val index = it.split(":")[0] + val shardId = it.split(":")[1] + ShardId( + monitorCtx.clusterService!! + .state() + .metadata + .index(index) + .index, + shardId.toInt(), + ) + } val nodes = allNodes.subList(0, totalNodes) val nodeShardAssignments = mutableMapOf>() @@ -249,7 +276,10 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { return nodeShardAssignments } - private fun getShardsCount(clusterService: ClusterService, index: String): Int { + private fun getShardsCount( + clusterService: ClusterService, + index: String, + ): Int { val allShards: List = clusterService!!.state().routingTable().allShards(index) return allShards.filter { it.primary() }.size } @@ -258,7 +288,6 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { docLevelMonitorFanOutResponses: MutableList, updatedLastRunContext: MutableMap>, ) { - // Prepare updatedLastRunContext for each index for (indexName in updatedLastRunContext.keys) { for (fanOutResponse in docLevelMonitorFanOutResponses) { @@ -268,15 +297,14 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { if (fanOutResponse.lastRunContexts.contains(indexName)) { (fanOutResponse.lastRunContexts[indexName] as Map).forEach { - - val seq_no = it.value.toString().toLongOrNull() + val seqNo = it.value.toString().toLongOrNull() if ( it.key != "shards_count" && it.key != "index" && - seq_no != null && - seq_no >= 0L + seqNo != null && + seqNo >= 0L ) { - indexLastRunContext[it.key] = seq_no + indexLastRunContext[it.key] = seqNo } } } @@ -297,16 +325,18 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { if (documentLevelTriggerRunResult != null) { if (false == triggerResults.contains(triggerId)) { triggerResults[triggerId] = documentLevelTriggerRunResult - triggerErrorMap[triggerId] = if (documentLevelTriggerRunResult.error != null) { - val error = if (documentLevelTriggerRunResult.error is AlertingException) { - documentLevelTriggerRunResult.error 
as AlertingException + triggerErrorMap[triggerId] = + if (documentLevelTriggerRunResult.error != null) { + val error = + if (documentLevelTriggerRunResult.error is AlertingException) { + documentLevelTriggerRunResult.error as AlertingException + } else { + AlertingException.wrap(documentLevelTriggerRunResult.error!!) as AlertingException + } + mutableListOf(error) } else { - AlertingException.wrap(documentLevelTriggerRunResult.error!!) as AlertingException + mutableListOf() } - mutableListOf(error) - } else { - mutableListOf() - } } else { val currVal = triggerResults[triggerId] val newTriggeredDocs = mutableListOf() @@ -315,10 +345,11 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { val newActionResults = mutableMapOf>() newActionResults.putAll(currVal.actionResultsMap) newActionResults.putAll(documentLevelTriggerRunResult.actionResultsMap) - triggerResults[triggerId] = currVal.copy( - triggeredDocs = newTriggeredDocs, - actionResultsMap = newActionResults - ) + triggerResults[triggerId] = + currVal.copy( + triggeredDocs = newTriggeredDocs, + actionResultsMap = newActionResults, + ) if (documentLevelTriggerRunResult.error != null) { triggerErrorMap[triggerId]!!.add(documentLevelTriggerRunResult.error as AlertingException) @@ -364,7 +395,7 @@ class RemoteDocumentLevelMonitorRunner : MonitorRunner() { monitor: Monitor, periodStart: Instant, periodEnd: Instant, - indexMetadata: IndexMetadata + indexMetadata: IndexMetadata, ): Boolean { val lastExecutionTime = if (periodStart == periodEnd) monitor.lastUpdateTime else periodStart val indexCreationDate = indexMetadata.settings.get("index.creation_date")?.toLong() ?: 0L diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/remote/monitors/RemoteMonitorRegistry.kt b/alerting/src/main/kotlin/org/opensearch/alerting/remote/monitors/RemoteMonitorRegistry.kt index 527ee2ef2..c2c4d82bd 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/remote/monitors/RemoteMonitorRegistry.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/remote/monitors/RemoteMonitorRegistry.kt @@ -10,4 +10,7 @@ import org.opensearch.alerting.spi.RemoteMonitorRunner /** * Class to store monitorType to monitorRunner tuples. */ -class RemoteMonitorRegistry(val monitorType: String, val monitorRunner: RemoteMonitorRunner) +class RemoteMonitorRegistry( + val monitorType: String, + val monitorRunner: RemoteMonitorRunner, +) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/AsyncActionHandler.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/AsyncActionHandler.kt index 8f14b0c69..a2892d3e4 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/AsyncActionHandler.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/AsyncActionHandler.kt @@ -9,8 +9,10 @@ import org.opensearch.rest.BytesRestResponse import org.opensearch.rest.RestChannel import org.opensearch.transport.client.node.NodeClient -abstract class AsyncActionHandler(protected val client: NodeClient, protected val channel: RestChannel) { - +abstract class AsyncActionHandler( + protected val client: NodeClient, + protected val channel: RestChannel, +) { protected fun onFailure(e: Exception) { channel.sendResponse(BytesRestResponse(channel, e)) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestAcknowledgeAlertAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestAcknowledgeAlertAction.kt index 1b2ee4521..15cfda587 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestAcknowledgeAlertAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestAcknowledgeAlertAction.kt @@ -32,14 +32,9 @@ private val log: Logger = LogManager.getLogger(RestAcknowledgeAlertAction::class * the ids to the alerts he would like to acknowledge. 
*/ class RestAcknowledgeAlertAction : BaseRestHandler() { + override fun getName(): String = "acknowledge_alert_action" - override fun getName(): String { - return "acknowledge_alert_action" - } - - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() override fun replacedRoutes(): MutableList { // Acknowledge alerts @@ -48,13 +43,16 @@ class RestAcknowledgeAlertAction : BaseRestHandler() { POST, "${AlertingPlugin.MONITOR_BASE_URI}/{monitorID}/_acknowledge/alerts", POST, - "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}/_acknowledge/alerts" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}/_acknowledge/alerts", + ), ) } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.MONITOR_BASE_URI}/{monitorID}/_acknowledge/alerts") val monitorId = request.param("monitorID") diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestAcknowledgeChainedAlertsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestAcknowledgeChainedAlertsAction.kt index 34b663cf4..d8995d06c 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestAcknowledgeChainedAlertsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestAcknowledgeChainedAlertsAction.kt @@ -13,7 +13,6 @@ import org.opensearch.commons.alerting.action.AlertingActions import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import org.opensearch.rest.BaseRestHandler -import org.opensearch.rest.BaseRestHandler.RestChannelConsumer import org.opensearch.rest.RestHandler.Route import org.opensearch.rest.RestRequest import org.opensearch.rest.RestRequest.Method.POST @@ -28,24 
+27,24 @@ private val log: Logger = LogManager.getLogger(RestAcknowledgeAlertAction::class * The user provides the workflowID to which these alerts pertain and in the content of the request provides * the ids to the chained alerts user would like to acknowledge. */ -class RestAcknowledgeChainedAlertAction : BaseRestHandler() { - - override fun getName(): String { - return "acknowledge_chained_alert_action" - } +class RestAcknowledgeChainedAlertsAction : BaseRestHandler() { + override fun getName(): String = "acknowledge_chained_alert_action" override fun routes(): List { // Acknowledge alerts return mutableListOf( Route( POST, - "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}/_acknowledge/alerts" - ) + "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}/_acknowledge/alerts", + ), ) } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}/_acknowledge/alerts") val workflowId = request.param("workflowID") diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteAlertingCommentAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteAlertingCommentAction.kt index 0c5f956db..db83aa2df 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteAlertingCommentAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteAlertingCommentAction.kt @@ -23,22 +23,21 @@ private val log: Logger = LogManager.getLogger(RestDeleteMonitorAction::class.ja * Rest handlers to create and update comments. 
*/ class RestDeleteAlertingCommentAction : BaseRestHandler() { + override fun getName(): String = "delete_alerting_comment_action" - override fun getName(): String { - return "delete_alerting_comment_action" - } - - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( Route( RestRequest.Method.DELETE, - "${AlertingPlugin.COMMENTS_BASE_URI}/{id}" - ) + "${AlertingPlugin.COMMENTS_BASE_URI}/{id}", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.info("${request.method()} ${AlertingPlugin.COMMENTS_BASE_URI}/{id}") val commentId = request.param("id") val deleteMonitorRequest = DeleteCommentRequest(commentId) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteMonitorAction.kt index bebad4bd1..a987118d2 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteMonitorAction.kt @@ -22,34 +22,32 @@ import org.opensearch.transport.client.node.NodeClient import java.io.IOException private val log: Logger = LogManager.getLogger(RestDeleteMonitorAction::class.java) + /** * This class consists of the REST handler to delete monitors. * When a monitor is deleted, all alerts are moved to the [Alert.State.DELETED] state and moved to the alert history index. * If this process fails the monitor is not deleted. 
*/ class RestDeleteMonitorAction : BaseRestHandler() { + override fun getName(): String = "delete_monitor_action" - override fun getName(): String { - return "delete_monitor_action" - } + override fun routes(): List = listOf() - override fun routes(): List { - return listOf() - } - - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( ReplacedRoute( DELETE, "${AlertingPlugin.MONITOR_BASE_URI}/{monitorID}", DELETE, - "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.MONITOR_BASE_URI}/{monitorID}") val monitorId = request.param("monitorID") diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteWorkflowAction.kt index db64181ef..0fef82a2a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestDeleteWorkflowAction.kt @@ -22,24 +22,23 @@ import java.io.IOException * This class consists of the REST handler to delete workflows. 
*/ class RestDeleteWorkflowAction : BaseRestHandler() { - private val log = LogManager.getLogger(javaClass) - override fun getName(): String { - return "delete_workflow_action" - } + override fun getName(): String = "delete_workflow_action" - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( RestHandler.Route( RestRequest.Method.DELETE, - "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}" - ) + "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}") val workflowId = request.param("workflowID") @@ -52,8 +51,9 @@ class RestDeleteWorkflowAction : BaseRestHandler() { return RestChannelConsumer { channel -> client.execute( - AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, deleteWorkflowRequest, - RestToXContentListener(channel) + AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, + deleteWorkflowRequest, + RestToXContentListener(channel), ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteMonitorAction.kt index 4dd4f588b..70184319b 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteMonitorAction.kt @@ -27,31 +27,30 @@ import java.time.Instant private val log = LogManager.getLogger(RestExecuteMonitorAction::class.java) class RestExecuteMonitorAction : BaseRestHandler() { - override fun getName(): String = "execute_monitor_action" - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - 
return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( ReplacedRoute( POST, "${AlertingPlugin.MONITOR_BASE_URI}/{monitorID}/_execute", POST, - "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}/_execute" + "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}/_execute", ), ReplacedRoute( POST, "${AlertingPlugin.MONITOR_BASE_URI}/_execute", POST, - "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/_execute" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/_execute", + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.MONITOR_BASE_URI}/_execute") return RestChannelConsumer { channel -> @@ -79,7 +78,5 @@ class RestExecuteMonitorAction : BaseRestHandler() { } } - override fun responseParams(): Set { - return setOf("dryrun", "period_end", "monitorID") - } + override fun responseParams(): Set = setOf("dryrun", "period_end", "monitorID") } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt index 096f03010..27ba53f54 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestExecuteWorkflowAction.kt @@ -23,16 +23,17 @@ import java.time.Instant private val log = LogManager.getLogger(RestExecuteWorkflowAction::class.java) class RestExecuteWorkflowAction : BaseRestHandler() { - override fun getName(): String = "execute_workflow_action" - override fun routes(): List { - return listOf( - RestHandler.Route(RestRequest.Method.POST, "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}/_execute") + override fun routes(): List = + listOf( + 
RestHandler.Route(RestRequest.Method.POST, "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}/_execute"), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.WORKFLOW_BASE_URI}/_execute") return RestChannelConsumer { channel -> @@ -53,7 +54,5 @@ class RestExecuteWorkflowAction : BaseRestHandler() { } } - override fun responseParams(): Set { - return setOf("dryrun", "period_end", "workflowID") - } + override fun responseParams(): Set = setOf("dryrun", "period_end", "workflowID") } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetAlertsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetAlertsAction.kt index d147aa299..9b1a6bde1 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetAlertsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetAlertsAction.kt @@ -23,29 +23,26 @@ import org.opensearch.transport.client.node.NodeClient * This class consists of the REST handler to retrieve alerts . 
*/ class RestGetAlertsAction : BaseRestHandler() { - private val log = LogManager.getLogger(RestGetAlertsAction::class.java) - override fun getName(): String { - return "get_alerts_action" - } + override fun getName(): String = "get_alerts_action" - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( ReplacedRoute( GET, "${AlertingPlugin.MONITOR_BASE_URI}/alerts", GET, - "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/alerts" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/alerts", + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.MONITOR_BASE_URI}/alerts") val sortString = request.param("sortString", "monitor_name.keyword") @@ -64,18 +61,18 @@ class RestGetAlertsAction : BaseRestHandler() { } else { workflowIds.add("") } - val table = Table( - sortOrder, - sortString, - missing, - size, - startIndex, - searchString - ) + val table = + Table( + sortOrder, + sortString, + missing, + size, + startIndex, + searchString, + ) val getAlertsRequest = GetAlertsRequest(table, severityLevel, alertState, monitorId, null, workflowIds = workflowIds) - return RestChannelConsumer { - channel -> + return RestChannelConsumer { channel -> client.execute(AlertingActions.GET_ALERTS_ACTION_TYPE, getAlertsRequest, RestToXContentListener(channel)) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetDestinationsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetDestinationsAction.kt index 2323aa80f..c0f94e8f5 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetDestinationsAction.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetDestinationsAction.kt @@ -25,36 +25,33 @@ import org.opensearch.transport.client.node.NodeClient * This class consists of the REST handler to retrieve destinations . */ class RestGetDestinationsAction : BaseRestHandler() { - private val log = LogManager.getLogger(RestGetDestinationsAction::class.java) - override fun getName(): String { - return "get_destinations_action" - } + override fun getName(): String = "get_destinations_action" - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( // Get a specific destination ReplacedRoute( RestRequest.Method.GET, "${AlertingPlugin.DESTINATION_BASE_URI}/{destinationID}", RestRequest.Method.GET, - "${AlertingPlugin.LEGACY_OPENDISTRO_DESTINATION_BASE_URI}/{destinationID}" + "${AlertingPlugin.LEGACY_OPENDISTRO_DESTINATION_BASE_URI}/{destinationID}", ), ReplacedRoute( RestRequest.Method.GET, AlertingPlugin.DESTINATION_BASE_URI, RestRequest.Method.GET, - AlertingPlugin.LEGACY_OPENDISTRO_DESTINATION_BASE_URI - ) + AlertingPlugin.LEGACY_OPENDISTRO_DESTINATION_BASE_URI, + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${request.path()}") val destinationId: String? 
= request.param("destinationID") @@ -72,24 +69,25 @@ class RestGetDestinationsAction : BaseRestHandler() { val searchString = request.param("searchString", "") val destinationType = request.param("destinationType", "ALL") - val table = Table( - sortOrder, - sortString, - missing, - size, - startIndex, - searchString - ) + val table = + Table( + sortOrder, + sortString, + missing, + size, + startIndex, + searchString, + ) - val getDestinationsRequest = GetDestinationsRequest( - destinationId, - RestActions.parseVersion(request), - srcContext, - table, - destinationType - ) - return RestChannelConsumer { - channel -> + val getDestinationsRequest = + GetDestinationsRequest( + destinationId, + RestActions.parseVersion(request), + srcContext, + table, + destinationType, + ) + return RestChannelConsumer { channel -> client.execute(GetDestinationsAction.INSTANCE, getDestinationsRequest, RestToXContentListener(channel)) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetEmailAccountAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetEmailAccountAction.kt index 0190cc490..087ce7282 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetEmailAccountAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetEmailAccountAction.kt @@ -23,33 +23,30 @@ import java.lang.IllegalArgumentException * Rest handler to retrieve an EmailAccount. 
*/ class RestGetEmailAccountAction : BaseRestHandler() { + override fun getName(): String = "get_email_account_action" - override fun getName(): String { - return "get_email_account_action" - } - - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( ReplacedRoute( RestRequest.Method.GET, "${AlertingPlugin.EMAIL_ACCOUNT_BASE_URI}/{emailAccountID}", RestRequest.Method.GET, - "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_ACCOUNT_BASE_URI}/{emailAccountID}" + "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_ACCOUNT_BASE_URI}/{emailAccountID}", ), ReplacedRoute( RestRequest.Method.HEAD, "${AlertingPlugin.EMAIL_ACCOUNT_BASE_URI}/{emailAccountID}", RestRequest.Method.HEAD, - "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_ACCOUNT_BASE_URI}/{emailAccountID}" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_ACCOUNT_BASE_URI}/{emailAccountID}", + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { val emailAccountID = request.param("emailAccountID") if (emailAccountID == null || emailAccountID.isEmpty()) { throw IllegalArgumentException("Missing email account ID") diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetEmailGroupAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetEmailGroupAction.kt index 8b654bdb7..e3929f38d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetEmailGroupAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetEmailGroupAction.kt @@ -23,33 +23,30 @@ import java.lang.IllegalArgumentException * Rest handlers to retrieve an EmailGroup */ class RestGetEmailGroupAction : BaseRestHandler() { + override fun getName(): String = 
"get_email_group_action" - override fun getName(): String { - return "get_email_group_action" - } - - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( ReplacedRoute( RestRequest.Method.GET, "${AlertingPlugin.EMAIL_GROUP_BASE_URI}/{emailGroupID}", RestRequest.Method.GET, - "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_GROUP_BASE_URI}/{emailGroupID}" + "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_GROUP_BASE_URI}/{emailGroupID}", ), ReplacedRoute( RestRequest.Method.HEAD, "${AlertingPlugin.EMAIL_GROUP_BASE_URI}/{emailGroupID}", RestRequest.Method.HEAD, - "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_GROUP_BASE_URI}/{emailGroupID}" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_GROUP_BASE_URI}/{emailGroupID}", + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { val emailGroupID = request.param("emailGroupID") if (emailGroupID == null || emailGroupID.isEmpty()) { throw IllegalArgumentException("Missing email group ID") diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetFindingsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetFindingsAction.kt index ebcb447c3..6928877bb 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetFindingsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetFindingsAction.kt @@ -22,20 +22,19 @@ import org.opensearch.transport.client.node.NodeClient * This class consists of the REST handler to search findings . 
*/ class RestGetFindingsAction : BaseRestHandler() { - private val log = LogManager.getLogger(RestGetFindingsAction::class.java) - override fun getName(): String { - return "get_findings_action" - } + override fun getName(): String = "get_findings_action" - override fun routes(): List { - return listOf( - Route(GET, "${AlertingPlugin.FINDING_BASE_URI}/_search") + override fun routes(): List = + listOf( + Route(GET, "${AlertingPlugin.FINDING_BASE_URI}/_search"), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.info("${request.method()} ${request.path()}") val findingID: String? = request.param("findingId") @@ -46,21 +45,22 @@ class RestGetFindingsAction : BaseRestHandler() { val startIndex = request.paramAsInt("startIndex", 0) val searchString = request.param("searchString", "") - val table = Table( - sortOrder, - sortString, - missing, - size, - startIndex, - searchString - ) + val table = + Table( + sortOrder, + sortString, + missing, + size, + startIndex, + searchString, + ) - val getFindingsSearchRequest = GetFindingsRequest( - findingID, - table - ) - return RestChannelConsumer { - channel -> + val getFindingsSearchRequest = + GetFindingsRequest( + findingID, + table, + ) + return RestChannelConsumer { channel -> client.execute(AlertingActions.GET_FINDINGS_ACTION_TYPE, getFindingsSearchRequest, RestToXContentListener(channel)) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetMonitorAction.kt index 888038883..8717cca91 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetMonitorAction.kt @@ -27,34 +27,31 @@ private val log = 
LogManager.getLogger(RestGetMonitorAction::class.java) * This class consists of the REST handler to retrieve a monitor . */ class RestGetMonitorAction : BaseRestHandler() { + override fun getName(): String = "get_monitor_action" - override fun getName(): String { - return "get_monitor_action" - } - - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( // Get a specific monitor ReplacedRoute( GET, "${AlertingPlugin.MONITOR_BASE_URI}/{monitorID}", GET, - "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}" + "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}", ), ReplacedRoute( HEAD, "${AlertingPlugin.MONITOR_BASE_URI}/{monitorID}", HEAD, - "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}", + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.MONITOR_BASE_URI}/{monitorID}") val monitorId = request.param("monitorID") @@ -67,8 +64,7 @@ class RestGetMonitorAction : BaseRestHandler() { srcContext = FetchSourceContext.DO_NOT_FETCH_SOURCE } val getMonitorRequest = GetMonitorRequest(monitorId, RestActions.parseVersion(request), request.method(), srcContext) - return RestChannelConsumer { - channel -> + return RestChannelConsumer { channel -> client.execute(AlertingActions.GET_MONITOR_ACTION_TYPE, getMonitorRequest, RestToXContentListener(channel)) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetRemoteIndexesAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetRemoteIndexesAction.kt index 5187e2a05..720c72ecf 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetRemoteIndexesAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetRemoteIndexesAction.kt @@ -23,29 +23,28 @@ class RestGetRemoteIndexesAction : BaseRestHandler() { val ROUTE = "${AlertingPlugin.REMOTE_BASE_URI}/indexes" } - override fun getName(): String { - return "get_remote_indexes_action" - } + override fun getName(): String = "get_remote_indexes_action" - override fun routes(): List { - return mutableListOf( - RestHandler.Route(RestRequest.Method.GET, ROUTE) + override fun routes(): List = + mutableListOf( + RestHandler.Route(RestRequest.Method.GET, ROUTE), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.info("${request.method()} $ROUTE") val indexes = Strings.splitStringByCommaToArray(request.param(GetRemoteIndexesRequest.INDEXES_FIELD, "")) val includeMappings = request.paramAsBoolean(GetRemoteIndexesRequest.INCLUDE_MAPPINGS_FIELD, false) - return RestChannelConsumer { - channel -> + return RestChannelConsumer { channel -> client.execute( GetRemoteIndexesAction.INSTANCE, GetRemoteIndexesRequest( indexes = indexes.toList(), - includeMappings = includeMappings + includeMappings = includeMappings, ), - RestToXContentListener(channel) + RestToXContentListener(channel), ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetWorkflowAction.kt index 6af0b4604..bc1b558bc 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetWorkflowAction.kt @@ -21,23 +21,22 @@ import org.opensearch.transport.client.node.NodeClient * This class consists of the REST handler to retrieve a 
workflow . */ class RestGetWorkflowAction : BaseRestHandler() { - private val log = LogManager.getLogger(javaClass) - override fun getName(): String { - return "get_workflow_action" - } + override fun getName(): String = "get_workflow_action" - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( RestHandler.Route( RestRequest.Method.GET, - "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}" - ) + "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}", + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}") val workflowId = request.param("workflowID") @@ -51,8 +50,7 @@ class RestGetWorkflowAction : BaseRestHandler() { } val getWorkflowRequest = GetWorkflowRequest(workflowId, request.method()) - return RestChannelConsumer { - channel -> + return RestChannelConsumer { channel -> client.execute(AlertingActions.GET_WORKFLOW_ACTION_TYPE, getWorkflowRequest, RestToXContentListener(channel)) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetWorkflowAlertsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetWorkflowAlertsAction.kt index 5356ba730..21fbe03c9 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetWorkflowAlertsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestGetWorkflowAlertsAction.kt @@ -23,27 +23,24 @@ import org.opensearch.transport.client.node.NodeClient * This class consists of the REST handler to retrieve chained alerts by workflow id. 
*/ class RestGetWorkflowAlertsAction : BaseRestHandler() { - private val log = LogManager.getLogger(RestGetWorkflowAlertsAction::class.java) - override fun getName(): String { - return "get_workflow_alerts_action" - } + override fun getName(): String = "get_workflow_alerts_action" - override fun routes(): List { - return mutableListOf( + override fun routes(): List = + mutableListOf( Route( GET, - "${AlertingPlugin.WORKFLOW_BASE_URI}/alerts" - ) + "${AlertingPlugin.WORKFLOW_BASE_URI}/alerts", + ), ) - } - override fun replacedRoutes(): MutableList { - return mutableListOf() - } + override fun replacedRoutes(): MutableList = mutableListOf() - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.WORKFLOW_BASE_URI}/alerts") val sortString = request.param("sortString", "monitor_name.keyword") @@ -65,26 +62,28 @@ class RestGetWorkflowAlertsAction : BaseRestHandler() { if (alertId.isNullOrEmpty() == false) { alertIds.add(alertId) } - val table = Table( - sortOrder, - sortString, - missing, - size, - startIndex, - searchString - ) + val table = + Table( + sortOrder, + sortString, + missing, + size, + startIndex, + searchString, + ) - val getWorkflowAlertsRequest = GetWorkflowAlertsRequest( - table, - severityLevel, - alertState, - alertIndex = null, - associatedAlertsIndex = null, - workflowIds = workflowIds, - monitorIds = emptyList(), - getAssociatedAlerts = getAssociatedAlerts, - alertIds = alertIds - ) + val getWorkflowAlertsRequest = + GetWorkflowAlertsRequest( + table, + severityLevel, + alertState, + alertIndex = null, + associatedAlertsIndex = null, + workflowIds = workflowIds, + monitorIds = emptyList(), + getAssociatedAlerts = getAssociatedAlerts, + alertIds = alertIds, + ) return RestChannelConsumer { channel -> 
client.execute(AlertingActions.GET_WORKFLOW_ALERTS_ACTION_TYPE, getWorkflowAlertsRequest, RestToXContentListener(channel)) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexAlertingCommentAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexAlertingCommentAction.kt index 0b9905ebb..fec5cdea9 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexAlertingCommentAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexAlertingCommentAction.kt @@ -34,32 +34,32 @@ private val log = LogManager.getLogger(RestIndexMonitorAction::class.java) * Rest handlers to create and update alerting comments. */ class RestIndexAlertingCommentAction : BaseRestHandler() { + override fun getName(): String = "index_alerting_comment_action" - override fun getName(): String { - return "index_alerting_comment_action" - } - - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( Route( RestRequest.Method.POST, - "${AlertingPlugin.COMMENTS_BASE_URI}/{id}" + "${AlertingPlugin.COMMENTS_BASE_URI}/{id}", ), Route( RestRequest.Method.PUT, - "${AlertingPlugin.COMMENTS_BASE_URI}/{id}" - ) + "${AlertingPlugin.COMMENTS_BASE_URI}/{id}", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.info("${request.method()} ${AlertingPlugin.COMMENTS_BASE_URI}") - val id = request.param( - "id", - if (request.method() == RestRequest.Method.POST) Alert.NO_ID else Comment.NO_ID - ) + val id = + request.param( + "id", + if (request.method() == RestRequest.Method.POST) Alert.NO_ID else Comment.NO_ID, + ) if (request.method() == RestRequest.Method.POST && Alert.NO_ID == id) { throw AlertingException.wrap(IllegalArgumentException("Missing alert ID")) } else if (request.method() == 
RestRequest.Method.PUT && Comment.NO_ID == id) { @@ -76,29 +76,33 @@ class RestIndexAlertingCommentAction : BaseRestHandler() { val seqNo = request.paramAsLong(IF_SEQ_NO, SequenceNumbers.UNASSIGNED_SEQ_NO) val primaryTerm = request.paramAsLong(IF_PRIMARY_TERM, SequenceNumbers.UNASSIGNED_PRIMARY_TERM) - val indexCommentRequest = IndexCommentRequest( - alertId, - "alert", - commentId, - seqNo, - primaryTerm, - request.method(), - content - ) + val indexCommentRequest = + IndexCommentRequest( + alertId, + "alert", + commentId, + seqNo, + primaryTerm, + request.method(), + content, + ) return RestChannelConsumer { channel -> client.execute(AlertingActions.INDEX_COMMENT_ACTION_TYPE, indexCommentRequest, indexCommentResponse(channel, request.method())) } } - private fun indexCommentResponse(channel: RestChannel, restMethod: RestRequest.Method): - RestResponseListener { + private fun indexCommentResponse( + channel: RestChannel, + restMethod: RestRequest.Method, + ): RestResponseListener { return object : RestResponseListener(channel) { @Throws(Exception::class) override fun buildResponse(response: IndexCommentResponse): RestResponse { var returnStatus = RestStatus.CREATED - if (restMethod == RestRequest.Method.PUT) + if (restMethod == RestRequest.Method.PUT) { returnStatus = RestStatus.OK + } val restResponse = BytesRestResponse(returnStatus, response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS)) if (returnStatus == RestStatus.CREATED) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexMonitorAction.kt index 5f753edd1..063c82122 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexMonitorAction.kt @@ -43,7 +43,7 @@ import org.opensearch.rest.action.RestResponseListener import 
org.opensearch.transport.client.node.NodeClient import java.io.IOException import java.time.Instant -import java.util.* +import java.util.Locale private val log = LogManager.getLogger(RestIndexMonitorAction::class.java) @@ -51,34 +51,31 @@ private val log = LogManager.getLogger(RestIndexMonitorAction::class.java) * Rest handlers to create and update monitors. */ class RestIndexMonitorAction : BaseRestHandler() { + override fun getName(): String = "index_monitor_action" - override fun getName(): String { - return "index_monitor_action" - } - - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( ReplacedRoute( POST, AlertingPlugin.MONITOR_BASE_URI, POST, - AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI + AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI, ), ReplacedRoute( PUT, "${AlertingPlugin.MONITOR_BASE_URI}/{monitorID}", PUT, - "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/{monitorID}", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.MONITOR_BASE_URI}") val id = request.param("monitorID", Monitor.NO_ID) @@ -143,11 +140,12 @@ class RestIndexMonitorAction : BaseRestHandler() { val seqNo = request.paramAsLong(IF_SEQ_NO, SequenceNumbers.UNASSIGNED_SEQ_NO) val primaryTerm = request.paramAsLong(IF_PRIMARY_TERM, SequenceNumbers.UNASSIGNED_PRIMARY_TERM) - val refreshPolicy = if (request.hasParam(REFRESH)) { - WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) - } else { - WriteRequest.RefreshPolicy.IMMEDIATE - } + val refreshPolicy = + if (request.hasParam(REFRESH)) { + 
WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) + } else { + WriteRequest.RefreshPolicy.IMMEDIATE + } val indexMonitorRequest = IndexMonitorRequest(id, seqNo, primaryTerm, refreshPolicy, request.method(), monitor, rbacRoles) return RestChannelConsumer { channel -> @@ -161,7 +159,7 @@ class RestIndexMonitorAction : BaseRestHandler() { if (!isValidName(dlq.name)) { throw IllegalArgumentException( "Doc level query name may not start with [_, +, -], contain '..', or contain: " + - getInvalidNameChars().replace("\\", "") + getInvalidNameChars().replace("\\", ""), ) } } @@ -180,14 +178,17 @@ class RestIndexMonitorAction : BaseRestHandler() { } } - private fun indexMonitorResponse(channel: RestChannel, restMethod: RestRequest.Method): - RestResponseListener { + private fun indexMonitorResponse( + channel: RestChannel, + restMethod: RestRequest.Method, + ): RestResponseListener { return object : RestResponseListener(channel) { @Throws(Exception::class) override fun buildResponse(response: IndexMonitorResponse): RestResponse { var returnStatus = RestStatus.CREATED - if (restMethod == RestRequest.Method.PUT) + if (restMethod == RestRequest.Method.PUT) { returnStatus = RestStatus.OK + } val restResponse = BytesRestResponse(returnStatus, response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS)) if (returnStatus == RestStatus.CREATED) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexWorkflowAction.kt index 032c7921b..2d87fd2ce 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestIndexWorkflowAction.kt @@ -35,23 +35,22 @@ import java.time.Instant * Rest handlers to create and update workflows. 
*/ class RestIndexWorkflowAction : BaseRestHandler() { + override fun getName(): String = "index_workflow_action" - override fun getName(): String { - return "index_workflow_action" - } - - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( RestHandler.Route(RestRequest.Method.POST, AlertingPlugin.WORKFLOW_BASE_URI), RestHandler.Route( RestRequest.Method.PUT, - "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}" - ) + "${AlertingPlugin.WORKFLOW_BASE_URI}/{workflowID}", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { val id = request.param("workflowID", Workflow.NO_ID) if (request.method() == RestRequest.Method.PUT && Workflow.NO_ID == id) { throw AlertingException.wrap(IllegalArgumentException("Missing workflow ID")) @@ -65,11 +64,12 @@ class RestIndexWorkflowAction : BaseRestHandler() { val seqNo = request.paramAsLong(IF_SEQ_NO, SequenceNumbers.UNASSIGNED_SEQ_NO) val primaryTerm = request.paramAsLong(IF_PRIMARY_TERM, SequenceNumbers.UNASSIGNED_PRIMARY_TERM) - val refreshPolicy = if (request.hasParam(REFRESH)) { - WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) - } else { - WriteRequest.RefreshPolicy.IMMEDIATE - } + val refreshPolicy = + if (request.hasParam(REFRESH)) { + WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) + } else { + WriteRequest.RefreshPolicy.IMMEDIATE + } val workflowRequest = IndexWorkflowRequest(id, seqNo, primaryTerm, refreshPolicy, request.method(), workflow, rbacRoles) @@ -78,13 +78,17 @@ class RestIndexWorkflowAction : BaseRestHandler() { } } - private fun indexMonitorResponse(channel: RestChannel, restMethod: RestRequest.Method): RestResponseListener { + private fun indexMonitorResponse( + channel: RestChannel, + restMethod: RestRequest.Method, + ): RestResponseListener { return object : 
RestResponseListener(channel) { @Throws(Exception::class) override fun buildResponse(response: IndexWorkflowResponse): RestResponse { var returnStatus = RestStatus.CREATED - if (restMethod == RestRequest.Method.PUT) + if (restMethod == RestRequest.Method.PUT) { returnStatus = RestStatus.OK + } val restResponse = BytesRestResponse(returnStatus, response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS)) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchAlertingCommentAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchAlertingCommentAction.kt index f2afd0195..37eadb6a5 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchAlertingCommentAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchAlertingCommentAction.kt @@ -36,36 +36,36 @@ private val log = LogManager.getLogger(RestIndexMonitorAction::class.java) /** * Rest handler to search alerting comments. 
*/ -class RestSearchAlertingCommentAction() : BaseRestHandler() { +class RestSearchAlertingCommentAction : BaseRestHandler() { + override fun getName(): String = "search_alerting_comments_action" - override fun getName(): String { - return "search_alerting_comments_action" - } - - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( Route( RestRequest.Method.GET, - "${AlertingPlugin.COMMENTS_BASE_URI}/_search" + "${AlertingPlugin.COMMENTS_BASE_URI}/_search", ), Route( RestRequest.Method.POST, - "${AlertingPlugin.COMMENTS_BASE_URI}/_search" - ) + "${AlertingPlugin.COMMENTS_BASE_URI}/_search", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.info("${request.method()} ${AlertingPlugin.COMMENTS_BASE_URI}/_search") val searchSourceBuilder = SearchSourceBuilder() searchSourceBuilder.parseXContent(request.contentOrSourceParamParser()) searchSourceBuilder.fetchSource(context(request)) - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(ALL_COMMENTS_INDEX_PATTERN) + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(ALL_COMMENTS_INDEX_PATTERN) val searchCommentRequest = SearchCommentRequest(searchRequest) return RestChannelConsumer { channel -> @@ -84,16 +84,18 @@ class RestSearchAlertingCommentAction() : BaseRestHandler() { // Swallow exception and return response as is try { for (hit in response.hits) { - XContentType.JSON.xContent().createParser( - channel.request().xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - hit.sourceAsString - ).use { hitsParser -> - hitsParser.nextToken() - val comment = Comment.parse(hitsParser, hit.id) - val xcb = comment.toXContent(jsonBuilder(), EMPTY_PARAMS) - hit.sourceRef(BytesReference.bytes(xcb)) - } + XContentType.JSON + .xContent() + 
.createParser( + channel.request().xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceAsString, + ).use { hitsParser -> + hitsParser.nextToken() + val comment = Comment.parse(hitsParser, hit.id) + val xcb = comment.toXContent(jsonBuilder(), EMPTY_PARAMS) + hit.sourceRef(BytesReference.bytes(xcb)) + } } } catch (e: Exception) { log.error("The comment parsing failed. Will return response as is.") diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailAccountAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailAccountAction.kt index 0777a9aa8..407d61f0a 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailAccountAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailAccountAction.kt @@ -35,48 +35,48 @@ import java.io.IOException * Rest handlers to search for EmailAccount */ class RestSearchEmailAccountAction : BaseRestHandler() { + override fun getName(): String = "search_email_account_action" - override fun getName(): String { - return "search_email_account_action" - } - - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( ReplacedRoute( RestRequest.Method.POST, "${AlertingPlugin.EMAIL_ACCOUNT_BASE_URI}/_search", RestRequest.Method.POST, - "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_ACCOUNT_BASE_URI}/_search" + "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_ACCOUNT_BASE_URI}/_search", ), ReplacedRoute( RestRequest.Method.GET, "${AlertingPlugin.EMAIL_ACCOUNT_BASE_URI}/_search", RestRequest.Method.GET, - "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_ACCOUNT_BASE_URI}/_search" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_ACCOUNT_BASE_URI}/_search", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: 
NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { val searchSourceBuilder = SearchSourceBuilder() searchSourceBuilder.parseXContent(request.contentOrSourceParamParser()) searchSourceBuilder.fetchSource(context(request)) // An exists query is added on top of the user's query to ensure that only documents of email_account type // are searched - searchSourceBuilder.query( - QueryBuilders.boolQuery().must(searchSourceBuilder.query()) - .filter(QueryBuilders.existsQuery(EmailAccount.EMAIL_ACCOUNT_TYPE)) - ) - .seqNoAndPrimaryTerm(true) - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(SCHEDULED_JOBS_INDEX) + searchSourceBuilder + .query( + QueryBuilders + .boolQuery() + .must(searchSourceBuilder.query()) + .filter(QueryBuilders.existsQuery(EmailAccount.EMAIL_ACCOUNT_TYPE)), + ).seqNoAndPrimaryTerm(true) + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(SCHEDULED_JOBS_INDEX) return RestChannelConsumer { channel -> client.execute(SearchEmailAccountAction.INSTANCE, searchRequest, searchEmailAccountResponse(channel)) } @@ -91,14 +91,17 @@ class RestSearchEmailAccountAction : BaseRestHandler() { } for (hit in response.hits) { - XContentType.JSON.xContent().createParser( - channel.request().xContentRegistry, - LoggingDeprecationHandler.INSTANCE, hit.sourceAsString - ).use { hitsParser -> - val emailAccount = EmailAccount.parseWithType(hitsParser, hit.id, hit.version) - val xcb = emailAccount.toXContent(jsonBuilder(), EMPTY_PARAMS) - hit.sourceRef(BytesReference.bytes(xcb)) - } + XContentType.JSON + .xContent() + .createParser( + channel.request().xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceAsString, + ).use { hitsParser -> + val emailAccount = EmailAccount.parseWithType(hitsParser, hit.id, hit.version) + val xcb = emailAccount.toXContent(jsonBuilder(), EMPTY_PARAMS) + 
hit.sourceRef(BytesReference.bytes(xcb)) + } } return BytesRestResponse(RestStatus.OK, response.toXContent(channel.newBuilder(), EMPTY_PARAMS)) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailGroupAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailGroupAction.kt index 1dcd44e66..30637294c 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailGroupAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchEmailGroupAction.kt @@ -36,48 +36,48 @@ import java.io.IOException * Rest handlers to search for EmailGroup */ class RestSearchEmailGroupAction : BaseRestHandler() { + override fun getName(): String = "search_email_group_action" - override fun getName(): String { - return "search_email_group_action" - } - - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( ReplacedRoute( RestRequest.Method.POST, "${AlertingPlugin.EMAIL_GROUP_BASE_URI}/_search", RestRequest.Method.POST, - "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_GROUP_BASE_URI}/_search" + "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_GROUP_BASE_URI}/_search", ), ReplacedRoute( RestRequest.Method.GET, "${AlertingPlugin.EMAIL_GROUP_BASE_URI}/_search", RestRequest.Method.GET, - "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_GROUP_BASE_URI}/_search" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_EMAIL_GROUP_BASE_URI}/_search", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { val searchSourceBuilder = SearchSourceBuilder() searchSourceBuilder.parseXContent(request.contentOrSourceParamParser()) 
searchSourceBuilder.fetchSource(context(request)) // An exists query is added on top of the user's query to ensure that only documents of email_group type // are searched - searchSourceBuilder.query( - QueryBuilders.boolQuery().must(searchSourceBuilder.query()) - .filter(QueryBuilders.existsQuery(EmailGroup.EMAIL_GROUP_TYPE)) - ) - .seqNoAndPrimaryTerm(true) - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(SCHEDULED_JOBS_INDEX) + searchSourceBuilder + .query( + QueryBuilders + .boolQuery() + .must(searchSourceBuilder.query()) + .filter(QueryBuilders.existsQuery(EmailGroup.EMAIL_GROUP_TYPE)), + ).seqNoAndPrimaryTerm(true) + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(SCHEDULED_JOBS_INDEX) return RestChannelConsumer { channel -> client.execute(SearchEmailGroupAction.INSTANCE, searchRequest, searchEmailGroupResponse(channel)) } @@ -92,14 +92,17 @@ class RestSearchEmailGroupAction : BaseRestHandler() { } for (hit in response.hits) { - XContentType.JSON.xContent().createParser( - channel.request().xContentRegistry, - LoggingDeprecationHandler.INSTANCE, hit.sourceAsString - ).use { hitsParser -> - val emailGroup = EmailGroup.parseWithType(hitsParser, hit.id, hit.version) - val xcb = emailGroup.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS) - hit.sourceRef(BytesReference.bytes(xcb)) - } + XContentType.JSON + .xContent() + .createParser( + channel.request().xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceAsString, + ).use { hitsParser -> + val emailGroup = EmailGroup.parseWithType(hitsParser, hit.id, hit.version) + val xcb = emailGroup.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS) + hit.sourceRef(BytesReference.bytes(xcb)) + } } return BytesRestResponse(RestStatus.OK, response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS)) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchMonitorAction.kt 
b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchMonitorAction.kt index 6a38cb85c..ad1dde5d3 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandler/RestSearchMonitorAction.kt @@ -47,43 +47,40 @@ private val log = LogManager.getLogger(RestSearchMonitorAction::class.java) */ class RestSearchMonitorAction( val settings: Settings, - clusterService: ClusterService + clusterService: ClusterService, ) : BaseRestHandler() { - @Volatile private var filterBy = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.FILTER_BY_BACKEND_ROLES) { filterBy = it } } - override fun getName(): String { - return "search_monitor_action" - } + override fun getName(): String = "search_monitor_action" - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( // Search for monitors ReplacedRoute( POST, "${AlertingPlugin.MONITOR_BASE_URI}/_search", POST, - "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/_search" + "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/_search", ), ReplacedRoute( GET, "${AlertingPlugin.MONITOR_BASE_URI}/_search", GET, - "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/_search" - ) + "${AlertingPlugin.LEGACY_OPENDISTRO_MONITOR_BASE_URI}/_search", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.MONITOR_BASE_URI}/_search") val index = request.param("index", SCHEDULED_JOBS_INDEX) @@ -95,9 +92,10 @@ class RestSearchMonitorAction( 
searchSourceBuilder.parseXContent(request.contentOrSourceParamParser()) searchSourceBuilder.fetchSource(context(request)) - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(index) + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(index) val searchMonitorRequest = SearchMonitorRequest(searchRequest) return RestChannelConsumer { channel -> @@ -116,14 +114,17 @@ class RestSearchMonitorAction( // Swallow exception and return response as is try { for (hit in response.hits) { - XContentType.JSON.xContent().createParser( - channel.request().xContentRegistry, - LoggingDeprecationHandler.INSTANCE, hit.sourceAsString - ).use { hitsParser -> - val monitor = ScheduledJob.parse(hitsParser, hit.id, hit.version) - val xcb = monitor.toXContent(jsonBuilder(), EMPTY_PARAMS) - hit.sourceRef(BytesReference.bytes(xcb)) - } + XContentType.JSON + .xContent() + .createParser( + channel.request().xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceAsString, + ).use { hitsParser -> + val monitor = ScheduledJob.parse(hitsParser, hit.id, hit.version) + val xcb = monitor.toXContent(jsonBuilder(), EMPTY_PARAMS) + hit.sourceRef(BytesReference.bytes(xcb)) + } } } catch (e: Exception) { log.error("The monitor parsing failed. Will return response as is.") diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestDeleteMonitorV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestDeleteMonitorV2Action.kt index 9756dd8b0..0b084227c 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestDeleteMonitorV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestDeleteMonitorV2Action.kt @@ -31,22 +31,21 @@ private val log: Logger = LogManager.getLogger(RestDeleteMonitorV2Action::class. 
* @opensearch.experimental */ class RestDeleteMonitorV2Action : BaseRestHandler() { + override fun getName(): String = "delete_monitor_v2_action" - override fun getName(): String { - return "delete_monitor_v2_action" - } - - override fun routes(): List { - return mutableListOf( + override fun routes(): List = + mutableListOf( Route( DELETE, - "${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}" - ) + "${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { val monitorV2Id = request.param("monitor_id") log.info("${request.method()} ${AlertingPlugin.MONITOR_V2_BASE_URI}/$monitorV2Id") diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestExecuteMonitorV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestExecuteMonitorV2Action.kt index 4c706e747..d867fca63 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestExecuteMonitorV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestExecuteMonitorV2Action.kt @@ -32,23 +32,24 @@ private val log = LogManager.getLogger(RestExecuteMonitorV2Action::class.java) * @opensearch.experimental */ class RestExecuteMonitorV2Action : BaseRestHandler() { - override fun getName(): String = "execute_monitor_v2_action" - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( Route( POST, - "${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}/_execute" + "${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}/_execute", ), Route( POST, - "${AlertingPlugin.MONITOR_V2_BASE_URI}/_execute" - ) + "${AlertingPlugin.MONITOR_V2_BASE_URI}/_execute", + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + 
request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.MONITOR_V2_BASE_URI}/_execute") return RestChannelConsumer { channel -> @@ -76,7 +77,5 @@ class RestExecuteMonitorV2Action : BaseRestHandler() { } } - override fun responseParams(): Set { - return setOf("dryrun", "period_end", "monitor_id") - } + override fun responseParams(): Set = setOf("dryrun", "period_end", "monitor_id") } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestGetAlertsV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestGetAlertsV2Action.kt index 560d243bc..1643169a3 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestGetAlertsV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestGetAlertsV2Action.kt @@ -23,23 +23,22 @@ import org.opensearch.transport.client.node.NodeClient * @opensearch.experimental */ class RestGetAlertsV2Action : BaseRestHandler() { - private val log = LogManager.getLogger(RestGetAlertsV2Action::class.java) - override fun getName(): String { - return "get_alerts_v2_action" - } + override fun getName(): String = "get_alerts_v2_action" - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( Route( GET, - "${AlertingPlugin.MONITOR_V2_BASE_URI}/alerts" - ) + "${AlertingPlugin.MONITOR_V2_BASE_URI}/alerts", + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.MONITOR_V2_BASE_URI}/alerts") val sortString = request.param("sortString", "monitor_v2_name.keyword") @@ -50,22 +49,23 @@ class RestGetAlertsV2Action : BaseRestHandler() { val searchString = request.param("searchString", "") val severityLevel = request.param("severityLevel", "ALL") val monitorId: String? 
= request.param("monitorId") - val table = Table( - sortOrder, - sortString, - missing, - size, - startIndex, - searchString - ) + val table = + Table( + sortOrder, + sortString, + missing, + size, + startIndex, + searchString, + ) - val getAlertsV2Request = GetAlertsV2Request( - table, - severityLevel, - monitorId?.let { listOf(monitorId) } - ) - return RestChannelConsumer { - channel -> + val getAlertsV2Request = + GetAlertsV2Request( + table, + severityLevel, + monitorId?.let { listOf(monitorId) }, + ) + return RestChannelConsumer { channel -> client.execute(GetAlertsV2Action.INSTANCE, getAlertsV2Request, RestToXContentListener(channel)) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestGetMonitorV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestGetMonitorV2Action.kt index 5c471156c..eecd7c457 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestGetMonitorV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestGetMonitorV2Action.kt @@ -29,25 +29,24 @@ private val log = LogManager.getLogger(RestGetMonitorV2Action::class.java) * @opensearch.experimental */ class RestGetMonitorV2Action : BaseRestHandler() { + override fun getName(): String = "get_monitor_v2_action" - override fun getName(): String { - return "get_monitor_v2_action" - } - - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( Route( GET, - "${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}" + "${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}", ), Route( HEAD, - "${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}" - ) + "${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}", + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} 
${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}") val monitorV2Id = request.param("monitor_id") @@ -65,8 +64,7 @@ class RestGetMonitorV2Action : BaseRestHandler() { } val getMonitorV2Request = GetMonitorV2Request(monitorV2Id, RestActions.parseVersion(request), srcContext) - return RestChannelConsumer { - channel -> + return RestChannelConsumer { channel -> client.execute(GetMonitorV2Action.INSTANCE, getMonitorV2Request, RestToXContentListener(channel)) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestIndexMonitorV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestIndexMonitorV2Action.kt index 8ff03e791..cf41cd734 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestIndexMonitorV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestIndexMonitorV2Action.kt @@ -35,25 +35,25 @@ private val log = LogManager.getLogger(RestIndexMonitorV2Action::class.java) * @opensearch.experimental */ class RestIndexMonitorV2Action : BaseRestHandler() { - override fun getName(): String { - return "index_monitor_v2_action" - } + override fun getName(): String = "index_monitor_v2_action" - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( Route( POST, - AlertingPlugin.MONITOR_V2_BASE_URI + AlertingPlugin.MONITOR_V2_BASE_URI, ), Route( PUT, - "${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}" - ) + "${AlertingPlugin.MONITOR_V2_BASE_URI}/{monitor_id}", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${request.path()}") val xcp = request.contentParser() @@ -71,11 +71,12 @@ class RestIndexMonitorV2Action : BaseRestHandler() { val id = request.param("monitor_id", MonitorV2.NO_ID) val seqNo = 
request.paramAsLong(IF_SEQ_NO, SequenceNumbers.UNASSIGNED_SEQ_NO) val primaryTerm = request.paramAsLong(IF_PRIMARY_TERM, SequenceNumbers.UNASSIGNED_PRIMARY_TERM) - val refreshPolicy = if (request.hasParam(REFRESH)) { - WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) - } else { - WriteRequest.RefreshPolicy.IMMEDIATE - } + val refreshPolicy = + if (request.hasParam(REFRESH)) { + WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) + } else { + WriteRequest.RefreshPolicy.IMMEDIATE + } val indexMonitorV2Request = IndexMonitorV2Request(id, seqNo, primaryTerm, refreshPolicy, request.method(), monitorV2, rbacRoles) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestSearchMonitorV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestSearchMonitorV2Action.kt index a179078f8..3f442a9b9 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestSearchMonitorV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/resthandlerv2/RestSearchMonitorV2Action.kt @@ -47,41 +47,41 @@ class RestSearchMonitorV2Action( val settings: Settings, clusterService: ClusterService, ) : BaseRestHandler() { - @Volatile private var filterBy = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) init { clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.FILTER_BY_BACKEND_ROLES) { filterBy = it } } - override fun getName(): String { - return "search_monitor_v2_action" - } + override fun getName(): String = "search_monitor_v2_action" - override fun routes(): List { - return listOf( + override fun routes(): List = + listOf( Route( POST, - "${AlertingPlugin.MONITOR_V2_BASE_URI}/_search" + "${AlertingPlugin.MONITOR_V2_BASE_URI}/_search", ), Route( GET, - "${AlertingPlugin.MONITOR_V2_BASE_URI}/_search" - ) + "${AlertingPlugin.MONITOR_V2_BASE_URI}/_search", + ), ) - } @Throws(IOException::class) - override fun prepareRequest(request: RestRequest, client: NodeClient): 
RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { log.debug("${request.method()} ${AlertingPlugin.MONITOR_V2_BASE_URI}/_search") val searchSourceBuilder = SearchSourceBuilder() searchSourceBuilder.parseXContent(request.contentOrSourceParamParser()) searchSourceBuilder.fetchSource(context(request)) - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(SCHEDULED_JOBS_INDEX) + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(SCHEDULED_JOBS_INDEX) val searchMonitorV2Request = SearchMonitorV2Request(searchRequest) return RestChannelConsumer { channel -> @@ -101,21 +101,24 @@ class RestSearchMonitorV2Action( try { for (hit in response.hits) { - XContentType.JSON.xContent().createParser( - channel.request().xContentRegistry, - LoggingDeprecationHandler.INSTANCE, hit.sourceAsString - ).use { hitsParser -> - // when reconstructing XContent, intentionally leave out - // user field in response for security reasons by - // calling ScheduledJob.toXContent instead of - // a MonitorV2's toXContentWithUser - val monitorV2 = ScheduledJob.parse(hitsParser, hit.id, hit.version) - val xcb = monitorV2.toXContent(jsonBuilder(), EMPTY_PARAMS) + XContentType.JSON + .xContent() + .createParser( + channel.request().xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceAsString, + ).use { hitsParser -> + // when reconstructing XContent, intentionally leave out + // user field in response for security reasons by + // calling ScheduledJob.toXContent instead of + // a MonitorV2's toXContentWithUser + val monitorV2 = ScheduledJob.parse(hitsParser, hit.id, hit.version) + val xcb = monitorV2.toXContent(jsonBuilder(), EMPTY_PARAMS) - // rewrite the search hit as just the MonitorV2 source, - // without the extra "monitor_v2" JSON object wrapper - hit.sourceRef(BytesReference.bytes(xcb)) - } + // rewrite the search hit as just the MonitorV2 source, + // 
without the extra "monitor_v2" JSON object wrapper + hit.sourceRef(BytesReference.bytes(xcb)) + } } } catch (e: Exception) { // Swallow exception and return response as is diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/BucketLevelTriggerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/BucketLevelTriggerExecutionContext.kt index fe6e382f8..f12212d9e 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/BucketLevelTriggerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/BucketLevelTriggerExecutionContext.kt @@ -24,19 +24,25 @@ data class BucketLevelTriggerExecutionContext( val dedupedAlerts: List = listOf(), val newAlerts: List = listOf(), val completedAlerts: List = listOf(), - override val error: Exception? = null + override val error: Exception? = null, ) : TriggerExecutionContext(monitor, results, periodStart, periodEnd, error) { - constructor( monitor: Monitor, trigger: BucketLevelTrigger, monitorRunResult: MonitorRunResult, dedupedAlerts: List = listOf(), newAlerts: List = listOf(), - completedAlerts: List = listOf() + completedAlerts: List = listOf(), ) : this( - monitor, trigger, monitorRunResult.inputResults.results, monitorRunResult.periodStart, monitorRunResult.periodEnd, - dedupedAlerts, newAlerts, completedAlerts, monitorRunResult.scriptContextError(trigger) + monitor, + trigger, + monitorRunResult.inputResults.results, + monitorRunResult.periodStart, + monitorRunResult.periodEnd, + dedupedAlerts, + newAlerts, + completedAlerts, + monitorRunResult.scriptContextError(trigger), ) /** diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/ChainedAlertTriggerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/ChainedAlertTriggerExecutionContext.kt index d7357abeb..1c101cb30 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/ChainedAlertTriggerExecutionContext.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/script/ChainedAlertTriggerExecutionContext.kt @@ -20,21 +20,19 @@ data class ChainedAlertTriggerExecutionContext( val trigger: ChainedAlertTrigger, val alertGeneratingMonitors: Set, val monitorIdToAlertIdsMap: Map>, - val alert: Alert? = null + val alert: Alert? = null, ) { - /** * Mustache templates need special permissions to reflectively introspect field names. To avoid doing this we * translate the context to a Map of Strings to primitive types, which can be accessed without reflection. */ - open fun asTemplateArg(): Map { - return mapOf( + open fun asTemplateArg(): Map = + mapOf( "monitor" to workflow.asTemplateArg(), "results" to workflowRunResult, "periodStart" to periodStart, "error" to error, "alertGeneratingMonitors" to alertGeneratingMonitors, - "monitorIdToAlertIdsMap" to monitorIdToAlertIdsMap + "monitorIdToAlertIdsMap" to monitorIdToAlertIdsMap, ) - } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/DocumentLevelTriggerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/DocumentLevelTriggerExecutionContext.kt index 543e6bdf7..dff7ed0f2 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/DocumentLevelTriggerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/DocumentLevelTriggerExecutionContext.kt @@ -19,16 +19,22 @@ data class DocumentLevelTriggerExecutionContext( val alerts: List = listOf(), val triggeredDocs: List, val relatedFindings: List, - override val error: Exception? = null + override val error: Exception? 
= null, ) : TriggerExecutionContext(monitor, results, periodStart, periodEnd, error) { - constructor( monitor: Monitor, trigger: DocumentLevelTrigger, - alerts: List = listOf() + alerts: List = listOf(), ) : this( - monitor, trigger, emptyList(), Instant.now(), Instant.now(), - alerts, emptyList(), emptyList(), null + monitor, + trigger, + emptyList(), + Instant.now(), + Instant.now(), + alerts, + emptyList(), + emptyList(), + null, ) /** diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/PPLTriggerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/PPLTriggerExecutionContext.kt index 1b95da951..f66830cc0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/PPLTriggerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/PPLTriggerExecutionContext.kt @@ -15,9 +15,8 @@ data class PPLTriggerExecutionContext( override val monitorV2: PPLSQLMonitor, override val error: Exception? = null, val pplTrigger: PPLSQLTrigger, - var pplQueryResults: JSONObject // can be a full set of PPL query results, or an individual result row + var pplQueryResults: JSONObject, // can be a full set of PPL query results, or an individual result row ) : TriggerV2ExecutionContext(monitorV2, error) { - override fun asTemplateArg(): Map { val templateArg = super.asTemplateArg().toMutableMap() templateArg[PPL_SQL_TRIGGER_FIELD] = pplTrigger.asTemplateArg() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/QueryLevelTriggerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/QueryLevelTriggerExecutionContext.kt index 9d934ef3f..ba8df3b54 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/QueryLevelTriggerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/QueryLevelTriggerExecutionContext.kt @@ -19,17 +19,21 @@ data class QueryLevelTriggerExecutionContext( override val periodStart: Instant, override val periodEnd: Instant, 
val alert: AlertContext? = null, - override val error: Exception? = null + override val error: Exception? = null, ) : TriggerExecutionContext(monitor, results, periodStart, periodEnd, error) { - constructor( monitor: Monitor, trigger: QueryLevelTrigger, monitorRunResult: MonitorRunResult, - alertContext: AlertContext? = null + alertContext: AlertContext? = null, ) : this( - monitor, trigger, monitorRunResult.inputResults.results, monitorRunResult.periodStart, monitorRunResult.periodEnd, - alertContext, monitorRunResult.scriptContextError(trigger) + monitor, + trigger, + monitorRunResult.inputResults.results, + monitorRunResult.periodStart, + monitorRunResult.periodEnd, + alertContext, + monitorRunResult.scriptContextError(trigger), ) /** diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerExecutionContext.kt index dbfd5f271..084dd2292 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerExecutionContext.kt @@ -15,26 +15,27 @@ abstract class TriggerExecutionContext( open val results: List>, open val periodStart: Instant, open val periodEnd: Instant, - open val error: Exception? = null + open val error: Exception? = null, ) { - constructor(monitor: Monitor, trigger: Trigger, monitorRunResult: MonitorRunResult<*>) : this( - monitor, monitorRunResult.inputResults.results, monitorRunResult.periodStart, - monitorRunResult.periodEnd, monitorRunResult.scriptContextError(trigger) + monitor, + monitorRunResult.inputResults.results, + monitorRunResult.periodStart, + monitorRunResult.periodEnd, + monitorRunResult.scriptContextError(trigger), ) /** * Mustache templates need special permissions to reflectively introspect field names. To avoid doing this we * translate the context to a Map of Strings to primitive types, which can be accessed without reflection. 
*/ - open fun asTemplateArg(): Map { - return mapOf( + open fun asTemplateArg(): Map = + mapOf( "monitor" to monitor.asTemplateArg(), "results" to results, "periodStart" to periodStart, "periodEnd" to periodEnd, - "error" to error + "error" to error, ) - } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerScript.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerScript.kt index a6896d004..fccdf1ba9 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerScript.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerScript.kt @@ -8,8 +8,9 @@ package org.opensearch.alerting.script import org.opensearch.script.Script import org.opensearch.script.ScriptContext -abstract class TriggerScript(_scriptParams: Map) { - +abstract class TriggerScript( + _scriptParams: Map, +) { /** * [scriptParams] are the [user-defined parameters][Script.getParams] specified in the script definition. * The [scriptParams] are defined when the script is compiled and DON'T change every time the script executes. This field @@ -17,7 +18,8 @@ abstract class TriggerScript(_scriptParams: Map) { * painless script context we surface it to the painless script as just `params` using a custom getter name. */ val scriptParams: Map = _scriptParams - @JvmName("getParams") get + @JvmName("getParams") + get companion object { /** diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerV2ExecutionContext.kt b/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerV2ExecutionContext.kt index 97384845c..9c4ec9d43 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerV2ExecutionContext.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/script/TriggerV2ExecutionContext.kt @@ -9,13 +9,11 @@ import org.opensearch.alerting.modelv2.MonitorV2 abstract class TriggerV2ExecutionContext( open val monitorV2: MonitorV2, - open val error: Exception? = null + open val error: Exception? 
= null, ) { - - open fun asTemplateArg(): Map { - return mapOf( + open fun asTemplateArg(): Map = + mapOf( "monitorV2" to monitorV2.asTemplateArg(), - "error" to error + "error" to error, ) - } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/service/DeleteMonitorService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/service/DeleteMonitorService.kt index c1a56a4c6..c9f4c7c4f 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/service/DeleteMonitorService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/service/DeleteMonitorService.kt @@ -56,7 +56,7 @@ object DeleteMonitorService : fun initialize( client: Client, - lockService: LockService + lockService: LockService, ) { DeleteMonitorService.client = client DeleteMonitorService.lockService = lockService @@ -67,7 +67,10 @@ object DeleteMonitorService : * @param monitor monitor to be deleted * @param refreshPolicy */ - suspend fun deleteMonitor(monitor: Monitor, refreshPolicy: RefreshPolicy): DeleteMonitorResponse { + suspend fun deleteMonitor( + monitor: Monitor, + refreshPolicy: RefreshPolicy, + ): DeleteMonitorResponse { val deleteResponse = deleteMonitor(monitor.id, refreshPolicy) deleteDocLevelMonitorQueriesAndIndices(monitor) deleteMetadata(monitor) @@ -81,22 +84,30 @@ object DeleteMonitorService : * @param monitorV2Id monitorV2 ID to be deleted * @param refreshPolicy */ - suspend fun deleteMonitorV2(monitorV2Id: String, refreshPolicy: RefreshPolicy): DeleteMonitorV2Response { + suspend fun deleteMonitorV2( + monitorV2Id: String, + refreshPolicy: RefreshPolicy, + ): DeleteMonitorV2Response { val deleteResponse = deleteMonitor(monitorV2Id, refreshPolicy) deleteLock(monitorV2Id) return DeleteMonitorV2Response(deleteResponse.id, deleteResponse.version) } // both Alerting v1 and v2 workflows flow through this function - private suspend fun deleteMonitor(monitorId: String, refreshPolicy: RefreshPolicy): DeleteResponse { - val deleteMonitorRequest = 
DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorId) - .setRefreshPolicy(refreshPolicy) + private suspend fun deleteMonitor( + monitorId: String, + refreshPolicy: RefreshPolicy, + ): DeleteResponse { + val deleteMonitorRequest = + DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorId) + .setRefreshPolicy(refreshPolicy) return client.suspendUntil { delete(deleteMonitorRequest, it) } } private suspend fun deleteMetadata(monitor: Monitor) { - val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, "${monitor.id}-metadata") - .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + val deleteRequest = + DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, "${monitor.id}-metadata") + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) try { val deleteResponse: DeleteResponse = client.suspendUntil { delete(deleteRequest, it) } log.debug("Monitor metadata: ${deleteResponse.id} deletion result: ${deleteResponse.result}") @@ -122,53 +133,59 @@ object DeleteMonitorService : } // Check if there's any queries from other monitors in this queryIndex, // to avoid unnecessary doc deletion, if we could just delete index completely - val searchResponse: SearchResponse = client.suspendUntil { - search( - SearchRequest(queryIndex).source( - SearchSourceBuilder() - .size(0) - .query( - QueryBuilders.boolQuery().mustNot( - QueryBuilders.matchQuery("monitor_id", monitor.id) - ) - ) - ).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN), - it - ) - } - if (searchResponse.hits.totalHits.value == 0L) { - val ack: AcknowledgedResponse = client.suspendUntil { - client.admin().indices().delete( - DeleteIndexRequest(queryIndex).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN), - it + val searchResponse: SearchResponse = + client.suspendUntil { + search( + SearchRequest(queryIndex) + .source( + SearchSourceBuilder() + .size(0) + .query( + QueryBuilders.boolQuery().mustNot( + QueryBuilders.matchQuery("monitor_id", monitor.id), + ), + ), + 
).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN), + it, ) } + if (searchResponse.hits.totalHits.value == 0L) { + val ack: AcknowledgedResponse = + client.suspendUntil { + client.admin().indices().delete( + DeleteIndexRequest(queryIndex).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN), + it, + ) + } if (ack.isAcknowledged == false) { log.error("Deletion of concrete queryIndex:$queryIndex is not ack'd!") } } else { // Delete all queries added by this monitor - val response: BulkByScrollResponse = suspendCoroutine { cont -> - DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) - .source(queryIndex) - .filter(QueryBuilders.matchQuery("monitor_id", monitor.id)) - .refresh(true) - .execute( - object : ActionListener { - override fun onResponse(response: BulkByScrollResponse) = cont.resume(response) - override fun onFailure(t: Exception) = cont.resumeWithException(t) - } - ) - } + val response: BulkByScrollResponse = + suspendCoroutine { cont -> + DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) + .source(queryIndex) + .filter(QueryBuilders.matchQuery("monitor_id", monitor.id)) + .refresh(true) + .execute( + object : ActionListener { + override fun onResponse(response: BulkByScrollResponse) = cont.resume(response) + + override fun onFailure(t: Exception) = cont.resumeWithException(t) + }, + ) + } } } } else { - val ack: AcknowledgedResponse = client.suspendUntil { - client.admin().indices().delete( - DeleteIndexRequest(monitor.dataSources.queryIndex).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN), - it - ) - } + val ack: AcknowledgedResponse = + client.suspendUntil { + client.admin().indices().delete( + DeleteIndexRequest(monitor.dataSources.queryIndex).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN), + it, + ) + } if (ack.isAcknowledged == false) { log.error("Deletion of concrete queryIndex:${monitor.dataSources.queryIndex} is not ack'd!") } @@ -195,20 +212,22 @@ object DeleteMonitorService : * 
@param monitorId id of monitor that is checked if it is a workflow delegate */ suspend fun monitorIsWorkflowDelegate(monitorId: String): Boolean { - val queryBuilder = QueryBuilders.nestedQuery( - WORKFLOW_DELEGATE_PATH, - QueryBuilders.boolQuery().must( - QueryBuilders.matchQuery( - WORKFLOW_MONITOR_PATH, - monitorId - ) - ), - ScoreMode.None - ) + val queryBuilder = + QueryBuilders.nestedQuery( + WORKFLOW_DELEGATE_PATH, + QueryBuilders.boolQuery().must( + QueryBuilders.matchQuery( + WORKFLOW_MONITOR_PATH, + monitorId, + ), + ), + ScoreMode.None, + ) try { - val searchRequest = SearchRequest() - .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) - .source(SearchSourceBuilder().query(queryBuilder)) + val searchRequest = + SearchRequest() + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + .source(SearchSourceBuilder().query(queryBuilder)) client.threadPool().threadContext.stashContext().use { val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } @@ -216,7 +235,10 @@ object DeleteMonitorService : return false } - val workflowIds = searchResponse.hits.hits.map { it.id }.joinToString() + val workflowIds = + searchResponse.hits.hits + .map { it.id } + .joinToString() log.info("Monitor $monitorId can't be deleted since it belongs to $workflowIds") return true } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/settings/AlertingSettings.kt b/alerting/src/main/kotlin/org/opensearch/alerting/settings/AlertingSettings.kt index 9f89db235..146e17256 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/settings/AlertingSettings.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/settings/AlertingSettings.kt @@ -14,7 +14,6 @@ import java.util.concurrent.TimeUnit * settings specific to [AlertingPlugin]. These settings include things like history index max age, request timeout, etc... 
*/ class AlertingSettings { - companion object { const val DEFAULT_MAX_ACTIONABLE_ALERT_COUNT = 50L const val DEFAULT_FINDINGS_INDEXING_BATCH_SIZE = 1000 @@ -25,48 +24,58 @@ class AlertingSettings { const val DEFAULT_MAX_DOC_LEVEL_MONITOR_EXECUTION_MAX_DURATION_MINUTES = 4L const val DEFAULT_FAN_OUT_NODES = 1000 - val ALERTING_MAX_MONITORS = Setting.intSetting( - "plugins.alerting.monitor.max_monitors", - LegacyOpenDistroAlertingSettings.ALERTING_MAX_MONITORS, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val ALERTING_MAX_MONITORS = + Setting.intSetting( + "plugins.alerting.monitor.max_monitors", + LegacyOpenDistroAlertingSettings.ALERTING_MAX_MONITORS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) /** Defines the threshold percentage of heap size in bytes till which we accumulate docs in memory before we query against percolate query * index in document level monitor execution. */ - val PERCOLATE_QUERY_DOCS_SIZE_MEMORY_PERCENTAGE_LIMIT = Setting.intSetting( - "plugins.alerting.monitor.percolate_query_docs_size_memory_percentage_limit", - 10, - 0, - 100, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val PERCOLATE_QUERY_DOCS_SIZE_MEMORY_PERCENTAGE_LIMIT = + Setting.intSetting( + "plugins.alerting.monitor.percolate_query_docs_size_memory_percentage_limit", + 10, + 0, + 100, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) /** Purely a setting used to verify seq_no calculation */ - val DOC_LEVEL_MONITOR_SHARD_FETCH_SIZE = Setting.intSetting( - "plugins.alerting.monitor.doc_level_monitor_shard_fetch_size", - DEFAULT_DOC_LEVEL_MONITOR_SHARD_FETCH_SIZE, - 1, - 10000, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val DOC_LEVEL_MONITOR_SHARD_FETCH_SIZE = + Setting.intSetting( + "plugins.alerting.monitor.doc_level_monitor_shard_fetch_size", + DEFAULT_DOC_LEVEL_MONITOR_SHARD_FETCH_SIZE, + 1, + 10000, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) /** Setting to help timebox doc level 
monitor fanout */ - val DOC_LEVEL_MONITOR_FANOUT_MAX_DURATION = Setting.positiveTimeSetting( - "plugins.alerting.monitor.doc_level_monitor_fanout_max_duration", - TimeValue.timeValueMinutes(DEFAULT_MAX_DOC_LEVEL_MONITOR_FANOUT_MAX_DURATION_MINUTES), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val DOC_LEVEL_MONITOR_FANOUT_MAX_DURATION = + Setting.positiveTimeSetting( + "plugins.alerting.monitor.doc_level_monitor_fanout_max_duration", + TimeValue.timeValueMinutes(DEFAULT_MAX_DOC_LEVEL_MONITOR_FANOUT_MAX_DURATION_MINUTES), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) /** Setting to help timebox doc level monitor execution */ - val DOC_LEVEL_MONITOR_EXECUTION_MAX_DURATION = Setting.positiveTimeSetting( - "plugins.alerting.monitor.doc_level_monitor_execution_max_duration", - TimeValue.timeValueMinutes(DEFAULT_MAX_DOC_LEVEL_MONITOR_EXECUTION_MAX_DURATION_MINUTES), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val DOC_LEVEL_MONITOR_EXECUTION_MAX_DURATION = + Setting.positiveTimeSetting( + "plugins.alerting.monitor.doc_level_monitor_execution_max_duration", + TimeValue.timeValueMinutes(DEFAULT_MAX_DOC_LEVEL_MONITOR_EXECUTION_MAX_DURATION_MINUTES), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) /** Defines the threshold of the maximum number of docs accumulated in memory to query against percolate query index in document * level monitor execution. 
The docs are being collected from searching on shards of indices mentioned in the @@ -74,333 +83,435 @@ class AlertingSettings { * query with the current set of docs and clear the cache and repeat the process till we have queried all indices in current * execution */ - val PERCOLATE_QUERY_MAX_NUM_DOCS_IN_MEMORY = Setting.intSetting( - "plugins.alerting.monitor.percolate_query_max_num_docs_in_memory", - DEFAULT_PERCOLATE_QUERY_NUM_DOCS_IN_MEMORY, 1000, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val PERCOLATE_QUERY_MAX_NUM_DOCS_IN_MEMORY = + Setting.intSetting( + "plugins.alerting.monitor.percolate_query_max_num_docs_in_memory", + DEFAULT_PERCOLATE_QUERY_NUM_DOCS_IN_MEMORY, + 1000, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) /** * Boolean setting to enable/disable optimizing doc level monitors by fetchign only fields mentioned in queries. * Enabled by default. If disabled, will fetch entire source of documents while fetch data from shards. */ - val DOC_LEVEL_MONITOR_FETCH_ONLY_QUERY_FIELDS_ENABLED = Setting.boolSetting( - "plugins.alerting.monitor.doc_level_monitor_query_field_names_enabled", - true, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val INPUT_TIMEOUT = Setting.positiveTimeSetting( - "plugins.alerting.input_timeout", - LegacyOpenDistroAlertingSettings.INPUT_TIMEOUT, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val INDEX_TIMEOUT = Setting.positiveTimeSetting( - "plugins.alerting.index_timeout", - LegacyOpenDistroAlertingSettings.INDEX_TIMEOUT, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val BULK_TIMEOUT = Setting.positiveTimeSetting( - "plugins.alerting.bulk_timeout", - LegacyOpenDistroAlertingSettings.BULK_TIMEOUT, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_BACKOFF_MILLIS = Setting.positiveTimeSetting( - "plugins.alerting.alert_backoff_millis", - LegacyOpenDistroAlertingSettings.ALERT_BACKOFF_MILLIS, - Setting.Property.NodeScope, 
Setting.Property.Dynamic - ) - - val ALERT_BACKOFF_COUNT = Setting.intSetting( - "plugins.alerting.alert_backoff_count", - LegacyOpenDistroAlertingSettings.ALERT_BACKOFF_COUNT, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val MOVE_ALERTS_BACKOFF_MILLIS = Setting.positiveTimeSetting( - "plugins.alerting.move_alerts_backoff_millis", - LegacyOpenDistroAlertingSettings.MOVE_ALERTS_BACKOFF_MILLIS, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val MOVE_ALERTS_BACKOFF_COUNT = Setting.intSetting( - "plugins.alerting.move_alerts_backoff_count", - LegacyOpenDistroAlertingSettings.MOVE_ALERTS_BACKOFF_COUNT, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_HISTORY_ENABLED = Setting.boolSetting( - "plugins.alerting.alert_history_enabled", - LegacyOpenDistroAlertingSettings.ALERT_HISTORY_ENABLED, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val DOC_LEVEL_MONITOR_FETCH_ONLY_QUERY_FIELDS_ENABLED = + Setting.boolSetting( + "plugins.alerting.monitor.doc_level_monitor_query_field_names_enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val INPUT_TIMEOUT = + Setting.positiveTimeSetting( + "plugins.alerting.input_timeout", + LegacyOpenDistroAlertingSettings.INPUT_TIMEOUT, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val INDEX_TIMEOUT = + Setting.positiveTimeSetting( + "plugins.alerting.index_timeout", + LegacyOpenDistroAlertingSettings.INDEX_TIMEOUT, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val BULK_TIMEOUT = + Setting.positiveTimeSetting( + "plugins.alerting.bulk_timeout", + LegacyOpenDistroAlertingSettings.BULK_TIMEOUT, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_BACKOFF_MILLIS = + Setting.positiveTimeSetting( + "plugins.alerting.alert_backoff_millis", + LegacyOpenDistroAlertingSettings.ALERT_BACKOFF_MILLIS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_BACKOFF_COUNT = + 
Setting.intSetting( + "plugins.alerting.alert_backoff_count", + LegacyOpenDistroAlertingSettings.ALERT_BACKOFF_COUNT, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val MOVE_ALERTS_BACKOFF_MILLIS = + Setting.positiveTimeSetting( + "plugins.alerting.move_alerts_backoff_millis", + LegacyOpenDistroAlertingSettings.MOVE_ALERTS_BACKOFF_MILLIS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val MOVE_ALERTS_BACKOFF_COUNT = + Setting.intSetting( + "plugins.alerting.move_alerts_backoff_count", + LegacyOpenDistroAlertingSettings.MOVE_ALERTS_BACKOFF_COUNT, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_HISTORY_ENABLED = + Setting.boolSetting( + "plugins.alerting.alert_history_enabled", + LegacyOpenDistroAlertingSettings.ALERT_HISTORY_ENABLED, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) // TODO: Do we want to let users to disable this? If so, we need to fix the rollover logic // such that the main index is findings and rolls over to the finding history index - val FINDING_HISTORY_ENABLED = Setting.boolSetting( - "plugins.alerting.alert_finding_enabled", - true, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_HISTORY_ROLLOVER_PERIOD = Setting.positiveTimeSetting( - "plugins.alerting.alert_history_rollover_period", - LegacyOpenDistroAlertingSettings.ALERT_HISTORY_ROLLOVER_PERIOD, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val FINDING_HISTORY_ROLLOVER_PERIOD = Setting.positiveTimeSetting( - "plugins.alerting.alert_finding_rollover_period", - TimeValue.timeValueHours(12), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_HISTORY_INDEX_MAX_AGE = Setting.positiveTimeSetting( - "plugins.alerting.alert_history_max_age", - LegacyOpenDistroAlertingSettings.ALERT_HISTORY_INDEX_MAX_AGE, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val FINDING_HISTORY_INDEX_MAX_AGE = Setting.positiveTimeSetting( - 
"plugins.alerting.finding_history_max_age", - TimeValue(30, TimeUnit.DAYS), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_HISTORY_MAX_DOCS = Setting.longSetting( - "plugins.alerting.alert_history_max_docs", - LegacyOpenDistroAlertingSettings.ALERT_HISTORY_MAX_DOCS, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val FINDING_HISTORY_MAX_DOCS = Setting.longSetting( - "plugins.alerting.alert_finding_max_docs", - 1000L, - 0L, - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val ALERT_HISTORY_RETENTION_PERIOD = Setting.positiveTimeSetting( - "plugins.alerting.alert_history_retention_period", - LegacyOpenDistroAlertingSettings.ALERT_HISTORY_RETENTION_PERIOD, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val FINDING_HISTORY_RETENTION_PERIOD = Setting.positiveTimeSetting( - "plugins.alerting.finding_history_retention_period", - TimeValue(60, TimeUnit.DAYS), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val REQUEST_TIMEOUT = Setting.positiveTimeSetting( - "plugins.alerting.request_timeout", - LegacyOpenDistroAlertingSettings.REQUEST_TIMEOUT, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val MAX_ACTION_THROTTLE_VALUE = Setting.positiveTimeSetting( - "plugins.alerting.action_throttle_max_value", - LegacyOpenDistroAlertingSettings.MAX_ACTION_THROTTLE_VALUE, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val FILTER_BY_BACKEND_ROLES = Setting.boolSetting( - "plugins.alerting.filter_by_backend_roles", - LegacyOpenDistroAlertingSettings.FILTER_BY_BACKEND_ROLES, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val MAX_ACTIONABLE_ALERT_COUNT = Setting.longSetting( - "plugins.alerting.max_actionable_alert_count", - DEFAULT_MAX_ACTIONABLE_ALERT_COUNT, - -1L, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) - - val CROSS_CLUSTER_MONITORING_ENABLED = Setting.boolSetting( - 
"plugins.alerting.cross_cluster_monitoring_enabled", - true, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val FINDINGS_INDEXING_BATCH_SIZE = Setting.intSetting( - "plugins.alerting.alert_findings_indexing_batch_size", - DEFAULT_FINDINGS_INDEXING_BATCH_SIZE, - 1, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val DOC_LEVEL_MONITOR_FAN_OUT_NODES = Setting.intSetting( - "plugins.alerting.monitor.doc_level_monitor_fan_out_nodes", - DEFAULT_FAN_OUT_NODES, - 1, - Int.MAX_VALUE, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERTING_COMMENTS_ENABLED = Setting.boolSetting( - "plugins.alerting.comments_enabled", - true, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val COMMENTS_HISTORY_MAX_DOCS = Setting.longSetting( - "plugins.alerting.comments_history_max_docs", - 1000L, - 0L, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val COMMENTS_HISTORY_INDEX_MAX_AGE = Setting.positiveTimeSetting( - "plugins.alerting.comments_history_max_age", - TimeValue(30, TimeUnit.DAYS), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val COMMENTS_HISTORY_ROLLOVER_PERIOD = Setting.positiveTimeSetting( - "plugins.alerting.comments_history_rollover_period", - TimeValue(12, TimeUnit.HOURS), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val COMMENTS_HISTORY_RETENTION_PERIOD = Setting.positiveTimeSetting( - "plugins.alerting.comments_history_retention_period", - TimeValue(60, TimeUnit.DAYS), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val COMMENTS_MAX_CONTENT_SIZE = Setting.longSetting( - "plugins.alerting.max_comment_character_length", - 2000L, - 0L, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val MAX_COMMENTS_PER_ALERT = Setting.longSetting( - "plugins.alerting.max_comments_per_alert", - 500L, - 0L, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val MAX_COMMENTS_PER_NOTIFICATION = Setting.intSetting( - 
"plugins.alerting.max_comments_per_notification", - 3, - 0, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_V2_HISTORY_ENABLED = Setting.boolSetting( - "plugins.alerting.v2.alert_history_enabled", - true, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_V2_HISTORY_ROLLOVER_PERIOD = Setting.positiveTimeSetting( - "plugins.alerting.v2.alert_history_rollover_period", - TimeValue(12, TimeUnit.HOURS), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_V2_HISTORY_INDEX_MAX_AGE = Setting.positiveTimeSetting( - "plugins.alerting.v2.alert_history_max_age", - TimeValue(30, TimeUnit.DAYS), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_V2_HISTORY_MAX_DOCS = Setting.longSetting( - "plugins.alerting.v2.alert_history_max_docs", - 1000L, 0L, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_V2_HISTORY_RETENTION_PERIOD = Setting.positiveTimeSetting( - "plugins.alerting.v2.alert_history_retention_period", - TimeValue(60, TimeUnit.DAYS), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_V2_MONITOR_EXECUTION_MAX_DURATION = Setting.positiveTimeSetting( - "plugins.alerting.v2.alert_monitor_execution_max_duration", - TimeValue(4, TimeUnit.MINUTES), - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERTING_V2_MAX_MONITORS = Setting.intSetting( - "plugins.alerting.v2.monitor.max_monitors", - 1000, - 1, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERTING_V2_MAX_THROTTLE_DURATION = Setting.longSetting( - "plugins.alerting.v2.monitor.max_throttle_duration", - 7200L, // 5 days, 7200 minutes - 2L, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERTING_V2_MAX_EXPIRE_DURATION = Setting.longSetting( - "plugins.alerting.v2.monitor.max_expire_duration", - 43200L, // 30 days, 43200 minutes - 2L, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERTING_V2_MAX_LOOK_BACK_WINDOW = 
Setting.longSetting( - "plugins.alerting.v2.monitor.max_look_back_window", - 10080L, // 7 days, 10080 minutes - 2L, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERTING_V2_MAX_QUERY_LENGTH = Setting.longSetting( - "plugins.alerting.v2.monitor.max_query_length", - 2000L, - 0L, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val FINDING_HISTORY_ENABLED = + Setting.boolSetting( + "plugins.alerting.alert_finding_enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_HISTORY_ROLLOVER_PERIOD = + Setting.positiveTimeSetting( + "plugins.alerting.alert_history_rollover_period", + LegacyOpenDistroAlertingSettings.ALERT_HISTORY_ROLLOVER_PERIOD, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val FINDING_HISTORY_ROLLOVER_PERIOD = + Setting.positiveTimeSetting( + "plugins.alerting.alert_finding_rollover_period", + TimeValue.timeValueHours(12), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_HISTORY_INDEX_MAX_AGE = + Setting.positiveTimeSetting( + "plugins.alerting.alert_history_max_age", + LegacyOpenDistroAlertingSettings.ALERT_HISTORY_INDEX_MAX_AGE, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val FINDING_HISTORY_INDEX_MAX_AGE = + Setting.positiveTimeSetting( + "plugins.alerting.finding_history_max_age", + TimeValue(30, TimeUnit.DAYS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_HISTORY_MAX_DOCS = + Setting.longSetting( + "plugins.alerting.alert_history_max_docs", + LegacyOpenDistroAlertingSettings.ALERT_HISTORY_MAX_DOCS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val FINDING_HISTORY_MAX_DOCS = + Setting.longSetting( + "plugins.alerting.alert_finding_max_docs", + 1000L, + 0L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ALERT_HISTORY_RETENTION_PERIOD = + Setting.positiveTimeSetting( + 
"plugins.alerting.alert_history_retention_period", + LegacyOpenDistroAlertingSettings.ALERT_HISTORY_RETENTION_PERIOD, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val FINDING_HISTORY_RETENTION_PERIOD = + Setting.positiveTimeSetting( + "plugins.alerting.finding_history_retention_period", + TimeValue(60, TimeUnit.DAYS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val REQUEST_TIMEOUT = + Setting.positiveTimeSetting( + "plugins.alerting.request_timeout", + LegacyOpenDistroAlertingSettings.REQUEST_TIMEOUT, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val MAX_ACTION_THROTTLE_VALUE = + Setting.positiveTimeSetting( + "plugins.alerting.action_throttle_max_value", + LegacyOpenDistroAlertingSettings.MAX_ACTION_THROTTLE_VALUE, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val FILTER_BY_BACKEND_ROLES = + Setting.boolSetting( + "plugins.alerting.filter_by_backend_roles", + LegacyOpenDistroAlertingSettings.FILTER_BY_BACKEND_ROLES, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val MAX_ACTIONABLE_ALERT_COUNT = + Setting.longSetting( + "plugins.alerting.max_actionable_alert_count", + DEFAULT_MAX_ACTIONABLE_ALERT_COUNT, + -1L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val CROSS_CLUSTER_MONITORING_ENABLED = + Setting.boolSetting( + "plugins.alerting.cross_cluster_monitoring_enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val FINDINGS_INDEXING_BATCH_SIZE = + Setting.intSetting( + "plugins.alerting.alert_findings_indexing_batch_size", + DEFAULT_FINDINGS_INDEXING_BATCH_SIZE, + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val DOC_LEVEL_MONITOR_FAN_OUT_NODES = + Setting.intSetting( + "plugins.alerting.monitor.doc_level_monitor_fan_out_nodes", + DEFAULT_FAN_OUT_NODES, + 1, + Int.MAX_VALUE, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERTING_COMMENTS_ENABLED = + 
Setting.boolSetting( + "plugins.alerting.comments_enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val COMMENTS_HISTORY_MAX_DOCS = + Setting.longSetting( + "plugins.alerting.comments_history_max_docs", + 1000L, + 0L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val COMMENTS_HISTORY_INDEX_MAX_AGE = + Setting.positiveTimeSetting( + "plugins.alerting.comments_history_max_age", + TimeValue(30, TimeUnit.DAYS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val COMMENTS_HISTORY_ROLLOVER_PERIOD = + Setting.positiveTimeSetting( + "plugins.alerting.comments_history_rollover_period", + TimeValue(12, TimeUnit.HOURS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val COMMENTS_HISTORY_RETENTION_PERIOD = + Setting.positiveTimeSetting( + "plugins.alerting.comments_history_retention_period", + TimeValue(60, TimeUnit.DAYS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val COMMENTS_MAX_CONTENT_SIZE = + Setting.longSetting( + "plugins.alerting.max_comment_character_length", + 2000L, + 0L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val MAX_COMMENTS_PER_ALERT = + Setting.longSetting( + "plugins.alerting.max_comments_per_alert", + 500L, + 0L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val MAX_COMMENTS_PER_NOTIFICATION = + Setting.intSetting( + "plugins.alerting.max_comments_per_notification", + 3, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_V2_HISTORY_ENABLED = + Setting.boolSetting( + "plugins.alerting.v2.alert_history_enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_V2_HISTORY_ROLLOVER_PERIOD = + Setting.positiveTimeSetting( + "plugins.alerting.v2.alert_history_rollover_period", + TimeValue(12, TimeUnit.HOURS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_V2_HISTORY_INDEX_MAX_AGE = + Setting.positiveTimeSetting( + 
"plugins.alerting.v2.alert_history_max_age", + TimeValue(30, TimeUnit.DAYS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_V2_HISTORY_MAX_DOCS = + Setting.longSetting( + "plugins.alerting.v2.alert_history_max_docs", + 1000L, + 0L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_V2_HISTORY_RETENTION_PERIOD = + Setting.positiveTimeSetting( + "plugins.alerting.v2.alert_history_retention_period", + TimeValue(60, TimeUnit.DAYS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_V2_MONITOR_EXECUTION_MAX_DURATION = + Setting.positiveTimeSetting( + "plugins.alerting.v2.alert_monitor_execution_max_duration", + TimeValue(4, TimeUnit.MINUTES), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERTING_V2_MAX_MONITORS = + Setting.intSetting( + "plugins.alerting.v2.monitor.max_monitors", + 1000, + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERTING_V2_MAX_THROTTLE_DURATION = + Setting.longSetting( + "plugins.alerting.v2.monitor.max_throttle_duration", + 7200L, // 5 days, 7200 minutes + 2L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERTING_V2_MAX_EXPIRE_DURATION = + Setting.longSetting( + "plugins.alerting.v2.monitor.max_expire_duration", + 43200L, // 30 days, 43200 minutes + 2L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERTING_V2_MAX_LOOK_BACK_WINDOW = + Setting.longSetting( + "plugins.alerting.v2.monitor.max_look_back_window", + 10080L, // 7 days, 10080 minutes + 2L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERTING_V2_MAX_QUERY_LENGTH = + Setting.longSetting( + "plugins.alerting.v2.monitor.max_query_length", + 2000L, + 0L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) // max data rows to retrieve when executing PPL query against // SQL/PPL plugin during monitor execution - val ALERTING_V2_QUERY_RESULTS_MAX_DATAROWS = Setting.longSetting( - 
"plugins.alerting.v2.query_results_max_datarows", - 10000L, - 1L, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val ALERTING_V2_QUERY_RESULTS_MAX_DATAROWS = + Setting.longSetting( + "plugins.alerting.v2.query_results_max_datarows", + 10000L, + 1L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) // max size of query results to store in alerts and notifications - val ALERT_V2_QUERY_RESULTS_MAX_SIZE = Setting.longSetting( - "plugins.alerting.v2.query_results_max_size", - 3000L, - 0L, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val ALERT_V2_PER_RESULT_TRIGGER_MAX_ALERTS = Setting.intSetting( - "plugins.alerting.v2.per_result_trigger_max_alerts", - 10, - 1, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val NOTIFICATION_SUBJECT_SOURCE_MAX_LENGTH = Setting.intSetting( - "plugins.alerting.v2.notification_subject_source_max_length", - 1000, - 100, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val NOTIFICATION_MESSAGE_SOURCE_MAX_LENGTH = Setting.intSetting( - "plugins.alerting.v2.notification_message_source_max_length", - 3000, - 1000, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val ALERT_V2_QUERY_RESULTS_MAX_SIZE = + Setting.longSetting( + "plugins.alerting.v2.query_results_max_size", + 3000L, + 0L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val ALERT_V2_PER_RESULT_TRIGGER_MAX_ALERTS = + Setting.intSetting( + "plugins.alerting.v2.per_result_trigger_max_alerts", + 10, + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val NOTIFICATION_SUBJECT_SOURCE_MAX_LENGTH = + Setting.intSetting( + "plugins.alerting.v2.notification_subject_source_max_length", + 1000, + 100, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val NOTIFICATION_MESSAGE_SOURCE_MAX_LENGTH = + Setting.intSetting( + "plugins.alerting.v2.notification_message_source_max_length", + 3000, + 1000, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) } } 
diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/settings/DestinationSettings.kt b/alerting/src/main/kotlin/org/opensearch/alerting/settings/DestinationSettings.kt index 14086ce68..0e35cb7f2 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/settings/DestinationSettings.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/settings/DestinationSettings.kt @@ -18,50 +18,53 @@ import java.util.function.Function */ class DestinationSettings { companion object { - const val DESTINATION_SETTING_PREFIX = "plugins.alerting.destination." const val EMAIL_DESTINATION_SETTING_PREFIX = DESTINATION_SETTING_PREFIX + "email." val ALLOW_LIST_NONE = emptyList() - val ALLOW_LIST: Setting> = Setting.listSetting( - DESTINATION_SETTING_PREFIX + "allow_list", - LegacyOpenDistroDestinationSettings.ALLOW_LIST, - Function.identity(), - Setting.Property.NodeScope, - Setting.Property.Dynamic - ) + val ALLOW_LIST: Setting> = + Setting.listSetting( + DESTINATION_SETTING_PREFIX + "allow_list", + LegacyOpenDistroDestinationSettings.ALLOW_LIST, + Function.identity(), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) - val EMAIL_USERNAME: Setting.AffixSetting = Setting.affixKeySetting( - EMAIL_DESTINATION_SETTING_PREFIX, - "username", - // Needed to coerce lambda to Function type for some reason to avoid argument mismatch compile error - Function { key: String -> - SecureSetting.secureString( - key, - fallback(key, LegacyOpenDistroDestinationSettings.EMAIL_USERNAME, "plugins", "opendistro") - ) - } - ) + val EMAIL_USERNAME: Setting.AffixSetting = + Setting.affixKeySetting( + EMAIL_DESTINATION_SETTING_PREFIX, + "username", + // Needed to coerce lambda to Function type for some reason to avoid argument mismatch compile error + Function { key: String -> + SecureSetting.secureString( + key, + fallback(key, LegacyOpenDistroDestinationSettings.EMAIL_USERNAME, "plugins", "opendistro"), + ) + }, + ) - val EMAIL_PASSWORD: Setting.AffixSetting = 
Setting.affixKeySetting( - EMAIL_DESTINATION_SETTING_PREFIX, - "password", - // Needed to coerce lambda to Function type for some reason to avoid argument mismatch compile error - Function { key: String -> - SecureSetting.secureString( - key, - fallback(key, LegacyOpenDistroDestinationSettings.EMAIL_PASSWORD, "plugins", "opendistro") - ) - } - ) + val EMAIL_PASSWORD: Setting.AffixSetting = + Setting.affixKeySetting( + EMAIL_DESTINATION_SETTING_PREFIX, + "password", + // Needed to coerce lambda to Function type for some reason to avoid argument mismatch compile error + Function { key: String -> + SecureSetting.secureString( + key, + fallback(key, LegacyOpenDistroDestinationSettings.EMAIL_PASSWORD, "plugins", "opendistro"), + ) + }, + ) - val HOST_DENY_LIST: Setting> = Setting.listSetting( - "plugins.destination.host.deny_list", - LegacyOpenDistroDestinationSettings.HOST_DENY_LIST, - Function.identity(), - Setting.Property.NodeScope, - Setting.Property.Final - ) + val HOST_DENY_LIST: Setting> = + Setting.listSetting( + "plugins.destination.host.deny_list", + LegacyOpenDistroDestinationSettings.HOST_DENY_LIST, + Function.identity(), + Setting.Property.NodeScope, + Setting.Property.Final, + ) fun loadDestinationSettings(settings: Settings): Map { // Only loading Email Destination settings for now since those are the only secure settings needed. @@ -80,7 +83,10 @@ class DestinationSettings { return emailAccounts } - private fun getSecureDestinationSettings(settings: Settings, emailAccountName: String): SecureDestinationSettings? { + private fun getSecureDestinationSettings( + settings: Settings, + emailAccountName: String, + ): SecureDestinationSettings? { // Using 'use' to emulate Java's try-with-resources on multiple closeable resources. // Values are cloned so that we maintain a SecureString, the original SecureStrings will be closed after // they have left the scope of this function. 
@@ -91,19 +97,30 @@ class DestinationSettings { } } - private fun getEmailSettingValue(settings: Settings, emailAccountName: String, emailSetting: Setting.AffixSetting): T? { + private fun getEmailSettingValue( + settings: Settings, + emailAccountName: String, + emailSetting: Setting.AffixSetting, + ): T? { val concreteSetting = emailSetting.getConcreteSettingForNamespace(emailAccountName) return concreteSetting.get(settings) } - private fun fallback(key: String, affixSetting: AffixSetting, regex: String, replacement: String): Setting? { - return if ("_na_" == key) { + private fun fallback( + key: String, + affixSetting: AffixSetting, + regex: String, + replacement: String, + ): Setting? = + if ("_na_" == key) { affixSetting.getConcreteSettingForNamespace(key) } else { affixSetting.getConcreteSetting(key.replace(regex.toRegex(), replacement)) } - } - data class SecureDestinationSettings(val emailUsername: SecureString, val emailPassword: SecureString) + data class SecureDestinationSettings( + val emailUsername: SecureString, + val emailPassword: SecureString, + ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroAlertingSettings.kt b/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroAlertingSettings.kt index 84c000150..099a927b3 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroAlertingSettings.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroAlertingSettings.kt @@ -14,110 +14,159 @@ import java.util.concurrent.TimeUnit */ class LegacyOpenDistroAlertingSettings { - companion object { - - val ALERTING_MAX_MONITORS = Setting.intSetting( - "opendistro.alerting.monitor.max_monitors", - 1000, - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val INPUT_TIMEOUT = Setting.positiveTimeSetting( - "opendistro.alerting.input_timeout", - TimeValue.timeValueSeconds(30), - Setting.Property.NodeScope, 
Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val INDEX_TIMEOUT = Setting.positiveTimeSetting( - "opendistro.alerting.index_timeout", - TimeValue.timeValueSeconds(60), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val BULK_TIMEOUT = Setting.positiveTimeSetting( - "opendistro.alerting.bulk_timeout", - TimeValue.timeValueSeconds(120), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val ALERT_BACKOFF_MILLIS = Setting.positiveTimeSetting( - "opendistro.alerting.alert_backoff_millis", - TimeValue.timeValueMillis(50), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val ALERT_BACKOFF_COUNT = Setting.intSetting( - "opendistro.alerting.alert_backoff_count", - 2, - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val MOVE_ALERTS_BACKOFF_MILLIS = Setting.positiveTimeSetting( - "opendistro.alerting.move_alerts_backoff_millis", - TimeValue.timeValueMillis(250), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val MOVE_ALERTS_BACKOFF_COUNT = Setting.intSetting( - "opendistro.alerting.move_alerts_backoff_count", - 3, - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val ALERT_HISTORY_ENABLED = Setting.boolSetting( - "opendistro.alerting.alert_history_enabled", - true, - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val ALERT_HISTORY_ROLLOVER_PERIOD = Setting.positiveTimeSetting( - "opendistro.alerting.alert_history_rollover_period", - TimeValue.timeValueHours(12), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val ALERT_HISTORY_INDEX_MAX_AGE = Setting.positiveTimeSetting( - "opendistro.alerting.alert_history_max_age", - TimeValue(30, TimeUnit.DAYS), - Setting.Property.NodeScope, Setting.Property.Dynamic, 
Setting.Property.Deprecated - ) - - val ALERT_HISTORY_MAX_DOCS = Setting.longSetting( - "opendistro.alerting.alert_history_max_docs", - 1000L, - 0L, - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val ALERT_HISTORY_RETENTION_PERIOD = Setting.positiveTimeSetting( - "opendistro.alerting.alert_history_retention_period", - TimeValue(60, TimeUnit.DAYS), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val ALERT_FINDING_RETENTION_PERIOD = Setting.positiveTimeSetting( - "opendistro.alerting.alert_finding_retention_period", - TimeValue(60, TimeUnit.DAYS), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val REQUEST_TIMEOUT = Setting.positiveTimeSetting( - "opendistro.alerting.request_timeout", - TimeValue.timeValueSeconds(10), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val MAX_ACTION_THROTTLE_VALUE = Setting.positiveTimeSetting( - "opendistro.alerting.action_throttle_max_value", - TimeValue.timeValueHours(24), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val FILTER_BY_BACKEND_ROLES = Setting.boolSetting( - "opendistro.alerting.filter_by_backend_roles", - false, - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) + val ALERTING_MAX_MONITORS = + Setting.intSetting( + "opendistro.alerting.monitor.max_monitors", + 1000, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val INPUT_TIMEOUT = + Setting.positiveTimeSetting( + "opendistro.alerting.input_timeout", + TimeValue.timeValueSeconds(30), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val INDEX_TIMEOUT = + Setting.positiveTimeSetting( + "opendistro.alerting.index_timeout", + TimeValue.timeValueSeconds(60), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + 
Setting.Property.Deprecated, + ) + + val BULK_TIMEOUT = + Setting.positiveTimeSetting( + "opendistro.alerting.bulk_timeout", + TimeValue.timeValueSeconds(120), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ALERT_BACKOFF_MILLIS = + Setting.positiveTimeSetting( + "opendistro.alerting.alert_backoff_millis", + TimeValue.timeValueMillis(50), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ALERT_BACKOFF_COUNT = + Setting.intSetting( + "opendistro.alerting.alert_backoff_count", + 2, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val MOVE_ALERTS_BACKOFF_MILLIS = + Setting.positiveTimeSetting( + "opendistro.alerting.move_alerts_backoff_millis", + TimeValue.timeValueMillis(250), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val MOVE_ALERTS_BACKOFF_COUNT = + Setting.intSetting( + "opendistro.alerting.move_alerts_backoff_count", + 3, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ALERT_HISTORY_ENABLED = + Setting.boolSetting( + "opendistro.alerting.alert_history_enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ALERT_HISTORY_ROLLOVER_PERIOD = + Setting.positiveTimeSetting( + "opendistro.alerting.alert_history_rollover_period", + TimeValue.timeValueHours(12), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ALERT_HISTORY_INDEX_MAX_AGE = + Setting.positiveTimeSetting( + "opendistro.alerting.alert_history_max_age", + TimeValue(30, TimeUnit.DAYS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ALERT_HISTORY_MAX_DOCS = + Setting.longSetting( + "opendistro.alerting.alert_history_max_docs", + 1000L, + 0L, + Setting.Property.NodeScope, + 
Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ALERT_HISTORY_RETENTION_PERIOD = + Setting.positiveTimeSetting( + "opendistro.alerting.alert_history_retention_period", + TimeValue(60, TimeUnit.DAYS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val ALERT_FINDING_RETENTION_PERIOD = + Setting.positiveTimeSetting( + "opendistro.alerting.alert_finding_retention_period", + TimeValue(60, TimeUnit.DAYS), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val REQUEST_TIMEOUT = + Setting.positiveTimeSetting( + "opendistro.alerting.request_timeout", + TimeValue.timeValueSeconds(10), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val MAX_ACTION_THROTTLE_VALUE = + Setting.positiveTimeSetting( + "opendistro.alerting.action_throttle_max_value", + TimeValue.timeValueHours(24), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val FILTER_BY_BACKEND_ROLES = + Setting.boolSetting( + "opendistro.alerting.filter_by_backend_roles", + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroDestinationSettings.kt b/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroDestinationSettings.kt index 73bae6463..369fc1d5b 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroDestinationSettings.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/settings/LegacyOpenDistroDestinationSettings.kt @@ -17,45 +17,47 @@ import java.util.function.Function * types require SecureSettings and need additional logic for retrieving and loading them. */ class LegacyOpenDistroDestinationSettings { - companion object { - const val DESTINATION_SETTING_PREFIX = "opendistro.alerting.destination." 
const val EMAIL_DESTINATION_SETTING_PREFIX = DESTINATION_SETTING_PREFIX + "email." val ALLOW_LIST_ALL = DestinationType.values().toList().map { it.value } val HOST_DENY_LIST_NONE = emptyList() - val ALLOW_LIST: Setting> = Setting.listSetting( - DESTINATION_SETTING_PREFIX + "allow_list", - ALLOW_LIST_ALL, - Function.identity(), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated - ) + val ALLOW_LIST: Setting> = + Setting.listSetting( + DESTINATION_SETTING_PREFIX + "allow_list", + ALLOW_LIST_ALL, + Function.identity(), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) - val EMAIL_USERNAME: Setting.AffixSetting = Setting.affixKeySetting( - EMAIL_DESTINATION_SETTING_PREFIX, - "username", - // Needed to coerce lambda to Function type for some reason to avoid argument mismatch compile error - Function { key: String -> SecureSetting.secureString(key, null) } - ) + val EMAIL_USERNAME: Setting.AffixSetting = + Setting.affixKeySetting( + EMAIL_DESTINATION_SETTING_PREFIX, + "username", + // Needed to coerce lambda to Function type for some reason to avoid argument mismatch compile error + Function { key: String -> SecureSetting.secureString(key, null) }, + ) - val EMAIL_PASSWORD: Setting.AffixSetting = Setting.affixKeySetting( - EMAIL_DESTINATION_SETTING_PREFIX, - "password", - // Needed to coerce lambda to Function type for some reason to avoid argument mismatch compile error - Function { key: String -> SecureSetting.secureString(key, null) } - ) + val EMAIL_PASSWORD: Setting.AffixSetting = + Setting.affixKeySetting( + EMAIL_DESTINATION_SETTING_PREFIX, + "password", + // Needed to coerce lambda to Function type for some reason to avoid argument mismatch compile error + Function { key: String -> SecureSetting.secureString(key, null) }, + ) - val HOST_DENY_LIST: Setting> = Setting.listSetting( - "opendistro.destination.host.deny_list", - HOST_DENY_LIST_NONE, - Function.identity(), - 
Setting.Property.NodeScope, - Setting.Property.Final, - Setting.Property.Deprecated - ) + val HOST_DENY_LIST: Setting> = + Setting.listSetting( + "opendistro.destination.host.deny_list", + HOST_DENY_LIST_NONE, + Function.identity(), + Setting.Property.NodeScope, + Setting.Property.Final, + Setting.Property.Deprecated, + ) fun loadLegacyDestinationSettings(settings: Settings): Map { // Only loading Email Destination settings for now since those are the only secure settings needed. @@ -74,7 +76,10 @@ class LegacyOpenDistroDestinationSettings { return emailAccounts } - private fun getLegacySecureDestinationSettings(settings: Settings, emailAccountName: String): SecureDestinationSettings? { + private fun getLegacySecureDestinationSettings( + settings: Settings, + emailAccountName: String, + ): SecureDestinationSettings? { // Using 'use' to emulate Java's try-with-resources on multiple closeable resources. // Values are cloned so that we maintain a SecureString, the original SecureStrings will be closed after // they have left the scope of this function. @@ -88,12 +93,15 @@ class LegacyOpenDistroDestinationSettings { private fun getLegacyEmailSettingValue( settings: Settings, emailAccountName: String, - emailSetting: Setting.AffixSetting + emailSetting: Setting.AffixSetting, ): T? 
{ val concreteSetting = emailSetting.getConcreteSettingForNamespace(emailAccountName) return concreteSetting.get(settings) } - data class SecureDestinationSettings(val emailUsername: SecureString, val emailPassword: SecureString) + data class SecureDestinationSettings( + val emailUsername: SecureString, + val emailPassword: SecureString, + ) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/settings/SupportedClusterMetricsSettings.kt b/alerting/src/main/kotlin/org/opensearch/alerting/settings/SupportedClusterMetricsSettings.kt index f71051ea2..aa4c02445 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/settings/SupportedClusterMetricsSettings.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/settings/SupportedClusterMetricsSettings.kt @@ -61,9 +61,11 @@ class SupportedClusterMetricsSettings : org.opensearch.commons.alerting.settings val supportedJsonPayloads = SupportedClusterMetricsSettings::class.java.getResource(RESOURCE_FILE) @Suppress("UNCHECKED_CAST") - if (supportedJsonPayloads != null) - supportedApiList = XContentHelper.convertToMap(JsonXContent.jsonXContent, supportedJsonPayloads.readText(), false) - as HashMap>> + if (supportedJsonPayloads != null) { + supportedApiList = + XContentHelper.convertToMap(JsonXContent.jsonXContent, supportedJsonPayloads.readText(), false) + as HashMap>> + } } /** @@ -72,9 +74,8 @@ class SupportedClusterMetricsSettings : org.opensearch.commons.alerting.settings * @return The map of the supported json payload for the requested API. * @throws IllegalArgumentException When supportedApiList does not contain a value for the provided key. 
*/ - fun getSupportedJsonPayload(path: String): Map> { - return supportedApiList[path] ?: throw IllegalArgumentException("API path not in supportedApiList.") - } + fun getSupportedJsonPayload(path: String): Map> = + supportedApiList[path] ?: throw IllegalArgumentException("API path not in supportedApiList.") /** * Will return an [ActionRequest] for the API associated with that path. @@ -86,47 +87,71 @@ class SupportedClusterMetricsSettings : org.opensearch.commons.alerting.settings fun resolveToActionRequest(clusterMetricsInput: ClusterMetricsInput): ActionRequest { val pathParams = clusterMetricsInput.parsePathParams() return when (clusterMetricsInput.clusterMetricType) { - ClusterMetricType.CAT_INDICES -> CatIndicesRequestWrapper(pathParams) - ClusterMetricType.CAT_PENDING_TASKS -> PendingClusterTasksRequest() + ClusterMetricType.CAT_INDICES -> { + CatIndicesRequestWrapper(pathParams) + } + + ClusterMetricType.CAT_PENDING_TASKS -> { + PendingClusterTasksRequest() + } + ClusterMetricType.CAT_RECOVERY -> { if (pathParams.isEmpty()) return RecoveryRequest() val pathParamsArray = pathParams.split(",").toTypedArray() return RecoveryRequest(*pathParamsArray) } - ClusterMetricType.CAT_SHARDS -> CatShardsRequestWrapper(pathParams) + + ClusterMetricType.CAT_SHARDS -> { + CatShardsRequestWrapper(pathParams) + } + ClusterMetricType.CAT_SNAPSHOTS -> { return GetSnapshotsRequest(pathParams, arrayOf(GetSnapshotsRequest.ALL_SNAPSHOTS)) } - ClusterMetricType.CAT_TASKS -> ListTasksRequest() + + ClusterMetricType.CAT_TASKS -> { + ListTasksRequest() + } + ClusterMetricType.CLUSTER_HEALTH -> { if (pathParams.isEmpty()) return ClusterHealthRequest() val pathParamsArray = pathParams.split(",").toTypedArray() return ClusterHealthRequest(*pathParamsArray) } - ClusterMetricType.CLUSTER_SETTINGS -> ClusterStateRequest().routingTable(false).nodes(false) + + ClusterMetricType.CLUSTER_SETTINGS -> { + ClusterStateRequest().routingTable(false).nodes(false) + } + ClusterMetricType.CLUSTER_STATS 
-> { if (pathParams.isEmpty()) return ClusterStatsRequest() val pathParamsArray = pathParams.split(",").toTypedArray() return ClusterStatsRequest(*pathParamsArray) } - ClusterMetricType.NODES_STATS -> NodesStatsRequest().addMetrics( - "os", - "process", - "jvm", - "thread_pool", - "fs", - "transport", - "http", - "breaker", - "script", - "discovery", - "ingest", - "adaptive_selection", - "script_cache", - "indexing_pressure", - "shard_indexing_pressure" - ) - else -> throw IllegalArgumentException("Unsupported API.") + + ClusterMetricType.NODES_STATS -> { + NodesStatsRequest().addMetrics( + "os", + "process", + "jvm", + "thread_pool", + "fs", + "transport", + "http", + "breaker", + "script", + "discovery", + "ingest", + "adaptive_selection", + "script_cache", + "indexing_pressure", + "shard_indexing_pressure", + ) + } + + else -> { + throw IllegalArgumentException("Unsupported API.") + } } } @@ -137,8 +162,9 @@ class SupportedClusterMetricsSettings : org.opensearch.commons.alerting.settings * @throws IllegalArgumentException when supportedApiList does not contain the provided path. */ fun validateApiTyped(clusterMetricsInput: ClusterMetricsInput) { - if (!supportedApiList.keys.contains(clusterMetricsInput.clusterMetricType.defaultPath)) + if (!supportedApiList.keys.contains(clusterMetricsInput.clusterMetricType.defaultPath)) { throw IllegalArgumentException("API path not in supportedApiList.") + } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/SecureTransportAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/SecureTransportAction.kt index 54667e125..8f05473d0 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/SecureTransportAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/SecureTransportAction.kt @@ -36,7 +36,6 @@ private val log = LogManager.getLogger(SecureTransportAction::class.java) * c) Users can edit and save the monitors to associate their backend_roles. 
*/ interface SecureTransportAction { - var filterByEnabled: Boolean fun listenFilterBySettingChange(clusterService: ClusterService) { @@ -61,29 +60,34 @@ interface SecureTransportAction { /** * 'all_access' role users are treated as admins. */ - fun isAdmin(user: User?): Boolean { - return when { + fun isAdmin(user: User?): Boolean = + when { user == null -> { false } + user.roles?.isNullOrEmpty() == true -> { false } + else -> { user.roles?.contains("all_access") == true } } - } - fun validateUserBackendRoles(user: User?, actionListener: ActionListener): Boolean { + fun validateUserBackendRoles( + user: User?, + actionListener: ActionListener, + ): Boolean { if (filterByEnabled) { if (user == null) { actionListener.onFailure( AlertingException.wrap( OpenSearchStatusException( - "Filter by user backend roles is enabled with security disabled.", RestStatus.FORBIDDEN - ) - ) + "Filter by user backend roles is enabled with security disabled.", + RestStatus.FORBIDDEN, + ), + ), ) return false } else if (isAdmin(user)) { @@ -91,8 +95,11 @@ interface SecureTransportAction { } else if (user.backendRoles.isNullOrEmpty()) { actionListener.onFailure( AlertingException.wrap( - OpenSearchStatusException("User doesn't have backend roles configured. Contact administrator", RestStatus.FORBIDDEN) - ) + OpenSearchStatusException( + "User doesn't have backend roles configured. 
Contact administrator", + RestStatus.FORBIDDEN, + ), + ), ) return false } @@ -111,9 +118,8 @@ interface SecureTransportAction { resourceUser: User?, actionListener: ActionListener, resourceType: String, - resourceId: String + resourceId: String, ): Boolean { - if (!doFilterForUser(requesterUser)) return true val resourceBackendRoles = resourceUser?.backendRoles @@ -128,9 +134,9 @@ interface SecureTransportAction { AlertingException.wrap( OpenSearchStatusException( "Do not have permissions to resource, $resourceType, with id, $resourceId", - RestStatus.FORBIDDEN - ) - ) + RestStatus.FORBIDDEN, + ), + ), ) return false } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeAlertAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeAlertAction.kt index 6cc60732a..efcc79a90 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeAlertAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeAlertAction.kt @@ -57,208 +57,240 @@ import java.util.Locale private val log = LogManager.getLogger(TransportAcknowledgeAlertAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportAcknowledgeAlertAction @Inject constructor( - transportService: TransportService, - val client: Client, - clusterService: ClusterService, - actionFilters: ActionFilters, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, - val transportGetMonitorAction: TransportGetMonitorAction -) : HandledTransportAction( - AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_NAME, transportService, actionFilters, ::AcknowledgeAlertRequest -) { +class TransportAcknowledgeAlertAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + clusterService: ClusterService, + actionFilters: ActionFilters, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + val 
transportGetMonitorAction: TransportGetMonitorAction, + ) : HandledTransportAction( + AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_NAME, + transportService, + actionFilters, + ::AcknowledgeAlertRequest, + ) { + @Volatile + private var isAlertHistoryEnabled = AlertingSettings.ALERT_HISTORY_ENABLED.get(settings) - @Volatile - private var isAlertHistoryEnabled = AlertingSettings.ALERT_HISTORY_ENABLED.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.ALERT_HISTORY_ENABLED) { isAlertHistoryEnabled = it } - } + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.ALERT_HISTORY_ENABLED) { isAlertHistoryEnabled = it } + } - override fun doExecute( - task: Task, - acknowledgeAlertRequest: ActionRequest, - actionListener: ActionListener - ) { - val request = acknowledgeAlertRequest as? AcknowledgeAlertRequest - ?: recreateObject(acknowledgeAlertRequest) { AcknowledgeAlertRequest(it) } - client.threadPool().threadContext.stashContext().use { - scope.launch { - val getMonitorResponse: GetMonitorResponse = - transportGetMonitorAction.client.suspendUntil { - val getMonitorRequest = GetMonitorRequest( - monitorId = request.monitorId, - -3L, - RestRequest.Method.GET, - FetchSourceContext.FETCH_SOURCE - ) - execute(AlertingActions.GET_MONITOR_ACTION_TYPE, getMonitorRequest, it) - } - if (getMonitorResponse.monitor == null) { - actionListener.onFailure( - AlertingException.wrap( - ResourceNotFoundException( - String.format( - Locale.ROOT, - "No monitor found with id [%s]", - request.monitorId + override fun doExecute( + task: Task, + acknowledgeAlertRequest: ActionRequest, + actionListener: ActionListener, + ) { + val request = + acknowledgeAlertRequest as? 
AcknowledgeAlertRequest + ?: recreateObject(acknowledgeAlertRequest) { AcknowledgeAlertRequest(it) } + client.threadPool().threadContext.stashContext().use { + scope.launch { + val getMonitorResponse: GetMonitorResponse = + transportGetMonitorAction.client.suspendUntil { + val getMonitorRequest = + GetMonitorRequest( + monitorId = request.monitorId, + -3L, + RestRequest.Method.GET, + FetchSourceContext.FETCH_SOURCE, ) - ) + execute(AlertingActions.GET_MONITOR_ACTION_TYPE, getMonitorRequest, it) + } + if (getMonitorResponse.monitor == null) { + actionListener.onFailure( + AlertingException.wrap( + ResourceNotFoundException( + String.format( + Locale.ROOT, + "No monitor found with id [%s]", + request.monitorId, + ), + ), + ), ) - ) - } else { - AcknowledgeHandler(client, actionListener, request).start(getMonitorResponse.monitor!!) + } else { + AcknowledgeHandler(client, actionListener, request).start(getMonitorResponse.monitor!!) + } } } } - } - inner class AcknowledgeHandler( - private val client: Client, - private val actionListener: ActionListener, - private val request: AcknowledgeAlertRequest - ) { - val alerts = mutableMapOf() + inner class AcknowledgeHandler( + private val client: Client, + private val actionListener: ActionListener, + private val request: AcknowledgeAlertRequest, + ) { + val alerts = mutableMapOf() - suspend fun start(monitor: Monitor) = findActiveAlerts(monitor) + suspend fun start(monitor: Monitor) = findActiveAlerts(monitor) - private suspend fun findActiveAlerts(monitor: Monitor) { - val queryBuilder = QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, request.monitorId)) - .filter(QueryBuilders.termsQuery("_id", request.alertIds)) - val searchRequest = SearchRequest() - .indices(monitor.dataSources.alertsIndex) - .routing(request.monitorId) - .source( - SearchSourceBuilder() - .query(queryBuilder) - .version(true) - .seqNoAndPrimaryTerm(true) - .size(request.alertIds.size) - ) - try { - val 
searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } - onSearchResponse(searchResponse, monitor) - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + private suspend fun findActiveAlerts(monitor: Monitor) { + val queryBuilder = + QueryBuilders + .boolQuery() + .filter(QueryBuilders.termQuery(Alert.MONITOR_ID_FIELD, request.monitorId)) + .filter(QueryBuilders.termsQuery("_id", request.alertIds)) + val searchRequest = + SearchRequest() + .indices(monitor.dataSources.alertsIndex) + .routing(request.monitorId) + .source( + SearchSourceBuilder() + .query(queryBuilder) + .version(true) + .seqNoAndPrimaryTerm(true) + .size(request.alertIds.size), + ) + try { + val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + onSearchResponse(searchResponse, monitor) + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } } - } - private suspend fun onSearchResponse(response: SearchResponse, monitor: Monitor) { - val alertsHistoryIndex = monitor.dataSources.alertsHistoryIndex - val updateRequests = mutableListOf() - val copyRequests = mutableListOf() - response.hits.forEach { hit -> - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val alert = Alert.parse(xcp, hit.id, hit.version) - alerts[alert.id] = alert + private suspend fun onSearchResponse( + response: SearchResponse, + monitor: Monitor, + ) { + val alertsHistoryIndex = monitor.dataSources.alertsHistoryIndex + val updateRequests = mutableListOf() + val copyRequests = mutableListOf() + response.hits.forEach { hit -> + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + 
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val alert = Alert.parse(xcp, hit.id, hit.version) + alerts[alert.id] = alert - if (alert.state == Alert.State.ACTIVE) { - if ( - alert.findingIds.isEmpty() || - !isAlertHistoryEnabled - ) { - val updateRequest = UpdateRequest(monitor.dataSources.alertsIndex, alert.id) - .routing(request.monitorId) - .setIfSeqNo(hit.seqNo) - .setIfPrimaryTerm(hit.primaryTerm) - .doc( - XContentFactory.jsonBuilder().startObject() - .field(Alert.STATE_FIELD, Alert.State.ACKNOWLEDGED.toString()) - .optionalTimeField(Alert.ACKNOWLEDGED_TIME_FIELD, Instant.now()) - .endObject() - ) - updateRequests.add(updateRequest) - } else { - val copyRequest = IndexRequest(alertsHistoryIndex) - .routing(request.monitorId) - .id(alert.id) - .source( - alert.copy(state = Alert.State.ACKNOWLEDGED, acknowledgedTime = Instant.now()) - .toXContentWithUser(XContentFactory.jsonBuilder()) - ) - copyRequests.add(copyRequest) + if (alert.state == Alert.State.ACTIVE) { + if ( + alert.findingIds.isEmpty() || + !isAlertHistoryEnabled + ) { + val updateRequest = + UpdateRequest(monitor.dataSources.alertsIndex, alert.id) + .routing(request.monitorId) + .setIfSeqNo(hit.seqNo) + .setIfPrimaryTerm(hit.primaryTerm) + .doc( + XContentFactory + .jsonBuilder() + .startObject() + .field(Alert.STATE_FIELD, Alert.State.ACKNOWLEDGED.toString()) + .optionalTimeField(Alert.ACKNOWLEDGED_TIME_FIELD, Instant.now()) + .endObject(), + ) + updateRequests.add(updateRequest) + } else { + val copyRequest = + IndexRequest(alertsHistoryIndex) + .routing(request.monitorId) + .id(alert.id) + .source( + alert + .copy(state = Alert.State.ACKNOWLEDGED, acknowledgedTime = Instant.now()) + .toXContentWithUser(XContentFactory.jsonBuilder()), + ) + copyRequests.add(copyRequest) + } } } - } - try { - val updateResponse: BulkResponse? 
= if (updateRequests.isNotEmpty()) - client.suspendUntil { - client.bulk(BulkRequest().add(updateRequests).setRefreshPolicy(request.refreshPolicy), it) - } - else null - val copyResponse: BulkResponse? = if (copyRequests.isNotEmpty()) - client.suspendUntil { - client.bulk(BulkRequest().add(copyRequests).setRefreshPolicy(request.refreshPolicy), it) - } - else null - onBulkResponse(updateResponse, copyResponse, monitor) - } catch (t: Exception) { - log.error("ack error: ${t.message}") - actionListener.onFailure(AlertingException.wrap(t)) + try { + val updateResponse: BulkResponse? = + if (updateRequests.isNotEmpty()) { + client.suspendUntil { + client.bulk(BulkRequest().add(updateRequests).setRefreshPolicy(request.refreshPolicy), it) + } + } else { + null + } + val copyResponse: BulkResponse? = + if (copyRequests.isNotEmpty()) { + client.suspendUntil { + client.bulk(BulkRequest().add(copyRequests).setRefreshPolicy(request.refreshPolicy), it) + } + } else { + null + } + onBulkResponse(updateResponse, copyResponse, monitor) + } catch (t: Exception) { + log.error("ack error: ${t.message}") + actionListener.onFailure(AlertingException.wrap(t)) + } } - } - private suspend fun onBulkResponse(updateResponse: BulkResponse?, copyResponse: BulkResponse?, monitor: Monitor) { - val deleteRequests = mutableListOf() - val missing = request.alertIds.toMutableSet() - val acknowledged = mutableListOf() - val failed = mutableListOf() + private suspend fun onBulkResponse( + updateResponse: BulkResponse?, + copyResponse: BulkResponse?, + monitor: Monitor, + ) { + val deleteRequests = mutableListOf() + val missing = request.alertIds.toMutableSet() + val acknowledged = mutableListOf() + val failed = mutableListOf() - alerts.values.forEach { - if (it.state != Alert.State.ACTIVE) { - missing.remove(it.id) - failed.add(it) + alerts.values.forEach { + if (it.state != Alert.State.ACTIVE) { + missing.remove(it.id) + failed.add(it) + } } - } - updateResponse?.items?.forEach { item -> - 
missing.remove(item.id) - if (item.isFailed) { - failed.add(alerts[item.id]!!) - } else { - acknowledged.add(alerts[item.id]!!) + updateResponse?.items?.forEach { item -> + missing.remove(item.id) + if (item.isFailed) { + failed.add(alerts[item.id]!!) + } else { + acknowledged.add(alerts[item.id]!!) + } } - } - copyResponse?.items?.forEach { item -> - log.info("got a copyResponse: $item") - missing.remove(item.id) - if (item.isFailed) { - log.info("got a failureResponse: ${item.failureMessage}") - failed.add(alerts[item.id]!!) - } else { - val deleteRequest = DeleteRequest(monitor.dataSources.alertsIndex, item.id) - .routing(request.monitorId) - deleteRequests.add(deleteRequest) + copyResponse?.items?.forEach { item -> + log.info("got a copyResponse: $item") + missing.remove(item.id) + if (item.isFailed) { + log.info("got a failureResponse: ${item.failureMessage}") + failed.add(alerts[item.id]!!) + } else { + val deleteRequest = + DeleteRequest(monitor.dataSources.alertsIndex, item.id) + .routing(request.monitorId) + deleteRequests.add(deleteRequest) + } } - } - if (deleteRequests.isNotEmpty()) { - try { - val deleteResponse: BulkResponse = client.suspendUntil { - client.bulk(BulkRequest().add(deleteRequests).setRefreshPolicy(request.refreshPolicy), it) - } - deleteResponse.items.forEach { item -> - missing.remove(item.id) - if (item.isFailed) { - failed.add(alerts[item.id]!!) - } else { - acknowledged.add(alerts[item.id]!!) + if (deleteRequests.isNotEmpty()) { + try { + val deleteResponse: BulkResponse = + client.suspendUntil { + client.bulk(BulkRequest().add(deleteRequests).setRefreshPolicy(request.refreshPolicy), it) + } + deleteResponse.items.forEach { item -> + missing.remove(item.id) + if (item.isFailed) { + failed.add(alerts[item.id]!!) + } else { + acknowledged.add(alerts[item.id]!!) 
+ } } + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + return } - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) - return } + actionListener.onResponse(AcknowledgeAlertResponse(acknowledged.toList(), failed.toList(), missing.toList())) } - actionListener.onResponse(AcknowledgeAlertResponse(acknowledged.toList(), failed.toList(), missing.toList())) } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeChainedAlertAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeChainedAlertAction.kt index 1d94cc2f8..dd515641d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeChainedAlertAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportAcknowledgeChainedAlertAction.kt @@ -62,236 +62,270 @@ import java.util.Locale private val log = LogManager.getLogger(TransportAcknowledgeChainedAlertAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportAcknowledgeChainedAlertAction @Inject constructor( - transportService: TransportService, - val client: Client, - clusterService: ClusterService, - actionFilters: ActionFilters, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, -) : HandledTransportAction( - AlertingActions.ACKNOWLEDGE_CHAINED_ALERTS_ACTION_NAME, - transportService, - actionFilters, - ::AcknowledgeChainedAlertRequest -) { - @Volatile - private var isAlertHistoryEnabled = AlertingSettings.ALERT_HISTORY_ENABLED.get(settings) +class TransportAcknowledgeChainedAlertAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + clusterService: ClusterService, + actionFilters: ActionFilters, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + ) : HandledTransportAction( + AlertingActions.ACKNOWLEDGE_CHAINED_ALERTS_ACTION_NAME, + 
transportService, + actionFilters, + ::AcknowledgeChainedAlertRequest, + ) { + @Volatile + private var isAlertHistoryEnabled = AlertingSettings.ALERT_HISTORY_ENABLED.get(settings) - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.ALERT_HISTORY_ENABLED) { isAlertHistoryEnabled = it } - } + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.ALERT_HISTORY_ENABLED) { isAlertHistoryEnabled = it } + } - override fun doExecute( - task: Task, - AcknowledgeChainedAlertRequest: ActionRequest, - actionListener: ActionListener, - ) { - val request = AcknowledgeChainedAlertRequest as? AcknowledgeChainedAlertRequest - ?: recreateObject(AcknowledgeChainedAlertRequest) { AcknowledgeChainedAlertRequest(it) } - client.threadPool().threadContext.stashContext().use { - scope.launch { - try { - val getResponse = getWorkflow(request.workflowId) - if (getResponse.isExists == false) { - actionListener.onFailure( - AlertingException.wrap( - ResourceNotFoundException( - String.format( - Locale.ROOT, - "No workflow found with id [%s]", - request.workflowId - ) - ) + override fun doExecute( + task: Task, + AcknowledgeChainedAlertRequest: ActionRequest, + actionListener: ActionListener, + ) { + val request = + AcknowledgeChainedAlertRequest as? 
AcknowledgeChainedAlertRequest + ?: recreateObject(AcknowledgeChainedAlertRequest) { AcknowledgeChainedAlertRequest(it) } + client.threadPool().threadContext.stashContext().use { + scope.launch { + try { + val getResponse = getWorkflow(request.workflowId) + if (getResponse.isExists == false) { + actionListener.onFailure( + AlertingException.wrap( + ResourceNotFoundException( + String.format( + Locale.ROOT, + "No workflow found with id [%s]", + request.workflowId, + ), + ), + ), ) - ) - } else { - val workflow = ScheduledJobUtils.parseWorkflowFromScheduledJobDocSource(xContentRegistry, getResponse) - AcknowledgeHandler(client, actionListener, request).start(workflow = workflow) + } else { + val workflow = ScheduledJobUtils.parseWorkflowFromScheduledJobDocSource(xContentRegistry, getResponse) + AcknowledgeHandler(client, actionListener, request).start(workflow = workflow) + } + } catch (e: Exception) { + log.error("Failed to acknowledge chained alerts from request $request", e) + actionListener.onFailure(AlertingException.wrap(e)) } - } catch (e: Exception) { - log.error("Failed to acknowledge chained alerts from request $request", e) - actionListener.onFailure(AlertingException.wrap(e)) } } } - } - - private suspend fun getWorkflow(workflowId: String): GetResponse { - return client.suspendUntil { client.get(GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, workflowId), it) } - } - - inner class AcknowledgeHandler( - private val client: Client, - private val actionListener: ActionListener, - private val request: AcknowledgeChainedAlertRequest, - ) { - val alerts = mutableMapOf() - - suspend fun start(workflow: Workflow) = findActiveAlerts(workflow) - - private suspend fun findActiveAlerts(workflow: Workflow) { - try { - val queryBuilder = QueryBuilders.boolQuery() - .must( - QueryBuilders.wildcardQuery("workflow_id", request.workflowId) - ) - .must(QueryBuilders.termsQuery("_id", request.alertIds)) - if (workflow.inputs.isEmpty() || (workflow.inputs[0] is CompositeInput) 
== false) { - actionListener.onFailure( - OpenSearchStatusException("Workflow ${workflow.id} is invalid", RestStatus.INTERNAL_SERVER_ERROR) - ) - return - } - val compositeInput = workflow.inputs[0] as CompositeInput - val workflowId = compositeInput.sequence.delegates[0].monitorId - val dataSources: DataSources = getDataSources(workflowId) - val searchRequest = SearchRequest() - .indices(dataSources.alertsIndex) - .routing(request.workflowId) - .source( - SearchSourceBuilder() - .query(queryBuilder) - .version(true) - .seqNoAndPrimaryTerm(true) - .size(request.alertIds.size) - ) - val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } - onSearchResponse(searchResponse, workflow, dataSources) - } catch (t: Exception) { - log.error("Failed to acknowledge chained alert ${request.alertIds} for workflow ${request.workflowId}", t) - actionListener.onFailure(AlertingException.wrap(t)) + private suspend fun getWorkflow(workflowId: String): GetResponse = + client.suspendUntil { + client.get(GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, workflowId), it) } - } - private suspend fun getDataSources(monitorId: String): DataSources { - val getResponse: GetResponse = client.suspendUntil { client.get(GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorId), it) } - return ScheduledJobUtils.parseMonitorFromScheduledJobDocSource(xContentRegistry, getResponse).dataSources - } + inner class AcknowledgeHandler( + private val client: Client, + private val actionListener: ActionListener, + private val request: AcknowledgeChainedAlertRequest, + ) { + val alerts = mutableMapOf() - private suspend fun onSearchResponse(response: SearchResponse, workflow: Workflow, dataSources: DataSources) { - val alertsHistoryIndex = dataSources.alertsHistoryIndex - val updateRequests = mutableListOf() - val copyRequests = mutableListOf() - response.hits.forEach { hit -> - val xcp = XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, 
- hit.sourceRef, - XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val alert = Alert.parse(xcp, hit.id, hit.version) - alerts[alert.id] = alert + suspend fun start(workflow: Workflow) = findActiveAlerts(workflow) - if (alert.state == Alert.State.ACTIVE) { - if ( - alert.findingIds.isEmpty() || - !isAlertHistoryEnabled - ) { - val updateRequest = UpdateRequest(dataSources.alertsIndex, alert.id) - .routing(request.workflowId) - .setIfSeqNo(hit.seqNo) - .setIfPrimaryTerm(hit.primaryTerm) - .doc( - XContentFactory.jsonBuilder().startObject() - .field(Alert.STATE_FIELD, Alert.State.ACKNOWLEDGED.toString()) - .optionalTimeField(Alert.ACKNOWLEDGED_TIME_FIELD, Instant.now()) - .endObject() - ) - updateRequests.add(updateRequest) - } else { - val copyRequest = IndexRequest(alertsHistoryIndex) + private suspend fun findActiveAlerts(workflow: Workflow) { + try { + val queryBuilder = + QueryBuilders + .boolQuery() + .must( + QueryBuilders.wildcardQuery("workflow_id", request.workflowId), + ).must(QueryBuilders.termsQuery("_id", request.alertIds)) + if (workflow.inputs.isEmpty() || (workflow.inputs[0] is CompositeInput) == false) { + actionListener.onFailure( + OpenSearchStatusException("Workflow ${workflow.id} is invalid", RestStatus.INTERNAL_SERVER_ERROR), + ) + return + } + val compositeInput = workflow.inputs[0] as CompositeInput + val workflowId = compositeInput.sequence.delegates[0].monitorId + val dataSources: DataSources = getDataSources(workflowId) + val searchRequest = + SearchRequest() + .indices(dataSources.alertsIndex) .routing(request.workflowId) - .id(alert.id) .source( - alert.copy(state = Alert.State.ACKNOWLEDGED, acknowledgedTime = Instant.now()) - .toXContentWithUser(XContentFactory.jsonBuilder()) + SearchSourceBuilder() + .query(queryBuilder) + .version(true) + .seqNoAndPrimaryTerm(true) + .size(request.alertIds.size), ) - copyRequests.add(copyRequest) - } + + val searchResponse: 
SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + onSearchResponse(searchResponse, workflow, dataSources) + } catch (t: Exception) { + log.error("Failed to acknowledge chained alert ${request.alertIds} for workflow ${request.workflowId}", t) + actionListener.onFailure(AlertingException.wrap(t)) } } - try { - val updateResponse: BulkResponse? = if (updateRequests.isNotEmpty()) { - client.suspendUntil { - client.bulk(BulkRequest().add(updateRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) - } - } else null - val copyResponse: BulkResponse? = if (copyRequests.isNotEmpty()) { + private suspend fun getDataSources(monitorId: String): DataSources { + val getResponse: GetResponse = client.suspendUntil { - client.bulk(BulkRequest().add(copyRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) + client.get( + GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorId), + it, + ) } - } else null - onBulkResponse(updateResponse, copyResponse, dataSources) - } catch (t: Exception) { - log.error("Failed to acknowledge chained alert ${request.alertIds} for workflow ${request.workflowId}", t) - actionListener.onFailure(AlertingException.wrap(t)) + return ScheduledJobUtils.parseMonitorFromScheduledJobDocSource(xContentRegistry, getResponse).dataSources } - } - private suspend fun onBulkResponse(updateResponse: BulkResponse?, copyResponse: BulkResponse?, dataSources: DataSources) { - val deleteRequests = mutableListOf() - val acknowledged = mutableListOf() - val missing = request.alertIds.toMutableSet() - val failed = mutableListOf() + private suspend fun onSearchResponse( + response: SearchResponse, + workflow: Workflow, + dataSources: DataSources, + ) { + val alertsHistoryIndex = dataSources.alertsHistoryIndex + val updateRequests = mutableListOf() + val copyRequests = mutableListOf() + response.hits.forEach { hit -> + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + 
hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val alert = Alert.parse(xcp, hit.id, hit.version) + alerts[alert.id] = alert - alerts.values.forEach { - if (it.state != Alert.State.ACTIVE) { - missing.remove(it.id) - failed.add(it) + if (alert.state == Alert.State.ACTIVE) { + if ( + alert.findingIds.isEmpty() || + !isAlertHistoryEnabled + ) { + val updateRequest = + UpdateRequest(dataSources.alertsIndex, alert.id) + .routing(request.workflowId) + .setIfSeqNo(hit.seqNo) + .setIfPrimaryTerm(hit.primaryTerm) + .doc( + XContentFactory + .jsonBuilder() + .startObject() + .field(Alert.STATE_FIELD, Alert.State.ACKNOWLEDGED.toString()) + .optionalTimeField(Alert.ACKNOWLEDGED_TIME_FIELD, Instant.now()) + .endObject(), + ) + updateRequests.add(updateRequest) + } else { + val copyRequest = + IndexRequest(alertsHistoryIndex) + .routing(request.workflowId) + .id(alert.id) + .source( + alert + .copy(state = Alert.State.ACKNOWLEDGED, acknowledgedTime = Instant.now()) + .toXContentWithUser(XContentFactory.jsonBuilder()), + ) + copyRequests.add(copyRequest) + } + } } - } - updateResponse?.items?.forEach { item -> - missing.remove(item.id) - if (item.isFailed) { - failed.add(alerts[item.id]!!) - } else { - acknowledged.add(alerts[item.id]!!) + try { + val updateResponse: BulkResponse? = + if (updateRequests.isNotEmpty()) { + client.suspendUntil { + client.bulk(BulkRequest().add(updateRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) + } + } else { + null + } + val copyResponse: BulkResponse? 
= + if (copyRequests.isNotEmpty()) { + client.suspendUntil { + client.bulk(BulkRequest().add(copyRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) + } + } else { + null + } + onBulkResponse(updateResponse, copyResponse, dataSources) + } catch (t: Exception) { + log.error("Failed to acknowledge chained alert ${request.alertIds} for workflow ${request.workflowId}", t) + actionListener.onFailure(AlertingException.wrap(t)) } } - copyResponse?.items?.forEach { item -> - log.info("got a copyResponse: $item") - missing.remove(item.id) - if (item.isFailed) { - log.info("got a failureResponse: ${item.failureMessage}") - failed.add(alerts[item.id]!!) - } else { - val deleteRequest = DeleteRequest(dataSources.alertsIndex, item.id) - .routing(request.workflowId) - deleteRequests.add(deleteRequest) + private suspend fun onBulkResponse( + updateResponse: BulkResponse?, + copyResponse: BulkResponse?, + dataSources: DataSources, + ) { + val deleteRequests = mutableListOf() + val acknowledged = mutableListOf() + val missing = request.alertIds.toMutableSet() + val failed = mutableListOf() + + alerts.values.forEach { + if (it.state != Alert.State.ACTIVE) { + missing.remove(it.id) + failed.add(it) + } } - } - if (deleteRequests.isNotEmpty()) { - try { - val deleteResponse: BulkResponse = client.suspendUntil { - client.bulk(BulkRequest().add(deleteRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) + updateResponse?.items?.forEach { item -> + missing.remove(item.id) + if (item.isFailed) { + failed.add(alerts[item.id]!!) + } else { + acknowledged.add(alerts[item.id]!!) } - deleteResponse.items.forEach { item -> - missing.remove(item.id) - if (item.isFailed) { - failed.add(alerts[item.id]!!) - } else { - acknowledged.add(alerts[item.id]!!) 
+ } + + copyResponse?.items?.forEach { item -> + log.info("got a copyResponse: $item") + missing.remove(item.id) + if (item.isFailed) { + log.info("got a failureResponse: ${item.failureMessage}") + failed.add(alerts[item.id]!!) + } else { + val deleteRequest = + DeleteRequest(dataSources.alertsIndex, item.id) + .routing(request.workflowId) + deleteRequests.add(deleteRequest) + } + } + + if (deleteRequests.isNotEmpty()) { + try { + val deleteResponse: BulkResponse = + client.suspendUntil { + client.bulk(BulkRequest().add(deleteRequests).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), it) + } + deleteResponse.items.forEach { item -> + missing.remove(item.id) + if (item.isFailed) { + failed.add(alerts[item.id]!!) + } else { + acknowledged.add(alerts[item.id]!!) + } } + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + return } - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) - return } - } - actionListener.onResponse( - AcknowledgeAlertResponse( - acknowledged.toList(), - failed.toList(), - missing.toList() + actionListener.onResponse( + AcknowledgeAlertResponse( + acknowledged.toList(), + failed.toList(), + missing.toList(), + ), ) - ) + } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteAlertingCommentAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteAlertingCommentAction.kt index 09f8d2e00..20abfd196 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteAlertingCommentAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteAlertingCommentAction.kt @@ -46,144 +46,157 @@ import org.opensearch.transport.client.Client private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) private val log = LogManager.getLogger(TransportDeleteAlertingCommentAction::class.java) -class TransportDeleteAlertingCommentAction @Inject constructor( - transportService: 
TransportService, - val client: Client, - actionFilters: ActionFilters, - val clusterService: ClusterService, - settings: Settings, - val xContentRegistry: NamedXContentRegistry -) : HandledTransportAction( - AlertingActions.DELETE_COMMENT_ACTION_NAME, transportService, actionFilters, ::DeleteCommentRequest -), - SecureTransportAction { - - @Volatile private var alertingCommentsEnabled = AlertingSettings.ALERTING_COMMENTS_ENABLED.get(settings) - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.ALERTING_COMMENTS_ENABLED) { - alertingCommentsEnabled = it +class TransportDeleteAlertingCommentAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val clusterService: ClusterService, + settings: Settings, + val xContentRegistry: NamedXContentRegistry, + ) : HandledTransportAction( + AlertingActions.DELETE_COMMENT_ACTION_NAME, + transportService, + actionFilters, + ::DeleteCommentRequest, + ), + SecureTransportAction { + @Volatile private var alertingCommentsEnabled = AlertingSettings.ALERTING_COMMENTS_ENABLED.get(settings) + + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.ALERTING_COMMENTS_ENABLED) { + alertingCommentsEnabled = it + } + listenFilterBySettingChange(clusterService) } - listenFilterBySettingChange(clusterService) - } - override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { - // validate feature flag enabled - if (!alertingCommentsEnabled) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Comments for Alerting is currently disabled", RestStatus.FORBIDDEN), + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: 
ActionListener, + ) { + // validate feature flag enabled + if (!alertingCommentsEnabled) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Comments for Alerting is currently disabled", RestStatus.FORBIDDEN), + ), ) - ) - return - } + return + } - val transformedRequest = request as? DeleteCommentRequest - ?: recreateObject(request) { DeleteCommentRequest(it) } + val transformedRequest = + request as? DeleteCommentRequest + ?: recreateObject(request) { DeleteCommentRequest(it) } - val user = readUserFromThreadContext(client) + val user = readUserFromThreadContext(client) - if (!validateUserBackendRoles(user, actionListener)) { - return - } - scope.launch { - DeleteCommentHandler( - client, - actionListener, - user, - transformedRequest.commentId - ).resolveUserAndStart() + if (!validateUserBackendRoles(user, actionListener)) { + return + } + scope.launch { + DeleteCommentHandler( + client, + actionListener, + user, + transformedRequest.commentId, + ).resolveUserAndStart() + } } - } - - inner class DeleteCommentHandler( - private val client: Client, - private val actionListener: ActionListener, - private val user: User?, - private val commentId: String - ) { - - private var sourceIndex: String? = null - suspend fun resolveUserAndStart() { - try { - val comment = getComment() ?: return - - if (sourceIndex == null) { - actionListener.onFailure( - AlertingException( - "Could not resolve the index the given Comment came from", - RestStatus.INTERNAL_SERVER_ERROR, - IllegalStateException() + inner class DeleteCommentHandler( + private val client: Client, + private val actionListener: ActionListener, + private val user: User?, + private val commentId: String, + ) { + private var sourceIndex: String? 
= null + + suspend fun resolveUserAndStart() { + try { + val comment = getComment() ?: return + + if (sourceIndex == null) { + actionListener.onFailure( + AlertingException( + "Could not resolve the index the given Comment came from", + RestStatus.INTERNAL_SERVER_ERROR, + IllegalStateException(), + ), ) - ) + } + + // if user is null because security plugin is not installed, anyone can delete any comment + // otherwise, only allow comment deletion if the deletion requester is the same as the comment's author + // or if the user is Admin + val canDelete = user == null || user.name == comment.user?.name || isAdmin(user) + + val deleteRequest = DeleteRequest(sourceIndex, commentId) + + if (canDelete) { + log.debug("Deleting the comment with id ${deleteRequest.id()}") + val deleteResponse = client.suspendUntil { delete(deleteRequest, it) } + actionListener.onResponse(DeleteCommentResponse(deleteResponse.id)) + } else { + actionListener.onFailure( + AlertingException("Not allowed to delete this comment!", RestStatus.FORBIDDEN, IllegalStateException()), + ) + } + } catch (t: Exception) { + log.error("Failed to delete comment $commentId", t) + actionListener.onFailure(AlertingException.wrap(t)) } + } - // if user is null because security plugin is not installed, anyone can delete any comment - // otherwise, only allow comment deletion if the deletion requester is the same as the comment's author - // or if the user is Admin - val canDelete = user == null || user.name == comment.user?.name || isAdmin(user) - - val deleteRequest = DeleteRequest(sourceIndex, commentId) - - if (canDelete) { - log.debug("Deleting the comment with id ${deleteRequest.id()}") - val deleteResponse = client.suspendUntil { delete(deleteRequest, it) } - actionListener.onResponse(DeleteCommentResponse(deleteResponse.id)) - } else { + private suspend fun getComment(): Comment? 
{ + val queryBuilder = + QueryBuilders + .boolQuery() + .must(QueryBuilders.termsQuery("_id", commentId)) + val searchSourceBuilder = + SearchSourceBuilder() + .version(true) + .seqNoAndPrimaryTerm(true) + .query(queryBuilder) + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(ALL_COMMENTS_INDEX_PATTERN) + + val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + val comments = + searchResponse.hits.map { hit -> + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val comment = Comment.parse(xcp, hit.id) + sourceIndex = hit.index + comment + } + + if (comments.isEmpty()) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Comment not found", RestStatus.NOT_FOUND), + ), + ) + return null + } else if (comments.size > 1) { actionListener.onFailure( - AlertingException("Not allowed to delete this comment!", RestStatus.FORBIDDEN, IllegalStateException()) + AlertingException.wrap(IllegalStateException("Multiple comments were found with the same ID")), ) + return null } - } catch (t: Exception) { - log.error("Failed to delete comment $commentId", t) - actionListener.onFailure(AlertingException.wrap(t)) - } - } - private suspend fun getComment(): Comment? 
{ - val queryBuilder = QueryBuilders - .boolQuery() - .must(QueryBuilders.termsQuery("_id", commentId)) - val searchSourceBuilder = - SearchSourceBuilder() - .version(true) - .seqNoAndPrimaryTerm(true) - .query(queryBuilder) - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(ALL_COMMENTS_INDEX_PATTERN) - - val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } - val comments = searchResponse.hits.map { hit -> - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val comment = Comment.parse(xcp, hit.id) - sourceIndex = hit.index - comment + return comments[0] } - - if (comments.isEmpty()) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Comment not found", RestStatus.NOT_FOUND), - ), - ) - return null - } else if (comments.size > 1) { - actionListener.onFailure( - AlertingException.wrap(IllegalStateException("Multiple comments were found with the same ID")), - ) - return null - } - - return comments[0] } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt index 39b96f2b0..61a685738 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteMonitorAction.kt @@ -44,103 +44,116 @@ import org.opensearch.transport.client.Client private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) private val log = LogManager.getLogger(TransportDeleteMonitorAction::class.java) -class TransportDeleteMonitorAction @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: 
ActionFilters, - val clusterService: ClusterService, - settings: Settings, - val xContentRegistry: NamedXContentRegistry -) : HandledTransportAction( - AlertingActions.DELETE_MONITOR_ACTION_NAME, transportService, actionFilters, ::DeleteMonitorRequest -), - SecureTransportAction { - - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - listenFilterBySettingChange(clusterService) - } - - override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { - val transformedRequest = request as? DeleteMonitorRequest - ?: recreateObject(request) { DeleteMonitorRequest(it) } - val user = readUserFromThreadContext(client) - - if (!validateUserBackendRoles(user, actionListener)) { - return - } - scope.launch { - DeleteMonitorHandler( - client, - actionListener, - user, - transformedRequest.monitorId - ).resolveUserAndStart(transformedRequest.refreshPolicy) +class TransportDeleteMonitorAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val clusterService: ClusterService, + settings: Settings, + val xContentRegistry: NamedXContentRegistry, + ) : HandledTransportAction( + AlertingActions.DELETE_MONITOR_ACTION_NAME, + transportService, + actionFilters, + ::DeleteMonitorRequest, + ), + SecureTransportAction { + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + listenFilterBySettingChange(clusterService) } - } - inner class DeleteMonitorHandler( - private val client: Client, - private val actionListener: ActionListener, - private val user: User?, - private val monitorId: String - ) { - suspend fun resolveUserAndStart(refreshPolicy: RefreshPolicy) { - try { - val monitor = getMonitor() ?: return // null means there was an issue retrieving the Monitor - - val canDelete = user == null || !doFilterForUser(user) || - checkUserPermissionsWithResource(user, monitor.user, 
actionListener, "monitor", monitorId) + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, + ) { + val transformedRequest = + request as? DeleteMonitorRequest + ?: recreateObject(request) { DeleteMonitorRequest(it) } + val user = readUserFromThreadContext(client) + + if (!validateUserBackendRoles(user, actionListener)) { + return + } + scope.launch { + DeleteMonitorHandler( + client, + actionListener, + user, + transformedRequest.monitorId, + ).resolveUserAndStart(transformedRequest.refreshPolicy) + } + } - if (DeleteMonitorService.monitorIsWorkflowDelegate(monitor.id)) { - actionListener.onFailure( - AlertingException( - "Monitor can't be deleted because it is a part of workflow(s)", - RestStatus.FORBIDDEN, - IllegalStateException() + inner class DeleteMonitorHandler( + private val client: Client, + private val actionListener: ActionListener, + private val user: User?, + private val monitorId: String, + ) { + suspend fun resolveUserAndStart(refreshPolicy: RefreshPolicy) { + try { + val monitor = getMonitor() ?: return // null means there was an issue retrieving the Monitor + + val canDelete = + user == null || !doFilterForUser(user) || + checkUserPermissionsWithResource(user, monitor.user, actionListener, "monitor", monitorId) + + if (DeleteMonitorService.monitorIsWorkflowDelegate(monitor.id)) { + actionListener.onFailure( + AlertingException( + "Monitor can't be deleted because it is a part of workflow(s)", + RestStatus.FORBIDDEN, + IllegalStateException(), + ), ) - ) - } else if (canDelete) { - actionListener.onResponse( - DeleteMonitorService.deleteMonitor(monitor, refreshPolicy) - ) - } else { - actionListener.onFailure( - AlertingException("Not allowed to delete this monitor!", RestStatus.FORBIDDEN, IllegalStateException()) - ) + } else if (canDelete) { + actionListener.onResponse( + DeleteMonitorService.deleteMonitor(monitor, refreshPolicy), + ) + } else { + actionListener.onFailure( + AlertingException("Not 
allowed to delete this monitor!", RestStatus.FORBIDDEN, IllegalStateException()), + ) + } + } catch (t: Exception) { + log.error("Failed to delete monitor $monitorId", t) + actionListener.onFailure(AlertingException.wrap(t)) } - } catch (t: Exception) { - log.error("Failed to delete monitor $monitorId", t) - actionListener.onFailure(AlertingException.wrap(t)) } - } - private suspend fun getMonitor(): Monitor? { - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorId) + private suspend fun getMonitor(): Monitor? { + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorId) - val getResponse: GetResponse = client.suspendUntil { get(getRequest, it) } - if (!getResponse.isExists) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Monitor with $monitorId is not found", RestStatus.NOT_FOUND) + val getResponse: GetResponse = client.suspendUntil { get(getRequest, it) } + if (!getResponse.isExists) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Monitor with $monitorId is not found", RestStatus.NOT_FOUND), + ), ) - ) - } - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, XContentType.JSON - ) - val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) + } + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, + XContentType.JSON, + ) + val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) - validateMonitorV1(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return null - } + validateMonitorV1(scheduledJob)?.let { + actionListener.onFailure(AlertingException.wrap(it)) + return null + } - val monitor = scheduledJob as Monitor + val monitor = scheduledJob as Monitor - return monitor + return monitor + } } } -} diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt index 60e018e50..fe5ea9a05 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDeleteWorkflowAction.kt @@ -65,296 +65,330 @@ import org.opensearch.transport.TransportService import org.opensearch.transport.client.Client private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) + /** * Transport class that deletes the workflow. * If the deleteDelegateMonitor flag is set to true, deletes the workflow delegates that are not part of another workflow */ -class TransportDeleteWorkflowAction @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val clusterService: ClusterService, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, - val lockService: LockService -) : HandledTransportAction( - AlertingActions.DELETE_WORKFLOW_ACTION_NAME, transportService, actionFilters, ::DeleteWorkflowRequest -), - SecureTransportAction { - private val log = LogManager.getLogger(javaClass) - - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - listenFilterBySettingChange(clusterService) - } - - override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { - val transformedRequest = request as? 
DeleteWorkflowRequest - ?: recreateObject(request) { DeleteWorkflowRequest(it) } +class TransportDeleteWorkflowAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val clusterService: ClusterService, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + val lockService: LockService, + ) : HandledTransportAction( + AlertingActions.DELETE_WORKFLOW_ACTION_NAME, + transportService, + actionFilters, + ::DeleteWorkflowRequest, + ), + SecureTransportAction { + private val log = LogManager.getLogger(javaClass) + + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + listenFilterBySettingChange(clusterService) + } - val user = readUserFromThreadContext(client) - val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, transformedRequest.workflowId) - .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, + ) { + val transformedRequest = + request as? 
DeleteWorkflowRequest + ?: recreateObject(request) { DeleteWorkflowRequest(it) } + + val user = readUserFromThreadContext(client) + val deleteRequest = + DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, transformedRequest.workflowId) + .setRefreshPolicy(RefreshPolicy.IMMEDIATE) + + if (!validateUserBackendRoles(user, actionListener)) { + return + } - if (!validateUserBackendRoles(user, actionListener)) { - return + scope.launch { + DeleteWorkflowHandler( + client, + actionListener, + deleteRequest, + transformedRequest.deleteDelegateMonitors, + user, + transformedRequest.workflowId, + ).resolveUserAndStart() + } } - scope.launch { - DeleteWorkflowHandler( - client, - actionListener, - deleteRequest, - transformedRequest.deleteDelegateMonitors, - user, - transformedRequest.workflowId - ).resolveUserAndStart() - } - } - - inner class DeleteWorkflowHandler( - private val client: Client, - private val actionListener: ActionListener, - private val deleteRequest: DeleteRequest, - private val deleteDelegateMonitors: Boolean?, - private val user: User?, - private val workflowId: String, - ) { - suspend fun resolveUserAndStart() { - try { - val workflow: Workflow = getWorkflow() ?: return - - val canDelete = user == null || - !doFilterForUser(user) || - checkUserPermissionsWithResource( - user, - workflow.user, - actionListener, - "workflow", - workflowId - ) + inner class DeleteWorkflowHandler( + private val client: Client, + private val actionListener: ActionListener, + private val deleteRequest: DeleteRequest, + private val deleteDelegateMonitors: Boolean?, + private val user: User?, + private val workflowId: String, + ) { + suspend fun resolveUserAndStart() { + try { + val workflow: Workflow = getWorkflow() ?: return + + val canDelete = + user == null || + !doFilterForUser(user) || + checkUserPermissionsWithResource( + user, + workflow.user, + actionListener, + "workflow", + workflowId, + ) - if (canDelete) { - val delegateMonitorIds = (workflow.inputs[0] as 
CompositeInput).getMonitorIds() - var deletableMonitors = listOf() - val delegateMonitors = getDeletableDelegates(workflowId, delegateMonitorIds, user) - // User can only delete the delegate monitors only in the case if all monitors can be deleted - // if there are monitors in this workflow that are referenced in other workflows, we cannot delete the monitors. - // We will not partially delete monitors. we delete them all or fail the request. - if (deleteDelegateMonitors == true) { - deletableMonitors = delegateMonitors - val monitorsDiff = delegateMonitorIds.toMutableList() - monitorsDiff.removeAll(deletableMonitors.map { it.id }) - - if (monitorsDiff.isNotEmpty()) { - actionListener.onFailure( - AlertingException( - "Not allowed to delete ${monitorsDiff.joinToString()} monitors", - RestStatus.FORBIDDEN, - IllegalStateException() + if (canDelete) { + val delegateMonitorIds = (workflow.inputs[0] as CompositeInput).getMonitorIds() + var deletableMonitors = listOf() + val delegateMonitors = getDeletableDelegates(workflowId, delegateMonitorIds, user) + // User can only delete the delegate monitors only in the case if all monitors can be deleted + // if there are monitors in this workflow that are referenced in other workflows, we cannot delete the monitors. + // We will not partially delete monitors. we delete them all or fail the request. 
+ if (deleteDelegateMonitors == true) { + deletableMonitors = delegateMonitors + val monitorsDiff = delegateMonitorIds.toMutableList() + monitorsDiff.removeAll(deletableMonitors.map { it.id }) + + if (monitorsDiff.isNotEmpty()) { + actionListener.onFailure( + AlertingException( + "Not allowed to delete ${monitorsDiff.joinToString()} monitors", + RestStatus.FORBIDDEN, + IllegalStateException(), + ), ) - ) - return + return + } } - } - val deleteResponse = deleteWorkflow(deleteRequest) - var deleteWorkflowResponse = DeleteWorkflowResponse(deleteResponse.id, deleteResponse.version) + val deleteResponse = deleteWorkflow(deleteRequest) + var deleteWorkflowResponse = DeleteWorkflowResponse(deleteResponse.id, deleteResponse.version) - val workflowMetadataId = WorkflowMetadata.getId(workflow.id) + val workflowMetadataId = WorkflowMetadata.getId(workflow.id) - val metadataIdsToDelete = mutableListOf(workflowMetadataId) + val metadataIdsToDelete = mutableListOf(workflowMetadataId) - if (deleteDelegateMonitors == true) { - val failedMonitorIds = tryDeletingMonitors(deletableMonitors, RefreshPolicy.IMMEDIATE) - // Update delete workflow response - deleteWorkflowResponse.nonDeletedMonitors = failedMonitorIds - } + if (deleteDelegateMonitors == true) { + val failedMonitorIds = tryDeletingMonitors(deletableMonitors, RefreshPolicy.IMMEDIATE) + // Update delete workflow response + deleteWorkflowResponse.nonDeletedMonitors = failedMonitorIds + } - // Delete monitors workflow metadata - // Monitor metadata will be in workflowId-metadata-monitorId-metadata format - metadataIdsToDelete.addAll(delegateMonitors.map { MonitorMetadata.getId(it, workflowMetadataId) }) - try { - // Delete the monitors workflow metadata - val deleteMonitorWorkflowMetadataResponse: BulkByScrollResponse = client.suspendUntil { - DeleteByQueryRequestBuilder(this, DeleteByQueryAction.INSTANCE) - .source(ScheduledJob.SCHEDULED_JOBS_INDEX) - 
.filter(QueryBuilders.idsQuery().addIds(*metadataIdsToDelete.toTypedArray())) - .execute(it) + // Delete monitors workflow metadata + // Monitor metadata will be in workflowId-metadata-monitorId-metadata format + metadataIdsToDelete.addAll(delegateMonitors.map { MonitorMetadata.getId(it, workflowMetadataId) }) + try { + // Delete the monitors workflow metadata + val deleteMonitorWorkflowMetadataResponse: BulkByScrollResponse = + client.suspendUntil { + DeleteByQueryRequestBuilder(this, DeleteByQueryAction.INSTANCE) + .source(ScheduledJob.SCHEDULED_JOBS_INDEX) + .filter(QueryBuilders.idsQuery().addIds(*metadataIdsToDelete.toTypedArray())) + .execute(it) + } + } catch (t: Exception) { + log.error("Failed to delete delegate monitor metadata. But proceeding with workflow deletion $workflowId", t) } - } catch (t: Exception) { - log.error("Failed to delete delegate monitor metadata. But proceeding with workflow deletion $workflowId", t) - } - try { - // Delete the workflow lock - client.suspendUntil { lockService.deleteLock(LockModel.generateLockId(workflowId), it) } - } catch (t: Exception) { - log.error("Failed to delete workflow lock for $workflowId") - } - actionListener.onResponse(deleteWorkflowResponse) - } else { - actionListener.onFailure( - AlertingException( - "Not allowed to delete this workflow!", - RestStatus.FORBIDDEN, - IllegalStateException() + try { + // Delete the workflow lock + client.suspendUntil { lockService.deleteLock(LockModel.generateLockId(workflowId), it) } + } catch (t: Exception) { + log.error("Failed to delete workflow lock for $workflowId") + } + actionListener.onResponse(deleteWorkflowResponse) + } else { + actionListener.onFailure( + AlertingException( + "Not allowed to delete this workflow!", + RestStatus.FORBIDDEN, + IllegalStateException(), + ), ) - ) - } - } catch (t: Exception) { - if (t is IndexNotFoundException) { - actionListener.onFailure( - OpenSearchStatusException( - "Workflow not found.", - RestStatus.NOT_FOUND + } + } catch 
(t: Exception) { + if (t is IndexNotFoundException) { + actionListener.onFailure( + OpenSearchStatusException( + "Workflow not found.", + RestStatus.NOT_FOUND, + ), ) - ) - } else { - log.error("Failed to delete workflow $workflowId", t) - actionListener.onFailure(AlertingException.wrap(t)) - } - } - } - - /** - * Tries to delete the given list of the monitors. Return value contains all the monitorIds for which deletion failed - * @param monitorIds list of monitor ids to be deleted - * @param refreshPolicy - * @return list of the monitors that were not deleted - */ - private suspend fun tryDeletingMonitors(monitors: List, refreshPolicy: RefreshPolicy): List { - val nonDeletedMonitorIds = mutableListOf() - for (monitor in monitors) { - try { - DeleteMonitorService.deleteMonitor(monitor, refreshPolicy) - } catch (ex: Exception) { - log.error("failed to delete delegate monitor ${monitor.id} for $workflowId") - nonDeletedMonitorIds.add(monitor.id) + } else { + log.error("Failed to delete workflow $workflowId", t) + actionListener.onFailure(AlertingException.wrap(t)) + } } } - return nonDeletedMonitorIds - } - /** - * Returns lit of monitor ids belonging only to a given workflow. - * if filterBy is enabled, it filters and returns only those monitors which user has permission to delete. 
- * @param workflowIdToBeDeleted Id of the workflow that should be deleted - * @param monitorIds List of delegate monitor ids (underlying monitor ids) - */ - private suspend fun getDeletableDelegates(workflowIdToBeDeleted: String, monitorIds: List, user: User?): List { - // Retrieve monitors belonging to another workflows - val queryBuilder = QueryBuilders.boolQuery().mustNot(QueryBuilders.termQuery("_id", workflowIdToBeDeleted)).filter( - QueryBuilders.nestedQuery( - WORKFLOW_DELEGATE_PATH, - QueryBuilders.boolQuery().must( - QueryBuilders.termsQuery( - WORKFLOW_MONITOR_PATH, - monitorIds - ) - ), - ScoreMode.None - ) - ) - - val searchRequest = SearchRequest() - .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) - .source(SearchSourceBuilder().query(queryBuilder)) - - val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } - - val workflows = searchResponse.hits.hits.map { hit -> - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, XContentType.JSON - ).also { it.nextToken() } - lateinit var workflow: Workflow - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - xcp.nextToken() - when (xcp.currentName()) { - "workflow" -> workflow = Workflow.parse(xcp) + /** + * Tries to delete the given list of the monitors. 
Return value contains all the monitorIds for which deletion failed + * @param monitorIds list of monitor ids to be deleted + * @param refreshPolicy + * @return list of the monitors that were not deleted + */ + private suspend fun tryDeletingMonitors( + monitors: List, + refreshPolicy: RefreshPolicy, + ): List { + val nonDeletedMonitorIds = mutableListOf() + for (monitor in monitors) { + try { + DeleteMonitorService.deleteMonitor(monitor, refreshPolicy) + } catch (ex: Exception) { + log.error("failed to delete delegate monitor ${monitor.id} for $workflowId") + nonDeletedMonitorIds.add(monitor.id) } } - workflow.copy(id = hit.id, version = hit.version) + return nonDeletedMonitorIds } - val workflowMonitors = workflows.flatMap { (it.inputs[0] as CompositeInput).getMonitorIds() }.distinct() - // Monitors that can be deleted -> all workflow delegates - monitors belonging to different workflows - val deletableMonitorIds = monitorIds.minus(workflowMonitors.toSet()) - // filtering further to get the list of monitors that user has permission to delete if filterby is enabled and user is not null - val query = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", deletableMonitorIds)) - val searchSource = SearchSourceBuilder().query(query) - val monitorSearchRequest = SearchRequest(ScheduledJob.SCHEDULED_JOBS_INDEX).source(searchSource) - - if (user != null && filterByEnabled) { - addFilter(user, monitorSearchRequest.source(), "monitor.user.backend_roles.keyword") - } + /** + * Returns lit of monitor ids belonging only to a given workflow. + * if filterBy is enabled, it filters and returns only those monitors which user has permission to delete. 
+ * @param workflowIdToBeDeleted Id of the workflow that should be deleted + * @param monitorIds List of delegate monitor ids (underlying monitor ids) + */ + private suspend fun getDeletableDelegates( + workflowIdToBeDeleted: String, + monitorIds: List, + user: User?, + ): List { + // Retrieve monitors belonging to another workflows + val queryBuilder = + QueryBuilders.boolQuery().mustNot(QueryBuilders.termQuery("_id", workflowIdToBeDeleted)).filter( + QueryBuilders.nestedQuery( + WORKFLOW_DELEGATE_PATH, + QueryBuilders.boolQuery().must( + QueryBuilders.termsQuery( + WORKFLOW_MONITOR_PATH, + monitorIds, + ), + ), + ScoreMode.None, + ), + ) - val searchMonitorResponse: SearchResponse = client.suspendUntil { search(monitorSearchRequest, it) } - if (searchMonitorResponse.isTimedOut) { - throw OpenSearchException("Cannot determine that the ${ScheduledJob.SCHEDULED_JOBS_INDEX} index is healthy") - } - val deletableMonitors = mutableListOf() - for (hit in searchMonitorResponse.hits) { - XContentType.JSON.xContent().createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, hit.sourceAsString - ).use { hitsParser -> - val scheduledJob = ScheduledJob.parse(hitsParser, hit.id, hit.version) - - validateMonitorV1(scheduledJob)?.let { - throw OpenSearchException(it) + val searchRequest = + SearchRequest() + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + .source(SearchSourceBuilder().query(queryBuilder)) + + val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + + val workflows = + searchResponse.hits.hits.map { hit -> + val xcp = + XContentHelper + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ).also { it.nextToken() } + lateinit var workflow: Workflow + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + xcp.nextToken() + when (xcp.currentName()) { + "workflow" -> workflow = Workflow.parse(xcp) + } + } + workflow.copy(id = hit.id, version = hit.version) 
} + val workflowMonitors = workflows.flatMap { (it.inputs[0] as CompositeInput).getMonitorIds() }.distinct() + // Monitors that can be deleted -> all workflow delegates - monitors belonging to different workflows + val deletableMonitorIds = monitorIds.minus(workflowMonitors.toSet()) + + // filtering further to get the list of monitors that user has permission to delete if filterby is enabled and user is not null + val query = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", deletableMonitorIds)) + val searchSource = SearchSourceBuilder().query(query) + val monitorSearchRequest = SearchRequest(ScheduledJob.SCHEDULED_JOBS_INDEX).source(searchSource) - val monitor = scheduledJob as Monitor - deletableMonitors.add(monitor) + if (user != null && filterByEnabled) { + addFilter(user, monitorSearchRequest.source(), "monitor.user.backend_roles.keyword") + } + + val searchMonitorResponse: SearchResponse = client.suspendUntil { search(monitorSearchRequest, it) } + if (searchMonitorResponse.isTimedOut) { + throw OpenSearchException("Cannot determine that the ${ScheduledJob.SCHEDULED_JOBS_INDEX} index is healthy") + } + val deletableMonitors = mutableListOf() + for (hit in searchMonitorResponse.hits) { + XContentType.JSON + .xContent() + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceAsString, + ).use { hitsParser -> + val scheduledJob = ScheduledJob.parse(hitsParser, hit.id, hit.version) + + validateMonitorV1(scheduledJob)?.let { + throw OpenSearchException(it) + } + + val monitor = scheduledJob as Monitor + deletableMonitors.add(monitor) + } } + + return deletableMonitors } - return deletableMonitors - } + private suspend fun getWorkflow(): Workflow? { + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, workflowId) - private suspend fun getWorkflow(): Workflow? 
{ - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, workflowId) + val getResponse: GetResponse = client.suspendUntil { get(getRequest, it) } + if (!getResponse.isExists) { + handleWorkflowMissing() + return null + } - val getResponse: GetResponse = client.suspendUntil { get(getRequest, it) } - if (!getResponse.isExists) { - handleWorkflowMissing() - return null + return parseWorkflow(getResponse) } - return parseWorkflow(getResponse) - } - - private fun handleWorkflowMissing() { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Workflow not found.", RestStatus.NOT_FOUND) + private fun handleWorkflowMissing() { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Workflow not found.", RestStatus.NOT_FOUND), + ), ) - ) - } + } - private fun parseWorkflow(getResponse: GetResponse): Workflow? { - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, XContentType.JSON - ) - val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) - validateMonitorV1(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return null + private fun parseWorkflow(getResponse: GetResponse): Workflow? 
{ + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, + XContentType.JSON, + ) + val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) + validateMonitorV1(scheduledJob)?.let { + actionListener.onFailure(AlertingException.wrap(it)) + return null + } + return scheduledJob as Workflow } - return scheduledJob as Workflow - } - private suspend fun deleteWorkflow(deleteRequest: DeleteRequest): DeleteResponse { - log.debug("Deleting the workflow with id ${deleteRequest.id()}") - return client.suspendUntil { delete(deleteRequest, it) } - } + private suspend fun deleteWorkflow(deleteRequest: DeleteRequest): DeleteResponse { + log.debug("Deleting the workflow with id ${deleteRequest.id()}") + return client.suspendUntil { delete(deleteRequest, it) } + } - private suspend fun deleteWorkflowMetadata(workflow: Workflow) { - val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, WorkflowMetadata.getId(workflow.id)) - val deleteResponse: DeleteResponse = client.suspendUntil { delete(deleteRequest, it) } + private suspend fun deleteWorkflowMetadata(workflow: Workflow) { + val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, WorkflowMetadata.getId(workflow.id)) + val deleteResponse: DeleteResponse = client.suspendUntil { delete(deleteRequest, it) } + } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDocLevelMonitorFanOutAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDocLevelMonitorFanOutAction.kt index 83c088c29..139a867c8 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDocLevelMonitorFanOutAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportDocLevelMonitorFanOutAction.kt @@ -132,680 +132,770 @@ private val log = LogManager.getLogger(TransportDocLevelMonitorFanOutAction::cla private val scope: CoroutineScope = 
CoroutineScope(Dispatchers.IO) class TransportDocLevelMonitorFanOutAction -@Inject constructor( - transportService: TransportService, - val client: Client, - val actionFilters: ActionFilters, - val clusterService: ClusterService, - val triggerService: TriggerService, - val alertService: AlertService, - val scriptService: ScriptService, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, -) : HandledTransportAction( - DocLevelMonitorFanOutAction.NAME, transportService, actionFilters, ::DocLevelMonitorFanOutRequest -), - SecureTransportAction { - var nonPercolateSearchesTimeTakenStat = 0L - var percolateQueriesTimeTakenStat = 0L - var totalDocsQueriedStat = 0L - var docTransformTimeTakenStat = 0L - var totalDocsSizeInBytesStat = 0L - var docsSizeOfBatchInBytes = 0L - var findingsToTriggeredQueries: Map> = mutableMapOf() - - @Volatile var percQueryMaxNumDocsInMemory: Int = PERCOLATE_QUERY_MAX_NUM_DOCS_IN_MEMORY.get(settings) - @Volatile var docLevelMonitorFanoutMaxDuration = DOC_LEVEL_MONITOR_FANOUT_MAX_DURATION.get(settings) - @Volatile var percQueryDocsSizeMemoryPercentageLimit: Int = PERCOLATE_QUERY_DOCS_SIZE_MEMORY_PERCENTAGE_LIMIT.get(settings) - @Volatile var docLevelMonitorShardFetchSize: Int = DOC_LEVEL_MONITOR_SHARD_FETCH_SIZE.get(settings) - @Volatile var findingsIndexBatchSize: Int = FINDINGS_INDEXING_BATCH_SIZE.get(settings) - @Volatile var maxActionableAlertCount: Long = MAX_ACTIONABLE_ALERT_COUNT.get(settings) - @Volatile var retryPolicy = BackoffPolicy.constantBackoff(ALERT_BACKOFF_MILLIS.get(settings), ALERT_BACKOFF_COUNT.get(settings)) - @Volatile var allowList: List = DestinationSettings.ALLOW_LIST.get(settings) - @Volatile var fetchOnlyQueryFieldNames = DOC_LEVEL_MONITOR_FETCH_ONLY_QUERY_FIELDS_ENABLED.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(PERCOLATE_QUERY_MAX_NUM_DOCS_IN_MEMORY) { - percQueryMaxNumDocsInMemory = it - } - 
clusterService.clusterSettings.addSettingsUpdateConsumer(DOC_LEVEL_MONITOR_FANOUT_MAX_DURATION) { - docLevelMonitorFanoutMaxDuration = it - } - clusterService.clusterSettings.addSettingsUpdateConsumer(PERCOLATE_QUERY_DOCS_SIZE_MEMORY_PERCENTAGE_LIMIT) { - percQueryDocsSizeMemoryPercentageLimit = it - } - clusterService.clusterSettings.addSettingsUpdateConsumer(DOC_LEVEL_MONITOR_SHARD_FETCH_SIZE) { - docLevelMonitorShardFetchSize = it - } - clusterService.clusterSettings.addSettingsUpdateConsumer(FINDINGS_INDEXING_BATCH_SIZE) { - findingsIndexBatchSize = it - } - clusterService.clusterSettings.addSettingsUpdateConsumer(MAX_ACTIONABLE_ALERT_COUNT) { - maxActionableAlertCount = it - } - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERT_BACKOFF_MILLIS, ALERT_BACKOFF_COUNT) { millis, count -> - retryPolicy = BackoffPolicy.constantBackoff(millis, count) - } - clusterService.clusterSettings.addSettingsUpdateConsumer(DestinationSettings.ALLOW_LIST) { - allowList = it - } - clusterService.clusterSettings.addSettingsUpdateConsumer(DOC_LEVEL_MONITOR_FETCH_ONLY_QUERY_FIELDS_ENABLED) { - fetchOnlyQueryFieldNames = it + @Inject + constructor( + transportService: TransportService, + val client: Client, + val actionFilters: ActionFilters, + val clusterService: ClusterService, + val triggerService: TriggerService, + val alertService: AlertService, + val scriptService: ScriptService, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + ) : HandledTransportAction( + DocLevelMonitorFanOutAction.NAME, + transportService, + actionFilters, + ::DocLevelMonitorFanOutRequest, + ), + SecureTransportAction { + var nonPercolateSearchesTimeTakenStat = 0L + var percolateQueriesTimeTakenStat = 0L + var totalDocsQueriedStat = 0L + var docTransformTimeTakenStat = 0L + var totalDocsSizeInBytesStat = 0L + var docsSizeOfBatchInBytes = 0L + var findingsToTriggeredQueries: Map> = mutableMapOf() + + @Volatile var percQueryMaxNumDocsInMemory: Int = 
PERCOLATE_QUERY_MAX_NUM_DOCS_IN_MEMORY.get(settings) + + @Volatile var docLevelMonitorFanoutMaxDuration = DOC_LEVEL_MONITOR_FANOUT_MAX_DURATION.get(settings) + + @Volatile var percQueryDocsSizeMemoryPercentageLimit: Int = PERCOLATE_QUERY_DOCS_SIZE_MEMORY_PERCENTAGE_LIMIT.get(settings) + + @Volatile var docLevelMonitorShardFetchSize: Int = DOC_LEVEL_MONITOR_SHARD_FETCH_SIZE.get(settings) + + @Volatile var findingsIndexBatchSize: Int = FINDINGS_INDEXING_BATCH_SIZE.get(settings) + + @Volatile var maxActionableAlertCount: Long = MAX_ACTIONABLE_ALERT_COUNT.get(settings) + + @Volatile var retryPolicy = BackoffPolicy.constantBackoff(ALERT_BACKOFF_MILLIS.get(settings), ALERT_BACKOFF_COUNT.get(settings)) + + @Volatile var allowList: List = DestinationSettings.ALLOW_LIST.get(settings) + + @Volatile var fetchOnlyQueryFieldNames = DOC_LEVEL_MONITOR_FETCH_ONLY_QUERY_FIELDS_ENABLED.get(settings) + + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(PERCOLATE_QUERY_MAX_NUM_DOCS_IN_MEMORY) { + percQueryMaxNumDocsInMemory = it + } + clusterService.clusterSettings.addSettingsUpdateConsumer(DOC_LEVEL_MONITOR_FANOUT_MAX_DURATION) { + docLevelMonitorFanoutMaxDuration = it + } + clusterService.clusterSettings.addSettingsUpdateConsumer(PERCOLATE_QUERY_DOCS_SIZE_MEMORY_PERCENTAGE_LIMIT) { + percQueryDocsSizeMemoryPercentageLimit = it + } + clusterService.clusterSettings.addSettingsUpdateConsumer(DOC_LEVEL_MONITOR_SHARD_FETCH_SIZE) { + docLevelMonitorShardFetchSize = it + } + clusterService.clusterSettings.addSettingsUpdateConsumer(FINDINGS_INDEXING_BATCH_SIZE) { + findingsIndexBatchSize = it + } + clusterService.clusterSettings.addSettingsUpdateConsumer(MAX_ACTIONABLE_ALERT_COUNT) { + maxActionableAlertCount = it + } + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERT_BACKOFF_MILLIS, ALERT_BACKOFF_COUNT) { millis, count -> + retryPolicy = BackoffPolicy.constantBackoff(millis, count) + } + 
clusterService.clusterSettings.addSettingsUpdateConsumer(DestinationSettings.ALLOW_LIST) { + allowList = it + } + clusterService.clusterSettings.addSettingsUpdateConsumer(DOC_LEVEL_MONITOR_FETCH_ONLY_QUERY_FIELDS_ENABLED) { + fetchOnlyQueryFieldNames = it + } } - } - @Volatile - override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + @Volatile + override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - override fun doExecute( - task: Task, - request: DocLevelMonitorFanOutRequest, - listener: ActionListener - ) { - scope.launch { - executeMonitor(request, listener) + override fun doExecute( + task: Task, + request: DocLevelMonitorFanOutRequest, + listener: ActionListener, + ) { + scope.launch { + executeMonitor(request, listener) + } } - } - private suspend fun executeMonitor( - request: DocLevelMonitorFanOutRequest, - listener: ActionListener - ) { - try { - log.info("Starting fan_out for doc level monitor ${request.monitor.id}. 
ExecutionId: ${request.executionId}") - val startTime = System.currentTimeMillis() - val endTime = Instant.now().plusMillis(docLevelMonitorFanoutMaxDuration.millis()) - val monitor = request.monitor - var monitorResult = MonitorRunResult(monitor.name, Instant.now(), Instant.now()) - val updatedIndexNames = request.indexExecutionContext!!.updatedIndexNames - val monitorMetadata = request.monitorMetadata - val shardIds = request.shardIds - val indexExecutionContext = request.indexExecutionContext - val concreteIndicesSeenSoFar = request.concreteIndicesSeenSoFar - val dryrun = request.dryRun - val executionId = request.executionId - val workflowRunContext = request.workflowRunContext - - val queryToDocIds = mutableMapOf>() - val inputRunResults = mutableMapOf>() - val docsToQueries = mutableMapOf>() - val transformedDocs = mutableListOf>() - val findingIdToDocSource = mutableMapOf() - val isTempMonitor = dryrun || monitor.id == Monitor.NO_ID - - val docLevelMonitorInput = request.monitor.inputs[0] as DocLevelMonitorInput - val queries: List = docLevelMonitorInput.queries - val fieldsToBeQueried = mutableSetOf() - if (fetchOnlyQueryFieldNames) { - for (it in queries) { - if (it.queryFieldNames.isEmpty()) { - fieldsToBeQueried.clear() + private suspend fun executeMonitor( + request: DocLevelMonitorFanOutRequest, + listener: ActionListener, + ) { + try { + log.info("Starting fan_out for doc level monitor ${request.monitor.id}. 
ExecutionId: ${request.executionId}") + val startTime = System.currentTimeMillis() + val endTime = Instant.now().plusMillis(docLevelMonitorFanoutMaxDuration.millis()) + val monitor = request.monitor + var monitorResult = MonitorRunResult(monitor.name, Instant.now(), Instant.now()) + val updatedIndexNames = request.indexExecutionContext!!.updatedIndexNames + val monitorMetadata = request.monitorMetadata + val shardIds = request.shardIds + val indexExecutionContext = request.indexExecutionContext + val concreteIndicesSeenSoFar = request.concreteIndicesSeenSoFar + val dryrun = request.dryRun + val executionId = request.executionId + val workflowRunContext = request.workflowRunContext + + val queryToDocIds = mutableMapOf>() + val inputRunResults = mutableMapOf>() + val docsToQueries = mutableMapOf>() + val transformedDocs = mutableListOf>() + val findingIdToDocSource = mutableMapOf() + val isTempMonitor = dryrun || monitor.id == Monitor.NO_ID + + val docLevelMonitorInput = request.monitor.inputs[0] as DocLevelMonitorInput + val queries: List = docLevelMonitorInput.queries + val fieldsToBeQueried = mutableSetOf() + if (fetchOnlyQueryFieldNames) { + for (it in queries) { + if (it.queryFieldNames.isEmpty()) { + fieldsToBeQueried.clear() + log.debug( + "Monitor ${request.monitor.id} : " + + "Doc Level query ${it.id} : ${it.query} doesn't have queryFieldNames populated. " + + "Cannot optimize monitor to fetch only query-relevant fields. " + + "Querying entire doc source.", + ) + break + } + fieldsToBeQueried.addAll(it.queryFieldNames) + } + if (fieldsToBeQueried.isNotEmpty()) { log.debug( - "Monitor ${request.monitor.id} : " + - "Doc Level query ${it.id} : ${it.query} doesn't have queryFieldNames populated. " + - "Cannot optimize monitor to fetch only query-relevant fields. " + - "Querying entire doc source." 
+ "Monitor ${monitor.id} Querying only fields " + + "${fieldsToBeQueried.joinToString()} instead of entire _source of documents", ) - break } - fieldsToBeQueried.addAll(it.queryFieldNames) - } - if (fieldsToBeQueried.isNotEmpty()) { - log.debug( - "Monitor ${monitor.id} Querying only fields " + - "${fieldsToBeQueried.joinToString()} instead of entire _source of documents" - ) } - } - fetchShardDataAndMaybeExecutePercolateQueries( - monitor, - endTime, - indexExecutionContext!!, - monitorMetadata, - inputRunResults, - docsToQueries, - updatedIndexNames, - concreteIndicesSeenSoFar, - ArrayList(fieldsToBeQueried), - shardIds.map { it.id }, - transformedDocs - ) { shard, maxSeqNo -> // function passed to update last run context with new max sequence number - indexExecutionContext.updatedLastRunContext[shard] = maxSeqNo - } - if (transformedDocs.isNotEmpty()) { - performPercolateQueryAndResetCounters( + fetchShardDataAndMaybeExecutePercolateQueries( monitor, + endTime, + indexExecutionContext!!, monitorMetadata, - updatedIndexNames, - concreteIndicesSeenSoFar, inputRunResults, docsToQueries, - transformedDocs - ) - } - monitorResult = monitorResult.copy(inputResults = InputRunResults(listOf(inputRunResults))) + updatedIndexNames, + concreteIndicesSeenSoFar, + ArrayList(fieldsToBeQueried), + shardIds.map { it.id }, + transformedDocs, + ) { shard, maxSeqNo -> + // function passed to update last run context with new max sequence number + indexExecutionContext.updatedLastRunContext[shard] = maxSeqNo + } + if (transformedDocs.isNotEmpty()) { + performPercolateQueryAndResetCounters( + monitor, + monitorMetadata, + updatedIndexNames, + concreteIndicesSeenSoFar, + inputRunResults, + docsToQueries, + transformedDocs, + ) + } + monitorResult = monitorResult.copy(inputResults = InputRunResults(listOf(inputRunResults))) /* populate the map queryToDocIds with pairs of this fixes the issue of passing id, name, tags fields of DocLevelQuery object correctly to TriggerExpressionParser 
*/ - queries.forEach { - if (inputRunResults.containsKey(it.id)) { - queryToDocIds[it] = inputRunResults[it.id]!! + queries.forEach { + if (inputRunResults.containsKey(it.id)) { + queryToDocIds[it] = inputRunResults[it.id]!! + } } - } - val idQueryMap: Map = queries.associateBy { it.id } + val idQueryMap: Map = queries.associateBy { it.id } - val triggerResults = mutableMapOf() - // If there are no triggers defined, we still want to generate findings - if (monitor.triggers.isEmpty()) { - if (dryrun == false && monitor.id != Monitor.NO_ID) { - createFindings(monitor, docsToQueries, idQueryMap, true) + val triggerResults = mutableMapOf() + // If there are no triggers defined, we still want to generate findings + if (monitor.triggers.isEmpty()) { + if (dryrun == false && monitor.id != Monitor.NO_ID) { + createFindings(monitor, docsToQueries, idQueryMap, true) + } + } else { + /* + * if should_persist_findings_and_alerts flag is not set, doc-level trigger generates alerts else doc-level trigger + * generates a single alert with multiple findings. + */ + if (monitor.shouldCreateSingleAlertForFindings == null || monitor.shouldCreateSingleAlertForFindings == false) { + monitor.triggers.forEach { + triggerResults[it.id] = + runForEachDocTrigger( + monitorResult, + it as DocumentLevelTrigger, + monitor, + idQueryMap, + docsToQueries, + queryToDocIds, + dryrun, + executionId = executionId, + findingIdToDocSource, + workflowRunContext = workflowRunContext, + ) + } + } else if (monitor.shouldCreateSingleAlertForFindings == true) { + monitor.triggers.forEach { + triggerResults[it.id] = + runForEachDocTriggerCreateSingleGroupedAlert( + monitorResult, + it as DocumentLevelTrigger, + monitor, + queryToDocIds, + dryrun, + executionId, + workflowRunContext, + ) + } + } } - } else { - /** - * if should_persist_findings_and_alerts flag is not set, doc-level trigger generates alerts else doc-level trigger - * generates a single alert with multiple findings. 
- */ - if (monitor.shouldCreateSingleAlertForFindings == null || monitor.shouldCreateSingleAlertForFindings == false) { - monitor.triggers.forEach { - triggerResults[it.id] = runForEachDocTrigger( - monitorResult, - it as DocumentLevelTrigger, - monitor, - idQueryMap, - docsToQueries, - queryToDocIds, - dryrun, + + if (!isTempMonitor) { + // If any error happened during trigger execution, upsert monitor error alert + val errorMessage = constructErrorMessageFromTriggerResults(triggerResults = triggerResults) + log.info(errorMessage) + if (errorMessage.isNotEmpty()) { + alertService.upsertMonitorErrorAlert( + monitor = monitor, + errorMessage = errorMessage, executionId = executionId, - findingIdToDocSource, - workflowRunContext = workflowRunContext + workflowRunContext, ) + } else { + onSuccessfulMonitorRun(monitor) } - } else if (monitor.shouldCreateSingleAlertForFindings == true) { - monitor.triggers.forEach { - triggerResults[it.id] = runForEachDocTriggerCreateSingleGroupedAlert( - monitorResult, - it as DocumentLevelTrigger, - monitor, - queryToDocIds, - dryrun, - executionId, - workflowRunContext + } + + listener.onResponse( + DocLevelMonitorFanOutResponse( + nodeId = clusterService.localNode().id, + executionId = request.executionId, + monitorId = monitor.id, + indexExecutionContext.updatedLastRunContext, + InputRunResults(listOf(inputRunResults)), + triggerResults, + ), + ) + val completedTime = System.currentTimeMillis() + val fanoutDuration = completedTime - startTime + log.info( + "Completed fan_out for doc level monitor ${request.monitor.id} in $fanoutDuration ms. ExecutionId: ${request.executionId}", + ) + } catch (e: Exception) { + log.error( + "${request.monitor.id} Failed to run fan_out on node ${clusterService.localNode().id}." 
+ + " for Monitor Type ${request.monitor.monitorType} ExecutionId ${request.executionId}", + e, + ) + listener.onFailure(AlertingException.wrap(e)) + } + } + + /** + * run doc-level triggers ignoring findings and alerts and generating a single alert. + */ + private suspend fun runForEachDocTriggerCreateSingleGroupedAlert( + monitorResult: MonitorRunResult, + trigger: DocumentLevelTrigger, + monitor: Monitor, + queryToDocIds: Map>, + dryrun: Boolean, + executionId: String, + workflowRunContext: WorkflowRunContext?, + ): DocumentLevelTriggerRunResult { + val triggerResult = triggerService.runDocLevelTrigger(monitor, trigger, queryToDocIds) + if (triggerResult.triggeredDocs.isNotEmpty()) { + val findingIds = + if (workflowRunContext?.findingIds != null) { + workflowRunContext.findingIds + } else { + listOf() + } + val triggerCtx = DocumentLevelTriggerExecutionContext(monitor, trigger) + val alert = + alertService.composeDocLevelAlert( + findingIds!!, + triggerResult.triggeredDocs, + triggerCtx, + monitorResult.alertError() ?: triggerResult.alertError(), + executionId = executionId, + workflorwRunContext = workflowRunContext, + ) + for (action in trigger.actions) { + this.runAction(action, triggerCtx.copy(alerts = listOf(AlertContext(alert))), monitor, dryrun) + } + + if (!dryrun && monitor.id != Monitor.NO_ID) { + val actionResults = triggerResult.actionResultsMap.getOrDefault(alert.id, emptyMap()) + val actionExecutionResults = + actionResults.values.map { actionRunResult -> + ActionExecutionResult( + actionRunResult.actionId, + actionRunResult.executionTime, + if (actionRunResult.throttled) 1 else 0, + ) + } + val updatedAlert = alert.copy(actionExecutionResults = actionExecutionResults) + + retryPolicy.let { + alertService.saveAlerts( + monitor.dataSources, + listOf(updatedAlert), + it, + routingId = monitor.id, ) } } } + return DocumentLevelTriggerRunResult(trigger.name, listOf(), monitorResult.error) + } + + private suspend fun runForEachDocTrigger( + 
monitorResult: MonitorRunResult, + trigger: DocumentLevelTrigger, + monitor: Monitor, + idQueryMap: Map, + docsToQueries: MutableMap>, + queryToDocIds: Map>, + dryrun: Boolean, + executionId: String, + findingIdToDocSource: MutableMap, + workflowRunContext: WorkflowRunContext?, + ): DocumentLevelTriggerRunResult { + val triggerCtx = DocumentLevelTriggerExecutionContext(monitor, trigger) + val triggerResult = triggerService.runDocLevelTrigger(monitor, trigger, queryToDocIds) + + val triggerFindingDocPairs = mutableListOf>() + + // TODO: Implement throttling for findings + val findingToDocPairs = + createFindings( + monitor, + docsToQueries, + idQueryMap, + !dryrun && monitor.id != Monitor.NO_ID, + executionId, + ) + + findingToDocPairs.forEach { + // Only pick those entries whose docs have triggers associated with them + if (triggerResult.triggeredDocs.contains(it.second)) { + triggerFindingDocPairs.add(Pair(it.first, it.second)) + } + } + + val actionCtx = + triggerCtx.copy( + triggeredDocs = triggerResult.triggeredDocs, + relatedFindings = findingToDocPairs.map { it.first }, + error = monitorResult.error ?: triggerResult.error, + ) + + if (printsSampleDocData(trigger) && triggerFindingDocPairs.isNotEmpty()) { + getDocSources( + findingToDocPairs = findingToDocPairs, + monitor = monitor, + findingIdToDocSource = findingIdToDocSource, + ) + } - if (!isTempMonitor) { - // If any error happened during trigger execution, upsert monitor error alert - val errorMessage = constructErrorMessageFromTriggerResults(triggerResults = triggerResults) - log.info(errorMessage) - if (errorMessage.isNotEmpty()) { - alertService.upsertMonitorErrorAlert( - monitor = monitor, - errorMessage = errorMessage, + val alerts = mutableListOf() + val alertContexts = mutableListOf() + triggerFindingDocPairs.forEach { + val alert = + alertService.composeDocLevelAlert( + listOf(it.first), + listOf(it.second), + triggerCtx, + monitorResult.alertError() ?: triggerResult.alertError(), executionId = 
executionId, - workflowRunContext + workflorwRunContext = workflowRunContext, ) - } else { - onSuccessfulMonitorRun(monitor) - } + alerts.add(alert) + + val docSource = findingIdToDocSource[alert.findingIds.first()]?.response?.convertToMap() + + alertContexts.add( + AlertContext( + alert = alert, + associatedQueries = + alert.findingIds.flatMap { findingId -> + findingsToTriggeredQueries.getOrDefault(findingId, emptyList()) ?: emptyList() + }, + sampleDocs = listOfNotNull(docSource), + ), + ) } - listener.onResponse( - DocLevelMonitorFanOutResponse( - nodeId = clusterService.localNode().id, - executionId = request.executionId, + val shouldDefaultToPerExecution = + defaultToPerExecutionAction( + maxActionableAlertCount, monitorId = monitor.id, - indexExecutionContext.updatedLastRunContext, - InputRunResults(listOf(inputRunResults)), - triggerResults + triggerId = trigger.id, + totalActionableAlertCount = alerts.size, + monitorOrTriggerError = actionCtx.error, ) - ) - val completedTime = System.currentTimeMillis() - val fanoutDuration = completedTime - startTime - log.info( - "Completed fan_out for doc level monitor ${request.monitor.id} in $fanoutDuration ms. ExecutionId: ${request.executionId}" - ) - } catch (e: Exception) { - log.error( - "${request.monitor.id} Failed to run fan_out on node ${clusterService.localNode().id}." + - " for Monitor Type ${request.monitor.monitorType} ExecutionId ${request.executionId}", - e - ) - listener.onFailure(AlertingException.wrap(e)) - } - } - /** - * run doc-level triggers ignoring findings and alerts and generating a single alert. - */ - private suspend fun runForEachDocTriggerCreateSingleGroupedAlert( - monitorResult: MonitorRunResult, - trigger: DocumentLevelTrigger, - monitor: Monitor, - queryToDocIds: Map>, - dryrun: Boolean, - executionId: String, - workflowRunContext: WorkflowRunContext? 
- ): DocumentLevelTriggerRunResult { - val triggerResult = triggerService.runDocLevelTrigger(monitor, trigger, queryToDocIds) - if (triggerResult.triggeredDocs.isNotEmpty()) { - val findingIds = if (workflowRunContext?.findingIds != null) { - workflowRunContext.findingIds - } else { - listOf() - } - val triggerCtx = DocumentLevelTriggerExecutionContext(monitor, trigger) - val alert = alertService.composeDocLevelAlert( - findingIds!!, - triggerResult.triggeredDocs, - triggerCtx, - monitorResult.alertError() ?: triggerResult.alertError(), - executionId = executionId, - workflorwRunContext = workflowRunContext - ) for (action in trigger.actions) { - this.runAction(action, triggerCtx.copy(alerts = listOf(AlertContext(alert))), monitor, dryrun) + val actionExecutionScope = action.getActionExecutionPolicy(monitor)!!.actionExecutionScope + if (actionExecutionScope is PerAlertActionScope && !shouldDefaultToPerExecution) { + for (alertContext in alertContexts) { + val actionResults = this.runAction(action, actionCtx.copy(alerts = listOf(alertContext)), monitor, dryrun) + triggerResult.actionResultsMap.getOrPut(alertContext.alert.id) { mutableMapOf() } + triggerResult.actionResultsMap[alertContext.alert.id]?.set(action.id, actionResults) + } + } else if (alertContexts.isNotEmpty()) { + val actionResults = this.runAction(action, actionCtx.copy(alerts = alertContexts), monitor, dryrun) + for (alert in alerts) { + triggerResult.actionResultsMap.getOrPut(alert.id) { mutableMapOf() } + triggerResult.actionResultsMap[alert.id]?.set(action.id, actionResults) + } + } } + // Alerts are saved after the actions since if there are failures in the actions, they can be stated in the alert if (!dryrun && monitor.id != Monitor.NO_ID) { - val actionResults = triggerResult.actionResultsMap.getOrDefault(alert.id, emptyMap()) - val actionExecutionResults = actionResults.values.map { actionRunResult -> - ActionExecutionResult(actionRunResult.actionId, actionRunResult.executionTime, if 
(actionRunResult.throttled) 1 else 0) - } - val updatedAlert = alert.copy(actionExecutionResults = actionExecutionResults) + val updatedAlerts = + alerts.map { alert -> + val actionResults = triggerResult.actionResultsMap.getOrDefault(alert.id, emptyMap()) + val actionExecutionResults = + actionResults.values.map { actionRunResult -> + ActionExecutionResult( + actionRunResult.actionId, + actionRunResult.executionTime, + if (actionRunResult.throttled) 1 else 0, + ) + } + alert.copy(actionExecutionResults = actionExecutionResults) + } retryPolicy.let { alertService.saveAlerts( monitor.dataSources, - listOf(updatedAlert), + updatedAlerts, it, - routingId = monitor.id + routingId = monitor.id, ) } } + return triggerResult } - return DocumentLevelTriggerRunResult(trigger.name, listOf(), monitorResult.error) - } - private suspend fun runForEachDocTrigger( - monitorResult: MonitorRunResult, - trigger: DocumentLevelTrigger, - monitor: Monitor, - idQueryMap: Map, - docsToQueries: MutableMap>, - queryToDocIds: Map>, - dryrun: Boolean, - executionId: String, - findingIdToDocSource: MutableMap, - workflowRunContext: WorkflowRunContext? - ): DocumentLevelTriggerRunResult { - val triggerCtx = DocumentLevelTriggerExecutionContext(monitor, trigger) - val triggerResult = triggerService.runDocLevelTrigger(monitor, trigger, queryToDocIds) - - val triggerFindingDocPairs = mutableListOf>() - - // TODO: Implement throttling for findings - val findingToDocPairs = createFindings( - monitor, - docsToQueries, - idQueryMap, - !dryrun && monitor.id != Monitor.NO_ID, - executionId - ) - - findingToDocPairs.forEach { - // Only pick those entries whose docs have triggers associated with them - if (triggerResult.triggeredDocs.contains(it.second)) { - triggerFindingDocPairs.add(Pair(it.first, it.second)) + /** + * 1. Bulk index all findings based on shouldCreateFinding flag + * 2. invoke publishFinding() to kickstart auto-correlations + * 3. 
Returns a list of pairs for finding id to doc id + */ + private suspend fun createFindings( + monitor: Monitor, + docsToQueries: MutableMap>, + idQueryMap: Map, + shouldCreateFinding: Boolean, + workflowExecutionId: String? = null, + ): List> { + val findingDocPairs = mutableListOf>() + val findings = mutableListOf() + val indexRequests = mutableListOf() + val findingsToTriggeredQueries = mutableMapOf>() + + docsToQueries.forEach { + val triggeredQueries = it.value.map { queryId -> idQueryMap[queryId]!! } + + // Before the "|" is the doc id and after the "|" is the index + val docIndex = it.key.split("|") + + val finding = + Finding( + id = UUID.randomUUID().toString(), + relatedDocIds = listOf(docIndex[0]), + correlatedDocIds = listOf(docIndex[0]), + monitorId = monitor.id, + monitorName = monitor.name, + index = docIndex[1], + docLevelQueries = triggeredQueries, + timestamp = Instant.now(), + executionId = workflowExecutionId, + ) + findingDocPairs.add(Pair(finding.id, it.key)) + findings.add(finding) + findingsToTriggeredQueries[finding.id] = triggeredQueries + + val findingStr = + finding + .toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS) + .string() + log.trace("Findings: $findingStr") + + if (shouldCreateFinding and ( + monitor.shouldCreateSingleAlertForFindings == null || + monitor.shouldCreateSingleAlertForFindings == false + ) + ) { + indexRequests += + IndexRequest(monitor.dataSources.findingsIndex) + .source(findingStr, XContentType.JSON) + .id(finding.id) + .opType(DocWriteRequest.OpType.CREATE) + } } - } - - val actionCtx = triggerCtx.copy( - triggeredDocs = triggerResult.triggeredDocs, - relatedFindings = findingToDocPairs.map { it.first }, - error = monitorResult.error ?: triggerResult.error - ) - - if (printsSampleDocData(trigger) && triggerFindingDocPairs.isNotEmpty()) - getDocSources( - findingToDocPairs = findingToDocPairs, - monitor = monitor, - findingIdToDocSource = findingIdToDocSource - ) - - val 
alerts = mutableListOf() - val alertContexts = mutableListOf() - triggerFindingDocPairs.forEach { - val alert = alertService.composeDocLevelAlert( - listOf(it.first), - listOf(it.second), - triggerCtx, - monitorResult.alertError() ?: triggerResult.alertError(), - executionId = executionId, - workflorwRunContext = workflowRunContext - ) - alerts.add(alert) - - val docSource = findingIdToDocSource[alert.findingIds.first()]?.response?.convertToMap() - - alertContexts.add( - AlertContext( - alert = alert, - associatedQueries = alert.findingIds.flatMap { findingId -> - findingsToTriggeredQueries.getOrDefault(findingId, emptyList()) ?: emptyList() - }, - sampleDocs = listOfNotNull(docSource) - ) - ) - } - val shouldDefaultToPerExecution = defaultToPerExecutionAction( - maxActionableAlertCount, - monitorId = monitor.id, - triggerId = trigger.id, - totalActionableAlertCount = alerts.size, - monitorOrTriggerError = actionCtx.error - ) - - for (action in trigger.actions) { - val actionExecutionScope = action.getActionExecutionPolicy(monitor)!!.actionExecutionScope - if (actionExecutionScope is PerAlertActionScope && !shouldDefaultToPerExecution) { - for (alertContext in alertContexts) { - val actionResults = this.runAction(action, actionCtx.copy(alerts = listOf(alertContext)), monitor, dryrun) - triggerResult.actionResultsMap.getOrPut(alertContext.alert.id) { mutableMapOf() } - triggerResult.actionResultsMap[alertContext.alert.id]?.set(action.id, actionResults) - } - } else if (alertContexts.isNotEmpty()) { - val actionResults = this.runAction(action, actionCtx.copy(alerts = alertContexts), monitor, dryrun) - for (alert in alerts) { - triggerResult.actionResultsMap.getOrPut(alert.id) { mutableMapOf() } - triggerResult.actionResultsMap[alert.id]?.set(action.id, actionResults) - } + if (indexRequests.isNotEmpty()) { + bulkIndexFindings(monitor, indexRequests) } - } - // Alerts are saved after the actions since if there are failures in the actions, they can be stated in the 
alert - if (!dryrun && monitor.id != Monitor.NO_ID) { - val updatedAlerts = alerts.map { alert -> - val actionResults = triggerResult.actionResultsMap.getOrDefault(alert.id, emptyMap()) - val actionExecutionResults = actionResults.values.map { actionRunResult -> - ActionExecutionResult(actionRunResult.actionId, actionRunResult.executionTime, if (actionRunResult.throttled) 1 else 0) + if (monitor.shouldCreateSingleAlertForFindings == null || monitor.shouldCreateSingleAlertForFindings == false) { + try { + findings.forEach { finding -> + publishFinding(monitor, finding) + } + } catch (e: Exception) { + // suppress exception + log.error("Optional finding callback failed", e) } - alert.copy(actionExecutionResults = actionExecutionResults) } + this.findingsToTriggeredQueries += findingsToTriggeredQueries - retryPolicy.let { - alertService.saveAlerts( - monitor.dataSources, - updatedAlerts, - it, - routingId = monitor.id - ) - } + return findingDocPairs } - return triggerResult - } - /** - * 1. Bulk index all findings based on shouldCreateFinding flag - * 2. invoke publishFinding() to kickstart auto-correlations - * 3. Returns a list of pairs for finding id to doc id - */ - private suspend fun createFindings( - monitor: Monitor, - docsToQueries: MutableMap>, - idQueryMap: Map, - shouldCreateFinding: Boolean, - workflowExecutionId: String? = null, - ): List> { - - val findingDocPairs = mutableListOf>() - val findings = mutableListOf() - val indexRequests = mutableListOf() - val findingsToTriggeredQueries = mutableMapOf>() - - docsToQueries.forEach { - val triggeredQueries = it.value.map { queryId -> idQueryMap[queryId]!! 
} - - // Before the "|" is the doc id and after the "|" is the index - val docIndex = it.key.split("|") - - val finding = Finding( - id = UUID.randomUUID().toString(), - relatedDocIds = listOf(docIndex[0]), - correlatedDocIds = listOf(docIndex[0]), - monitorId = monitor.id, - monitorName = monitor.name, - index = docIndex[1], - docLevelQueries = triggeredQueries, - timestamp = Instant.now(), - executionId = workflowExecutionId - ) - findingDocPairs.add(Pair(finding.id, it.key)) - findings.add(finding) - findingsToTriggeredQueries[finding.id] = triggeredQueries - - val findingStr = - finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS) - .string() - log.trace("Findings: $findingStr") - - if (shouldCreateFinding and ( - monitor.shouldCreateSingleAlertForFindings == null || - monitor.shouldCreateSingleAlertForFindings == false - ) - ) { - indexRequests += IndexRequest(monitor.dataSources.findingsIndex) - .source(findingStr, XContentType.JSON) - .id(finding.id) - .opType(DocWriteRequest.OpType.CREATE) + private suspend fun bulkIndexFindings( + monitor: Monitor, + indexRequests: List, + ) { + indexRequests.chunked(findingsIndexBatchSize).forEach { batch -> + val bulkResponse: BulkResponse = + client.suspendUntil { + bulk(BulkRequest().add(batch), it) + } + if (bulkResponse.hasFailures()) { + bulkResponse.items.forEach { item -> + if (item.isFailed) { + log.error("Failed indexing the finding ${item.id} of monitor [${monitor.id}]") + } + } + } else { + log.debug("[${bulkResponse.items.size}] All findings successfully indexed.") + } } + client.execute(RefreshAction.INSTANCE, RefreshRequest(monitor.dataSources.findingsIndex)) } - if (indexRequests.isNotEmpty()) { - bulkIndexFindings(monitor, indexRequests) + private fun publishFinding( + monitor: Monitor, + finding: Finding, + ) { + val publishFindingsRequest = PublishFindingsRequest(monitor.id, finding) + AlertingPluginInterface.publishFinding( + client as NodeClient, + 
publishFindingsRequest, + object : ActionListener { + override fun onResponse(response: SubscribeFindingsResponse) {} + + override fun onFailure(e: Exception) {} + }, + ) } - if (monitor.shouldCreateSingleAlertForFindings == null || monitor.shouldCreateSingleAlertForFindings == false) { - try { - findings.forEach { finding -> - publishFinding(monitor, finding) + suspend fun runAction( + action: Action, + ctx: TriggerExecutionContext, + monitor: Monitor, + dryrun: Boolean, + ): ActionRunResult { + return try { + if (ctx is QueryLevelTriggerExecutionContext && !MonitorRunnerService.isActionActionable(action, ctx.alert?.alert)) { + return ActionRunResult(action.id, action.name, mapOf(), true, null, null) } - } catch (e: Exception) { - // suppress exception - log.error("Optional finding callback failed", e) - } - } - this.findingsToTriggeredQueries += findingsToTriggeredQueries - - return findingDocPairs - } - - private suspend fun bulkIndexFindings( - monitor: Monitor, - indexRequests: List - ) { - indexRequests.chunked(findingsIndexBatchSize).forEach { batch -> - val bulkResponse: BulkResponse = client.suspendUntil { - bulk(BulkRequest().add(batch), it) - } - if (bulkResponse.hasFailures()) { - bulkResponse.items.forEach { item -> - if (item.isFailed) { - log.error("Failed indexing the finding ${item.id} of monitor [${monitor.id}]") + val actionOutput = mutableMapOf() + actionOutput[Action.SUBJECT] = + if (action.subjectTemplate != null) { + compileTemplate(action.subjectTemplate!!, ctx) + } else { + "" + } + actionOutput[Action.MESSAGE] = compileTemplate(action.messageTemplate, ctx) + if (Strings.isNullOrEmpty(actionOutput[Action.MESSAGE])) { + throw IllegalStateException("Message content missing in the Destination with id: ${action.destinationId}") + } + if (!dryrun) { + client.threadPool().threadContext.stashContext().use { + withClosableContext( + InjectorContextElement( + monitor.id, + settings, + client.threadPool().threadContext, + monitor.user?.roles, + 
monitor.user, + ), + ) { + actionOutput[Action.MESSAGE_ID] = + getConfigAndSendNotification( + action, + actionOutput[Action.SUBJECT], + actionOutput[Action.MESSAGE]!!, + ) + } } } - } else { - log.debug("[${bulkResponse.items.size}] All findings successfully indexed.") + ActionRunResult( + action.id, + action.name, + actionOutput, + false, + Instant.ofEpochMilli(client.threadPool().absoluteTimeInMillis()), + null, + ) + } catch (e: Exception) { + ActionRunResult(action.id, action.name, mapOf(), false, Instant.ofEpochMilli(client.threadPool().absoluteTimeInMillis()), e) } } - client.execute(RefreshAction.INSTANCE, RefreshRequest(monitor.dataSources.findingsIndex)) - } - private fun publishFinding( - monitor: Monitor, - finding: Finding - ) { - val publishFindingsRequest = PublishFindingsRequest(monitor.id, finding) - AlertingPluginInterface.publishFinding( - client as NodeClient, - publishFindingsRequest, - object : ActionListener { - override fun onResponse(response: SubscribeFindingsResponse) {} - - override fun onFailure(e: Exception) {} + protected suspend fun getConfigAndSendNotification( + action: Action, + subject: String?, + message: String, + ): String { + val config = getConfigForNotificationAction(action) + if (config.destination == null && config.channel == null) { + throw IllegalStateException("Unable to find a Notification Channel or Destination config with id [${action.destinationId}]") } - ) - } - suspend fun runAction( - action: Action, - ctx: TriggerExecutionContext, - monitor: Monitor, - dryrun: Boolean - ): ActionRunResult { - return try { - if (ctx is QueryLevelTriggerExecutionContext && !MonitorRunnerService.isActionActionable(action, ctx.alert?.alert)) { - return ActionRunResult(action.id, action.name, mapOf(), true, null, null) - } - val actionOutput = mutableMapOf() - actionOutput[Action.SUBJECT] = if (action.subjectTemplate != null) - compileTemplate(action.subjectTemplate!!, ctx) - else "" - actionOutput[Action.MESSAGE] = 
compileTemplate(action.messageTemplate, ctx) - if (Strings.isNullOrEmpty(actionOutput[Action.MESSAGE])) { - throw IllegalStateException("Message content missing in the Destination with id: ${action.destinationId}") + // Adding a check on TEST_ACTION Destination type here to avoid supporting it as a LegacyBaseMessage type + // just for Alerting integration tests + if (config.destination?.isTestAction() == true) { + return "test action" } - if (!dryrun) { - client.threadPool().threadContext.stashContext().use { - withClosableContext( - InjectorContextElement( - monitor.id, - settings, - client.threadPool().threadContext, - monitor.user?.roles, - monitor.user - ) - ) { - actionOutput[Action.MESSAGE_ID] = getConfigAndSendNotification( - action, - actionOutput[Action.SUBJECT], - actionOutput[Action.MESSAGE]!! - ) - } - } + + if (config.destination?.isAllowed(allowList) == false) { + throw IllegalStateException( + "Monitor contains a Destination type that is not allowed: ${config.destination.type}", + ) } - ActionRunResult( - action.id, - action.name, - actionOutput, - false, - Instant.ofEpochMilli(client.threadPool().absoluteTimeInMillis()), - null - ) - } catch (e: Exception) { - ActionRunResult(action.id, action.name, mapOf(), false, Instant.ofEpochMilli(client.threadPool().absoluteTimeInMillis()), e) - } - } - protected suspend fun getConfigAndSendNotification( - action: Action, - subject: String?, - message: String - ): String { - val config = getConfigForNotificationAction(action) - if (config.destination == null && config.channel == null) { - throw IllegalStateException("Unable to find a Notification Channel or Destination config with id [${action.destinationId}]") - } + var actionResponseContent = "" + actionResponseContent = config.channel + ?.sendNotification( + client, + config.channel.getTitle(subject), + message, + ) ?: actionResponseContent - // Adding a check on TEST_ACTION Destination type here to avoid supporting it as a LegacyBaseMessage type - // just 
for Alerting integration tests - if (config.destination?.isTestAction() == true) { - return "test action" - } + actionResponseContent = config.destination + ?.buildLegacyBaseMessage(subject, message, getDestinationContextFactory().getDestinationContext(config.destination)) + ?.publishLegacyNotification(client) + ?: actionResponseContent - if (config.destination?.isAllowed(allowList) == false) { - throw IllegalStateException( - "Monitor contains a Destination type that is not allowed: ${config.destination.type}" - ) + return actionResponseContent } - var actionResponseContent = "" - actionResponseContent = config.channel - ?.sendNotification( - client, - config.channel.getTitle(subject), - message - ) ?: actionResponseContent + private fun isFanOutTimeEnded(endTime: Instant): Boolean = Instant.now().isAfter(endTime) + + /** 1. Fetch data per shard for given index. (only 10000 docs are fetched. + * needs to be converted to scroll if not performant enough) + * 2. Transform documents to conform to format required for percolate query + * 3a. Check if docs in memory are crossing threshold defined by setting. + * 3b. 
If yes, perform percolate query and update docToQueries Map with all hits from percolate queries */ + suspend fun fetchShardDataAndMaybeExecutePercolateQueries( // package-private for testing visibility + monitor: Monitor, + endTime: Instant, + indexExecutionCtx: IndexExecutionContext, + monitorMetadata: MonitorMetadata, + inputRunResults: MutableMap>, + docsToQueries: MutableMap>, + monitorInputIndices: List, + concreteIndices: List, + fieldsToBeQueried: List, + shardList: List, + transformedDocs: MutableList>, + updateLastRunContext: (String, Long) -> Unit, + ) { + for (shardId in shardList) { + val shard = shardId.toString() + try { + val prevSeqNo = indexExecutionCtx.lastRunContext[shard].toString().toLongOrNull() + val from = prevSeqNo ?: SequenceNumbers.NO_OPS_PERFORMED + if (isFanOutTimeEnded(endTime)) { + log.info( + "Doc level monitor ${monitor.id}: " + + "Fanout execution on node ${clusterService.localNode().id}" + + " unable to complete in $docLevelMonitorFanoutMaxDuration." + + " Skipping shard [${indexExecutionCtx.concreteIndexName}][$shardId]", + ) + continue + } + // First get the max sequence number for the shard + val maxSeqNo = + getMaxSeqNoForShard( + monitor, + indexExecutionCtx.concreteIndexName, + shard, + indexExecutionCtx.docIds, + ) - actionResponseContent = config.destination - ?.buildLegacyBaseMessage(subject, message, getDestinationContextFactory().getDestinationContext(config.destination)) - ?.publishLegacyNotification(client) - ?: actionResponseContent + if (maxSeqNo == null || maxSeqNo <= from) { + // No new documents to process + updateLastRunContext(shard, (prevSeqNo ?: SequenceNumbers.NO_OPS_PERFORMED)) + continue + } + // Process documents in chunks between prevSeqNo and maxSeqNo + var currentSeqNo = from + while (currentSeqNo < maxSeqNo) { + if (isFanOutTimeEnded(endTime)) { // process percolate queries and exit + if ( + transformedDocs.isNotEmpty() && + shouldPerformPercolateQueryAndFlushInMemoryDocs(transformedDocs.size) + ) { 
+ performPercolateQueryAndResetCounters( + monitor, + monitorMetadata, + monitorInputIndices, + concreteIndices, + inputRunResults, + docsToQueries, + transformedDocs, + ) + } + log.info( + "Doc level monitor ${monitor.id}: " + + "Fanout execution on node ${clusterService.localNode().id}" + + " unable to complete in $docLevelMonitorFanoutMaxDuration!! Gracefully exiting." + + "FanoutShardStats: shard[${indexExecutionCtx.concreteIndexName}][$shardId], " + + "start_seq_no[$from], current_seq_no[$currentSeqNo], max_seq_no[$maxSeqNo]", + ) + break + } + val hits = + searchShard( + monitor, + indexExecutionCtx.concreteIndexName, + shard, + currentSeqNo, + maxSeqNo, + indexExecutionCtx.docIds, + fieldsToBeQueried, + ) - return actionResponseContent - } + if (hits.hits.isEmpty()) { + break + } - private fun isFanOutTimeEnded(endTime: Instant): Boolean { - return Instant.now().isAfter(endTime) - } + val startTime = System.currentTimeMillis() + val newDocs = + transformSearchHitsAndReconstructDocs( + hits, + indexExecutionCtx.indexName, + indexExecutionCtx.concreteIndexName, + monitor.id, + indexExecutionCtx.conflictingFields, + ) - /** 1. Fetch data per shard for given index. (only 10000 docs are fetched. - * needs to be converted to scroll if not performant enough) - * 2. Transform documents to conform to format required for percolate query - * 3a. Check if docs in memory are crossing threshold defined by setting. - * 3b. 
If yes, perform percolate query and update docToQueries Map with all hits from percolate queries */ - suspend fun fetchShardDataAndMaybeExecutePercolateQueries( // package-private for testing visibility - monitor: Monitor, - endTime: Instant, - indexExecutionCtx: IndexExecutionContext, - monitorMetadata: MonitorMetadata, - inputRunResults: MutableMap>, - docsToQueries: MutableMap>, - monitorInputIndices: List, - concreteIndices: List, - fieldsToBeQueried: List, - shardList: List, - transformedDocs: MutableList>, - updateLastRunContext: (String, Long) -> Unit - ) { - for (shardId in shardList) { - val shard = shardId.toString() - try { - val prevSeqNo = indexExecutionCtx.lastRunContext[shard].toString().toLongOrNull() - val from = prevSeqNo ?: SequenceNumbers.NO_OPS_PERFORMED - if (isFanOutTimeEnded(endTime)) { - log.info( - "Doc level monitor ${monitor.id}: " + - "Fanout execution on node ${clusterService.localNode().id}" + - " unable to complete in $docLevelMonitorFanoutMaxDuration." + - " Skipping shard [${indexExecutionCtx.concreteIndexName}][$shardId]" - ) - continue - } - // First get the max sequence number for the shard - val maxSeqNo = getMaxSeqNoForShard( - monitor, - indexExecutionCtx.concreteIndexName, - shard, - indexExecutionCtx.docIds - ) + transformedDocs.addAll(newDocs) - if (maxSeqNo == null || maxSeqNo <= from) { - // No new documents to process - updateLastRunContext(shard, (prevSeqNo ?: SequenceNumbers.NO_OPS_PERFORMED)) - continue - } - // Process documents in chunks between prevSeqNo and maxSeqNo - var currentSeqNo = from - while (currentSeqNo < maxSeqNo) { - if (isFanOutTimeEnded(endTime)) { // process percolate queries and exit if ( transformedDocs.isNotEmpty() && shouldPerformPercolateQueryAndFlushInMemoryDocs(transformedDocs.size) @@ -817,581 +907,557 @@ class TransportDocLevelMonitorFanOutAction concreteIndices, inputRunResults, docsToQueries, - transformedDocs + transformedDocs, ) } - log.info( - "Doc level monitor ${monitor.id}: " + - 
"Fanout execution on node ${clusterService.localNode().id}" + - " unable to complete in $docLevelMonitorFanoutMaxDuration!! Gracefully exiting." + - "FanoutShardStats: shard[${indexExecutionCtx.concreteIndexName}][$shardId], " + - "start_seq_no[$from], current_seq_no[$currentSeqNo], max_seq_no[$maxSeqNo]" - ) - break - } - val hits = searchShard( - monitor, - indexExecutionCtx.concreteIndexName, - shard, - currentSeqNo, - maxSeqNo, - indexExecutionCtx.docIds, - fieldsToBeQueried - ) + docTransformTimeTakenStat += System.currentTimeMillis() - startTime - if (hits.hits.isEmpty()) { - break + // Move to next chunk - use the last document's sequence number + currentSeqNo = hits.hits.last().seqNo + // update last seen sequence number after every set of seen docs + updateLastRunContext(shard, currentSeqNo) } - - val startTime = System.currentTimeMillis() - val newDocs = transformSearchHitsAndReconstructDocs( - hits, - indexExecutionCtx.indexName, - indexExecutionCtx.concreteIndexName, - monitor.id, - indexExecutionCtx.conflictingFields, + } catch (e: Exception) { + log.error( + "Monitor ${monitor.id} :" + + "Failed to run fetch data from shard [$shard] of index [${indexExecutionCtx.concreteIndexName}]. 
" + + "Error: ${e.message}", + e, ) - - transformedDocs.addAll(newDocs) - - if ( - transformedDocs.isNotEmpty() && - shouldPerformPercolateQueryAndFlushInMemoryDocs(transformedDocs.size) - ) { - performPercolateQueryAndResetCounters( - monitor, - monitorMetadata, - monitorInputIndices, - concreteIndices, - inputRunResults, - docsToQueries, - transformedDocs - ) + if (e is IndexClosedException) { + throw e } - docTransformTimeTakenStat += System.currentTimeMillis() - startTime - - // Move to next chunk - use the last document's sequence number - currentSeqNo = hits.hits.last().seqNo - // update last seen sequence number after every set of seen docs - updateLastRunContext(shard, currentSeqNo) } - } catch (e: Exception) { - log.error( - "Monitor ${monitor.id} :" + - "Failed to run fetch data from shard [$shard] of index [${indexExecutionCtx.concreteIndexName}]. " + - "Error: ${e.message}", - e - ) - if (e is IndexClosedException) { - throw e + if ( + transformedDocs.isNotEmpty() && + shouldPerformPercolateQueryAndFlushInMemoryDocs(transformedDocs.size) + ) { + performPercolateQueryAndResetCounters( + monitor, + monitorMetadata, + monitorInputIndices, + concreteIndices, + inputRunResults, + docsToQueries, + transformedDocs, + ) } } - if ( - transformedDocs.isNotEmpty() && - shouldPerformPercolateQueryAndFlushInMemoryDocs(transformedDocs.size) - ) { - performPercolateQueryAndResetCounters( - monitor, - monitorMetadata, - monitorInputIndices, - concreteIndices, - inputRunResults, - docsToQueries, - transformedDocs - ) - } } - } - private suspend fun performPercolateQueryAndResetCounters( - monitor: Monitor, - monitorMetadata: MonitorMetadata, - monitorInputIndices: List, - concreteIndices: List, - inputRunResults: MutableMap>, - docsToQueries: MutableMap>, - transformedDocs: MutableList> - ) { - try { - val percolateQueryResponseHits = runPercolateQueryOnTransformedDocs( - transformedDocs, - monitor, - monitorMetadata, - concreteIndices, - monitorInputIndices, - ) + 
private suspend fun performPercolateQueryAndResetCounters( + monitor: Monitor, + monitorMetadata: MonitorMetadata, + monitorInputIndices: List, + concreteIndices: List, + inputRunResults: MutableMap>, + docsToQueries: MutableMap>, + transformedDocs: MutableList>, + ) { + try { + val percolateQueryResponseHits = + runPercolateQueryOnTransformedDocs( + transformedDocs, + monitor, + monitorMetadata, + concreteIndices, + monitorInputIndices, + ) - percolateQueryResponseHits.forEach { hit -> - var id = hit.id - concreteIndices.forEach { id = id.replace("_${it}_${monitor.id}", "") } - monitorInputIndices.forEach { id = id.replace("_${it}_${monitor.id}", "") } - val docIndices = hit.field("_percolator_document_slot").values.map { it.toString().toInt() } - docIndices.forEach { idx -> - val docIndex = "${transformedDocs[idx].first}|${transformedDocs[idx].second.concreteIndexName}" - inputRunResults.getOrPut(id) { mutableSetOf() }.add(docIndex) - docsToQueries.getOrPut(docIndex) { mutableListOf() }.add(id) + percolateQueryResponseHits.forEach { hit -> + var id = hit.id + concreteIndices.forEach { id = id.replace("_${it}_${monitor.id}", "") } + monitorInputIndices.forEach { id = id.replace("_${it}_${monitor.id}", "") } + val docIndices = hit.field("_percolator_document_slot").values.map { it.toString().toInt() } + docIndices.forEach { idx -> + val docIndex = "${transformedDocs[idx].first}|${transformedDocs[idx].second.concreteIndexName}" + inputRunResults.getOrPut(id) { mutableSetOf() }.add(docIndex) + docsToQueries.getOrPut(docIndex) { mutableListOf() }.add(id) + } } + totalDocsQueriedStat += transformedDocs.size.toLong() + } finally { + transformedDocs.clear() + docsSizeOfBatchInBytes = 0 } - totalDocsQueriedStat += transformedDocs.size.toLong() - } finally { - transformedDocs.clear() - docsSizeOfBatchInBytes = 0 } - } - private suspend fun getMaxSeqNoForShard( - monitor: Monitor, - index: String, - shard: String, - docIds: List? = null - ): Long? 
{ - val boolQueryBuilder = BoolQueryBuilder() - - if (monitor.shouldCreateSingleAlertForFindings == null || monitor.shouldCreateSingleAlertForFindings == false) { - if (!docIds.isNullOrEmpty()) { - boolQueryBuilder.filter(QueryBuilders.termsQuery("_id", docIds)) - } - } else if (monitor.shouldCreateSingleAlertForFindings == true) { - val docIdsParam = mutableListOf() - if (docIds != null) { - docIdsParam.addAll(docIds) + private suspend fun getMaxSeqNoForShard( + monitor: Monitor, + index: String, + shard: String, + docIds: List? = null, + ): Long? { + val boolQueryBuilder = BoolQueryBuilder() + + if (monitor.shouldCreateSingleAlertForFindings == null || monitor.shouldCreateSingleAlertForFindings == false) { + if (!docIds.isNullOrEmpty()) { + boolQueryBuilder.filter(QueryBuilders.termsQuery("_id", docIds)) + } + } else if (monitor.shouldCreateSingleAlertForFindings == true) { + val docIdsParam = mutableListOf() + if (docIds != null) { + docIdsParam.addAll(docIds) + } + boolQueryBuilder.filter(QueryBuilders.termsQuery("_id", docIdsParam)) } - boolQueryBuilder.filter(QueryBuilders.termsQuery("_id", docIdsParam)) - } - - val request = SearchRequest() - .indices(index) - .preference("_shards:$shard") - .source( - SearchSourceBuilder() - .size(1) - .sort("_seq_no", SortOrder.DESC) - .seqNoAndPrimaryTerm(true) - .query(boolQueryBuilder) - ) - val response: SearchResponse = client.suspendUntil { client.search(request, it) } - if (response.status() !== RestStatus.OK) { - throw IOException( - "Failed to get max sequence number for shard: [$shard] in index [$index]. 
Response status is ${response.status()}" - ) - } + val request = + SearchRequest() + .indices(index) + .preference("_shards:$shard") + .source( + SearchSourceBuilder() + .size(1) + .sort("_seq_no", SortOrder.DESC) + .seqNoAndPrimaryTerm(true) + .query(boolQueryBuilder), + ) - nonPercolateSearchesTimeTakenStat += response.took.millis - return if (response.hits.hits.isNotEmpty()) response.hits.hits[0].seqNo else null - } + val response: SearchResponse = client.suspendUntil { client.search(request, it) } + if (response.status() !== RestStatus.OK) { + throw IOException( + "Failed to get max sequence number for shard: [$shard] in index [$index]. Response status is ${response.status()}", + ) + } - /** Executes percolate query on the docs against the monitor's query index and return the hits from the search response*/ - private suspend fun runPercolateQueryOnTransformedDocs( - docs: MutableList>, - monitor: Monitor, - monitorMetadata: MonitorMetadata, - concreteIndices: List, - monitorInputIndices: List, - ): SearchHits { - val indices = docs.stream().map { it.second.indexName }.distinct().collect(Collectors.toList()) - val boolQueryBuilder = BoolQueryBuilder().must(buildShouldClausesOverPerIndexMatchQueries(indices)) - val percolateQueryBuilder = - PercolateQueryBuilderExt("query", docs.map { it.second.docSource }, XContentType.JSON) - if (monitor.id.isNotEmpty()) { - boolQueryBuilder.must(QueryBuilders.matchQuery("monitor_id", monitor.id).operator(Operator.AND)) - } - boolQueryBuilder.filter(percolateQueryBuilder) - val queryIndices = - docs.map { monitorMetadata.sourceToQueryIndexMapping[it.second.indexName + monitor.id] }.distinct() - if (queryIndices.isEmpty()) { - val message = - "Monitor ${monitor.id}: Failed to resolve query Indices from source indices during monitor execution!" 
+ - " sourceIndices: $monitorInputIndices" - log.error(message) - throw AlertingException.wrap( - OpenSearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR) - ) + nonPercolateSearchesTimeTakenStat += response.took.millis + return if (response.hits.hits.isNotEmpty()) response.hits.hits[0].seqNo else null } - val searchRequest = - SearchRequest().indices(*queryIndices.toTypedArray()).preference(Preference.PRIMARY_FIRST.type()) - val searchSourceBuilder = SearchSourceBuilder() - searchSourceBuilder.query(boolQueryBuilder) - searchRequest.source(searchSourceBuilder) - log.debug( - "Monitor ${monitor.id}: " + - "Executing percolate query for docs from source indices " + - "$monitorInputIndices against query index $queryIndices" - ) - var response: SearchResponse - try { - response = client.suspendUntil { - client.execute(SearchAction.INSTANCE, searchRequest, it) + /** Executes percolate query on the docs against the monitor's query index and return the hits from the search response*/ + private suspend fun runPercolateQueryOnTransformedDocs( + docs: MutableList>, + monitor: Monitor, + monitorMetadata: MonitorMetadata, + concreteIndices: List, + monitorInputIndices: List, + ): SearchHits { + val indices = + docs + .stream() + .map { it.second.indexName } + .distinct() + .collect(Collectors.toList()) + val boolQueryBuilder = BoolQueryBuilder().must(buildShouldClausesOverPerIndexMatchQueries(indices)) + val percolateQueryBuilder = + PercolateQueryBuilderExt("query", docs.map { it.second.docSource }, XContentType.JSON) + if (monitor.id.isNotEmpty()) { + boolQueryBuilder.must(QueryBuilders.matchQuery("monitor_id", monitor.id).operator(Operator.AND)) + } + boolQueryBuilder.filter(percolateQueryBuilder) + val queryIndices = + docs.map { monitorMetadata.sourceToQueryIndexMapping[it.second.indexName + monitor.id] }.distinct() + if (queryIndices.isEmpty()) { + val message = + "Monitor ${monitor.id}: Failed to resolve query Indices from source indices during monitor 
execution!" + + " sourceIndices: $monitorInputIndices" + log.error(message) + throw AlertingException.wrap( + OpenSearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR), + ) } - } catch (e: Exception) { - throw IllegalStateException( - "Monitor ${monitor.id}:" + - " Failed to run percolate search for sourceIndex [${concreteIndices.joinToString()}] " + - "and queryIndex [${queryIndices.joinToString()}] for ${docs.size} document(s)", - e - ) - } - if (response.status() !== RestStatus.OK) { - throw IOException( - "Monitor ${monitor.id}: Failed to search percolate index: ${queryIndices.joinToString()}. " + - "Response status is ${response.status()}" + val searchRequest = + SearchRequest().indices(*queryIndices.toTypedArray()).preference(Preference.PRIMARY_FIRST.type()) + val searchSourceBuilder = SearchSourceBuilder() + searchSourceBuilder.query(boolQueryBuilder) + searchRequest.source(searchSourceBuilder) + log.debug( + "Monitor ${monitor.id}: " + + "Executing percolate query for docs from source indices " + + "$monitorInputIndices against query index $queryIndices", ) - } - log.debug("Monitor ${monitor.id} PERF_DEBUG: Percolate query time taken millis = ${response.took}") - percolateQueriesTimeTakenStat += response.took.millis - return response.hits - } + var response: SearchResponse + try { + response = + client.suspendUntil { + client.execute(SearchAction.INSTANCE, searchRequest, it) + } + } catch (e: Exception) { + throw IllegalStateException( + "Monitor ${monitor.id}:" + + " Failed to run percolate search for sourceIndex [${concreteIndices.joinToString()}] " + + "and queryIndex [${queryIndices.joinToString()}] for ${docs.size} document(s)", + e, + ) + } - /** we cannot use terms query because `index` field's mapping is of type TEXT and not keyword. 
Refer doc-level-queries.json*/ - private fun buildShouldClausesOverPerIndexMatchQueries(indices: List): BoolQueryBuilder { - val boolQueryBuilder = QueryBuilders.boolQuery() - indices.forEach { boolQueryBuilder.should(QueryBuilders.matchQuery("index", it)) } - return boolQueryBuilder - } + if (response.status() !== RestStatus.OK) { + throw IOException( + "Monitor ${monitor.id}: Failed to search percolate index: ${queryIndices.joinToString()}. " + + "Response status is ${response.status()}", + ) + } + log.debug("Monitor ${monitor.id} PERF_DEBUG: Percolate query time taken millis = ${response.took}") + percolateQueriesTimeTakenStat += response.took.millis + return response.hits + } - /** Executes search query on given shard of given index to fetch docs with sequence number greater than prevSeqNo. - * This method hence fetches only docs from shard which haven't been queried before - */ - private suspend fun searchShard( - monitor: Monitor, - index: String, - shard: String, - prevSeqNo: Long?, - maxSeqNo: Long, - docIds: List? = null, - fieldsToFetch: List, - ): SearchHits { - if (prevSeqNo?.equals(maxSeqNo) == true && maxSeqNo != 0L) { - return SearchHits.empty() + /** we cannot use terms query because `index` field's mapping is of type TEXT and not keyword. 
Refer doc-level-queries.json*/ + private fun buildShouldClausesOverPerIndexMatchQueries(indices: List): BoolQueryBuilder { + val boolQueryBuilder = QueryBuilders.boolQuery() + indices.forEach { boolQueryBuilder.should(QueryBuilders.matchQuery("index", it)) } + return boolQueryBuilder } - val boolQueryBuilder = BoolQueryBuilder() - boolQueryBuilder.filter(QueryBuilders.rangeQuery("_seq_no").gt(prevSeqNo).lte(maxSeqNo)) - if (monitor.shouldCreateSingleAlertForFindings == null || monitor.shouldCreateSingleAlertForFindings == false) { - if (!docIds.isNullOrEmpty()) { - boolQueryBuilder.filter(QueryBuilders.termsQuery("_id", docIds)) + /** Executes search query on given shard of given index to fetch docs with sequence number greater than prevSeqNo. + * This method hence fetches only docs from shard which haven't been queried before + */ + private suspend fun searchShard( + monitor: Monitor, + index: String, + shard: String, + prevSeqNo: Long?, + maxSeqNo: Long, + docIds: List? = null, + fieldsToFetch: List, + ): SearchHits { + if (prevSeqNo?.equals(maxSeqNo) == true && maxSeqNo != 0L) { + return SearchHits.empty() } - } else if (monitor.shouldCreateSingleAlertForFindings == true) { - val docIdsParam = mutableListOf() - if (docIds != null) { - docIdsParam.addAll(docIds) + val boolQueryBuilder = BoolQueryBuilder() + boolQueryBuilder.filter(QueryBuilders.rangeQuery("_seq_no").gt(prevSeqNo).lte(maxSeqNo)) + + if (monitor.shouldCreateSingleAlertForFindings == null || monitor.shouldCreateSingleAlertForFindings == false) { + if (!docIds.isNullOrEmpty()) { + boolQueryBuilder.filter(QueryBuilders.termsQuery("_id", docIds)) + } + } else if (monitor.shouldCreateSingleAlertForFindings == true) { + val docIdsParam = mutableListOf() + if (docIds != null) { + docIdsParam.addAll(docIds) + } + boolQueryBuilder.filter(QueryBuilders.termsQuery("_id", docIdsParam)) } - boolQueryBuilder.filter(QueryBuilders.termsQuery("_id", docIdsParam)) - } - val request: SearchRequest = SearchRequest() - 
.indices(index) - .preference("_shards:$shard") - .source( - SearchSourceBuilder() - .version(true) - .sort("_seq_no", SortOrder.ASC) - .seqNoAndPrimaryTerm(true) - .query(boolQueryBuilder) - .size(docLevelMonitorShardFetchSize) - ) + val request: SearchRequest = + SearchRequest() + .indices(index) + .preference("_shards:$shard") + .source( + SearchSourceBuilder() + .version(true) + .sort("_seq_no", SortOrder.ASC) + .seqNoAndPrimaryTerm(true) + .query(boolQueryBuilder) + .size(docLevelMonitorShardFetchSize), + ) - if (fieldsToFetch.isNotEmpty() && fetchOnlyQueryFieldNames) { - request.source().fetchSource(false) - for (field in fieldsToFetch) { - request.source().fetchField(field) + if (fieldsToFetch.isNotEmpty() && fetchOnlyQueryFieldNames) { + request.source().fetchSource(false) + for (field in fieldsToFetch) { + request.source().fetchField(field) + } + } + val response: SearchResponse = client.suspendUntil { client.search(request, it) } + if (response.status() !== RestStatus.OK) { + throw IOException("Failed to search shard: [$shard] in index [$index]. Response status is ${response.status()}") } + nonPercolateSearchesTimeTakenStat += response.took.millis + return response.hits } - val response: SearchResponse = client.suspendUntil { client.search(request, it) } - if (response.status() !== RestStatus.OK) { - throw IOException("Failed to search shard: [$shard] in index [$index]. Response status is ${response.status()}") + + /** Transform field names and index names in all the search hits to format required to run percolate search against them. + * Hits are transformed using method transformDocumentFieldNames() */ + private fun transformSearchHitsAndReconstructDocs( + hits: SearchHits, + index: String, + concreteIndex: String, + monitorId: String, + conflictingFields: List, + ): List> { + return hits.mapNotNull(fun(hit: SearchHit): Pair? 
{ + try { + val sourceMap = + if (hit.hasSource()) { + hit.sourceAsMap + } else { + constructSourceMapFromFieldsInHit(hit) + } + transformDocumentFieldNames( + sourceMap, + conflictingFields, + "_${index}_$monitorId", + "_${concreteIndex}_$monitorId", + "", + ) + var xContentBuilder = XContentFactory.jsonBuilder().map(sourceMap) + val sourceRef = BytesReference.bytes(xContentBuilder) + docsSizeOfBatchInBytes += sourceRef.ramBytesUsed() + totalDocsSizeInBytesStat += sourceRef.ramBytesUsed() + return Pair( + hit.id, + TransformedDocDto(index, concreteIndex, hit.id, sourceRef), + ) + } catch (e: Exception) { + log.error("Monitor $monitorId: Failed to transform payload $hit for percolate query", e) + // skip any document which we fail to transform because we anyway won't be able to run percolate queries on them. + return null + } + }) } - nonPercolateSearchesTimeTakenStat += response.took.millis - return response.hits - } - /** Transform field names and index names in all the search hits to format required to run percolate search against them. - * Hits are transformed using method transformDocumentFieldNames() */ - private fun transformSearchHitsAndReconstructDocs( - hits: SearchHits, - index: String, - concreteIndex: String, - monitorId: String, - conflictingFields: List, - ): List> { - return hits.mapNotNull(fun(hit: SearchHit): Pair? 
{ - try { - val sourceMap = if (hit.hasSource()) { - hit.sourceAsMap - } else { - constructSourceMapFromFieldsInHit(hit) + private fun constructSourceMapFromFieldsInHit(hit: SearchHit): MutableMap { + if (hit.fields == null) { + return mutableMapOf() + } + val sourceMap: MutableMap = mutableMapOf() + for (field in hit.fields) { + if (field.value.values != null && field.value.values.isNotEmpty()) { + if (field.value.values.size == 1) { + sourceMap[field.key] = field.value.values[0] + } else { + sourceMap[field.key] = field.value.values + } } - transformDocumentFieldNames( - sourceMap, - conflictingFields, - "_${index}_$monitorId", - "_${concreteIndex}_$monitorId", - "" - ) - var xContentBuilder = XContentFactory.jsonBuilder().map(sourceMap) - val sourceRef = BytesReference.bytes(xContentBuilder) - docsSizeOfBatchInBytes += sourceRef.ramBytesUsed() - totalDocsSizeInBytesStat += sourceRef.ramBytesUsed() - return Pair( - hit.id, - TransformedDocDto(index, concreteIndex, hit.id, sourceRef) - ) - } catch (e: Exception) { - log.error("Monitor $monitorId: Failed to transform payload $hit for percolate query", e) - // skip any document which we fail to transform because we anyway won't be able to run percolate queries on them. - return null } - }) - } - - private fun constructSourceMapFromFieldsInHit(hit: SearchHit): MutableMap { - if (hit.fields == null) - return mutableMapOf() - val sourceMap: MutableMap = mutableMapOf() - for (field in hit.fields) { - if (field.value.values != null && field.value.values.isNotEmpty()) - if (field.value.values.size == 1) { - sourceMap[field.key] = field.value.values[0] - } else sourceMap[field.key] = field.value.values + return sourceMap } - return sourceMap - } - /** - * Traverses document fields in leaves recursively and appends [fieldNameSuffixIndex] to field names with same names - * but different mappings & [fieldNameSuffixPattern] to field names which have unique names. 
- * - * Example for index name is my_log_index and Monitor ID is TReewWdsf2gdJFV: - * { { - * "a": { "a": { - * "b": 1234 ----> "b_my_log_index_TReewWdsf2gdJFV": 1234 - * } } - * } - * - * @param jsonAsMap Input JSON (as Map) - * @param fieldNameSuffix Field suffix which is appended to existing field name - */ - private fun transformDocumentFieldNames( - jsonAsMap: MutableMap, - conflictingFields: List, - fieldNameSuffixPattern: String, - fieldNameSuffixIndex: String, - fieldNamePrefix: String - ) { - val tempMap = mutableMapOf() - val it: MutableIterator> = jsonAsMap.entries.iterator() - while (it.hasNext()) { - val entry = it.next() - if (entry.value is Map<*, *>) { - transformDocumentFieldNames( - entry.value as MutableMap, - conflictingFields, - fieldNameSuffixPattern, - fieldNameSuffixIndex, - if (fieldNamePrefix == "") entry.key else "$fieldNamePrefix.${entry.key}" - ) - } else if (!entry.key.endsWith(fieldNameSuffixPattern) && !entry.key.endsWith(fieldNameSuffixIndex)) { - var alreadyReplaced = false - conflictingFields.forEach { conflictingField -> - if (conflictingField == "$fieldNamePrefix.${entry.key}" || (fieldNamePrefix == "" && conflictingField == entry.key)) { - tempMap["${entry.key}$fieldNameSuffixIndex"] = entry.value + /** + * Traverses document fields in leaves recursively and appends [fieldNameSuffixIndex] to field names with same names + * but different mappings & [fieldNameSuffixPattern] to field names which have unique names. 
+ * + * Example for index name is my_log_index and Monitor ID is TReewWdsf2gdJFV: + * { { + * "a": { "a": { + * "b": 1234 ----> "b_my_log_index_TReewWdsf2gdJFV": 1234 + * } } + * } + * + * @param jsonAsMap Input JSON (as Map) + * @param fieldNameSuffix Field suffix which is appended to existing field name + */ + private fun transformDocumentFieldNames( + jsonAsMap: MutableMap, + conflictingFields: List, + fieldNameSuffixPattern: String, + fieldNameSuffixIndex: String, + fieldNamePrefix: String, + ) { + val tempMap = mutableMapOf() + val it: MutableIterator> = jsonAsMap.entries.iterator() + while (it.hasNext()) { + val entry = it.next() + if (entry.value is Map<*, *>) { + transformDocumentFieldNames( + entry.value as MutableMap, + conflictingFields, + fieldNameSuffixPattern, + fieldNameSuffixIndex, + if (fieldNamePrefix == "") entry.key else "$fieldNamePrefix.${entry.key}", + ) + } else if (!entry.key.endsWith(fieldNameSuffixPattern) && !entry.key.endsWith(fieldNameSuffixIndex)) { + var alreadyReplaced = false + conflictingFields.forEach { conflictingField -> + if (conflictingField == "$fieldNamePrefix.${entry.key}" || + (fieldNamePrefix == "" && conflictingField == entry.key) + ) { + tempMap["${entry.key}$fieldNameSuffixIndex"] = entry.value + it.remove() + alreadyReplaced = true + } + } + if (!alreadyReplaced) { + tempMap["${entry.key}$fieldNameSuffixPattern"] = entry.value it.remove() - alreadyReplaced = true } } - if (!alreadyReplaced) { - tempMap["${entry.key}$fieldNameSuffixPattern"] = entry.value - it.remove() - } } + jsonAsMap.putAll(tempMap) } - jsonAsMap.putAll(tempMap) - } - - private fun shouldPerformPercolateQueryAndFlushInMemoryDocs( - numDocs: Int - ): Boolean { - return isInMemoryDocsSizeExceedingMemoryLimit(docsSizeOfBatchInBytes) || - isInMemoryNumDocsExceedingMaxDocsPerPercolateQueryLimit(numDocs) - } - /** - * Returns true, if the docs fetched from shards thus far amount to less than threshold - * amount of percentage (default:10. 
setting is dynamic and configurable) of the total heap size or not. - * - */ - private fun isInMemoryDocsSizeExceedingMemoryLimit(docsBytesSize: Long): Boolean { - var thresholdPercentage = percQueryDocsSizeMemoryPercentageLimit - val heapMaxBytes = JvmStats.jvmStats().mem.heapMax.bytes - val thresholdBytes = (thresholdPercentage.toDouble() / 100.0) * heapMaxBytes - - return docsBytesSize > thresholdBytes - } + private fun shouldPerformPercolateQueryAndFlushInMemoryDocs(numDocs: Int): Boolean = + isInMemoryDocsSizeExceedingMemoryLimit(docsSizeOfBatchInBytes) || + isInMemoryNumDocsExceedingMaxDocsPerPercolateQueryLimit(numDocs) + + /** + * Returns true, if the docs fetched from shards thus far amount to less than threshold + * amount of percentage (default:10. setting is dynamic and configurable) of the total heap size or not. + * + */ + private fun isInMemoryDocsSizeExceedingMemoryLimit(docsBytesSize: Long): Boolean { + var thresholdPercentage = percQueryDocsSizeMemoryPercentageLimit + val heapMaxBytes = + JvmStats + .jvmStats() + .mem.heapMax.bytes + val thresholdBytes = (thresholdPercentage.toDouble() / 100.0) * heapMaxBytes + + return docsBytesSize > thresholdBytes + } - private fun isInMemoryNumDocsExceedingMaxDocsPerPercolateQueryLimit(numDocs: Int): Boolean { - var maxNumDocsThreshold = percQueryMaxNumDocsInMemory - return numDocs >= maxNumDocsThreshold - } + private fun isInMemoryNumDocsExceedingMaxDocsPerPercolateQueryLimit(numDocs: Int): Boolean { + var maxNumDocsThreshold = percQueryMaxNumDocsInMemory + return numDocs >= maxNumDocsThreshold + } - /** - * Performs an mGet request to retrieve the documents associated with findings. - * - * When possible, this will only retrieve the document fields that are specifically - * referenced for printing in the mustache template. 
- */ - private suspend fun getDocSources( - findingToDocPairs: List>, - monitor: Monitor, - findingIdToDocSource: MutableMap - ) { - val docFieldTags = parseSampleDocTags(monitor.triggers) - - // Perform mGet request in batches. - findingToDocPairs.chunked(findingsIndexBatchSize).forEach { batch -> - val request = MultiGetRequest() - val docIdToFindingId = mutableMapOf() - - batch.forEach { (findingId, docIdAndIndex) -> - val docIdAndIndexSplit = docIdAndIndex.split("|") - val docId = docIdAndIndexSplit[0] - docIdToFindingId[docId] = findingId - - val concreteIndex = docIdAndIndexSplit[1] - if (findingId.isNotEmpty() && docId.isNotEmpty() && concreteIndex.isNotEmpty()) { - val docItem = MultiGetRequest.Item(concreteIndex, docId) - if (docFieldTags.isNotEmpty()) - docItem.fetchSourceContext(FetchSourceContext(true, docFieldTags.toTypedArray(), emptyArray())) - request.add(docItem) + /** + * Performs an mGet request to retrieve the documents associated with findings. + * + * When possible, this will only retrieve the document fields that are specifically + * referenced for printing in the mustache template. + */ + private suspend fun getDocSources( + findingToDocPairs: List>, + monitor: Monitor, + findingIdToDocSource: MutableMap, + ) { + val docFieldTags = parseSampleDocTags(monitor.triggers) + + // Perform mGet request in batches. 
+ findingToDocPairs.chunked(findingsIndexBatchSize).forEach { batch -> + val request = MultiGetRequest() + val docIdToFindingId = mutableMapOf() + + batch.forEach { (findingId, docIdAndIndex) -> + val docIdAndIndexSplit = docIdAndIndex.split("|") + val docId = docIdAndIndexSplit[0] + docIdToFindingId[docId] = findingId + + val concreteIndex = docIdAndIndexSplit[1] + if (findingId.isNotEmpty() && docId.isNotEmpty() && concreteIndex.isNotEmpty()) { + val docItem = MultiGetRequest.Item(concreteIndex, docId) + if (docFieldTags.isNotEmpty()) { + docItem.fetchSourceContext(FetchSourceContext(true, docFieldTags.toTypedArray(), emptyArray())) + } + request.add(docItem) + } } - } - val startMget = System.currentTimeMillis() - val response = client.suspendUntil { client.multiGet(request, it) } - val mgetDuration = System.currentTimeMillis() - startMget - log.debug( - "DocLevelMonitor ${monitor.id} mget retrieved [${response.responses.size}] documents. Took: ${mgetDuration}ms" - ) - response.responses.forEach { item -> - val findingId = docIdToFindingId[item.id] - if (findingId != null) { - findingIdToDocSource[findingId] = item - } else { - log.error("Unable to find finding ID for document with ID [${item.id}] for monitor [${monitor.id}]") + val startMget = System.currentTimeMillis() + val response = client.suspendUntil { client.multiGet(request, it) } + val mgetDuration = System.currentTimeMillis() - startMget + log.debug( + "DocLevelMonitor ${monitor.id} mget retrieved [${response.responses.size}] documents. 
Took: ${mgetDuration}ms", + ) + response.responses.forEach { item -> + val findingId = docIdToFindingId[item.id] + if (findingId != null) { + findingIdToDocSource[findingId] = item + } else { + log.error("Unable to find finding ID for document with ID [${item.id}] for monitor [${monitor.id}]") + } } } } - } - /** - * The "destination" ID referenced in a Monitor Action could either be a Notification config or a Destination config - * depending on whether the background migration process has already migrated it from a Destination to a Notification config. - * - * To cover both of these cases, the Notification config will take precedence and if it is not found, the Destination will be retrieved. - */ - private suspend fun getConfigForNotificationAction( - action: Action - ): NotificationActionConfigs { - var destination: Destination? = null - var notificationPermissionException: Exception? = null - - var channel: NotificationConfigInfo? = null - try { - channel = - NotificationApiUtils.getNotificationConfigInfo(client as NodeClient, action.destinationId) - } catch (e: OpenSearchSecurityException) { - notificationPermissionException = e - } + /** + * The "destination" ID referenced in a Monitor Action could either be a Notification config or a Destination config + * depending on whether the background migration process has already migrated it from a Destination to a Notification config. + * + * To cover both of these cases, the Notification config will take precedence and if it is not found, the Destination will be retrieved. + */ + private suspend fun getConfigForNotificationAction(action: Action): NotificationActionConfigs { + var destination: Destination? = null + var notificationPermissionException: Exception? = null + + var channel: NotificationConfigInfo? 
= null + try { + channel = + NotificationApiUtils.getNotificationConfigInfo(client as NodeClient, action.destinationId) + } catch (e: OpenSearchSecurityException) { + notificationPermissionException = e + } - // If the channel was not found, try to retrieve the Destination - if (channel == null) { - destination = try { - val table = Table( - "asc", - "destination.name.keyword", - null, - 1, - 0, - null - ) - val getDestinationsRequest = GetDestinationsRequest( - action.destinationId, - 0L, - null, - table, - "ALL" - ) + // If the channel was not found, try to retrieve the Destination + if (channel == null) { + destination = + try { + val table = + Table( + "asc", + "destination.name.keyword", + null, + 1, + 0, + null, + ) + val getDestinationsRequest = + GetDestinationsRequest( + action.destinationId, + 0L, + null, + table, + "ALL", + ) - val getDestinationsResponse: GetDestinationsResponse = client.suspendUntil { - client.execute(GetDestinationsAction.INSTANCE, getDestinationsRequest, it) - } - getDestinationsResponse.destinations.firstOrNull() - } catch (e: IllegalStateException) { - // Catching the exception thrown when the Destination was not found so the NotificationActionConfigs object can be returned - null - } catch (e: OpenSearchSecurityException) { - if (notificationPermissionException != null) + val getDestinationsResponse: GetDestinationsResponse = + client.suspendUntil { + client.execute(GetDestinationsAction.INSTANCE, getDestinationsRequest, it) + } + getDestinationsResponse.destinations.firstOrNull() + } catch (e: IllegalStateException) { + // Catching the exception thrown when the Destination was not found so the NotificationActionConfigs object can be returned + null + } catch (e: OpenSearchSecurityException) { + if (notificationPermissionException != null) { + throw notificationPermissionException + } else { + throw e + } + } + + if (destination == null && notificationPermissionException != null) { throw notificationPermissionException - else - 
throw e + } } - if (destination == null && notificationPermissionException != null) - throw notificationPermissionException + return NotificationActionConfigs(destination, channel) } - return NotificationActionConfigs(destination, channel) - } - - private fun getDestinationContextFactory(): DestinationContextFactory { - val destinationSettings = DestinationSettings.loadDestinationSettings(settings) - return DestinationContextFactory(client, xContentRegistry, destinationSettings) - } - - private fun compileTemplate(template: Script, ctx: TriggerExecutionContext): String { - return scriptService.compile(template, TemplateScript.CONTEXT) - .newInstance(template.params + mapOf("ctx" to ctx.asTemplateArg())) - .execute() - } + private fun getDestinationContextFactory(): DestinationContextFactory { + val destinationSettings = DestinationSettings.loadDestinationSettings(settings) + return DestinationContextFactory(client, xContentRegistry, destinationSettings) + } - private suspend fun onSuccessfulMonitorRun(monitor: Monitor) { - alertService.clearMonitorErrorAlert(monitor) - if (monitor.dataSources.alertsHistoryIndex != null) { - alertService.moveClearedErrorAlertsToHistory( - monitor.id, - monitor.dataSources.alertsIndex, - monitor.dataSources.alertsHistoryIndex!! - ) + private fun compileTemplate( + template: Script, + ctx: TriggerExecutionContext, + ): String = + scriptService + .compile(template, TemplateScript.CONTEXT) + .newInstance(template.params + mapOf("ctx" to ctx.asTemplateArg())) + .execute() + + private suspend fun onSuccessfulMonitorRun(monitor: Monitor) { + alertService.clearMonitorErrorAlert(monitor) + if (monitor.dataSources.alertsHistoryIndex != null) { + alertService.moveClearedErrorAlertsToHistory( + monitor.id, + monitor.dataSources.alertsIndex, + monitor.dataSources.alertsHistoryIndex!!, + ) + } } - } - private fun constructErrorMessageFromTriggerResults( - triggerResults: MutableMap? 
= null - ): String { - var errorMessage = "" - if (triggerResults != null) { - val triggersErrorBuilder = StringBuilder() - triggerResults.forEach { - if (it.value.error != null) { - triggersErrorBuilder.append("[${it.key}]: [${it.value.error!!.userErrorMessage()}]").append(" | ") + private fun constructErrorMessageFromTriggerResults( + triggerResults: MutableMap? = null, + ): String { + var errorMessage = "" + if (triggerResults != null) { + val triggersErrorBuilder = StringBuilder() + triggerResults.forEach { + if (it.value.error != null) { + triggersErrorBuilder.append("[${it.key}]: [${it.value.error!!.userErrorMessage()}]").append(" | ") + } + } + if (triggersErrorBuilder.isNotEmpty()) { + errorMessage = "Trigger errors: $triggersErrorBuilder" } } - if (triggersErrorBuilder.isNotEmpty()) { - errorMessage = "Trigger errors: $triggersErrorBuilder" - } + return errorMessage } - return errorMessage - } - /** - * POJO holding information about each doc's concrete index, id, input index pattern/alias/datastream name - * and doc source. A list of these POJOs would be passed to percolate query execution logic. - */ - data class TransformedDocDto( - var indexName: String, - var concreteIndexName: String, - var docId: String, - var docSource: BytesReference - ) -} + /** + * POJO holding information about each doc's concrete index, id, input index pattern/alias/datastream name + * and doc source. A list of these POJOs would be passed to percolate query execution logic. 
+ */ + data class TransformedDocDto( + var indexName: String, + var concreteIndexName: String, + var docId: String, + var docSource: BytesReference, + ) + } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteMonitorAction.kt index 5d8999904..84efa1358 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteMonitorAction.kt @@ -49,127 +49,150 @@ import java.util.Locale private val log = LogManager.getLogger(TransportExecuteMonitorAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportExecuteMonitorAction @Inject constructor( - private val transportService: TransportService, - private val client: Client, - private val clusterService: ClusterService, - private val runner: MonitorRunnerService, - actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry, - private val docLevelMonitorQueries: DocLevelMonitorQueries, - private val settings: Settings -) : HandledTransportAction ( - ExecuteMonitorAction.NAME, transportService, actionFilters, ::ExecuteMonitorRequest -) { - @Volatile private var indexTimeout = AlertingSettings.INDEX_TIMEOUT.get(settings) +class TransportExecuteMonitorAction + @Inject + constructor( + private val transportService: TransportService, + private val client: Client, + private val clusterService: ClusterService, + private val runner: MonitorRunnerService, + actionFilters: ActionFilters, + val xContentRegistry: NamedXContentRegistry, + private val docLevelMonitorQueries: DocLevelMonitorQueries, + private val settings: Settings, + ) : HandledTransportAction( + ExecuteMonitorAction.NAME, + transportService, + actionFilters, + ::ExecuteMonitorRequest, + ) { + @Volatile private var indexTimeout = 
AlertingSettings.INDEX_TIMEOUT.get(settings) - override fun doExecute(task: Task, execMonitorRequest: ExecuteMonitorRequest, actionListener: ActionListener) { - - val userStr = client.threadPool().threadContext.getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT) - log.debug("User and roles string from thread context: $userStr") - val user: User? = User.parse(userStr) + override fun doExecute( + task: Task, + execMonitorRequest: ExecuteMonitorRequest, + actionListener: ActionListener, + ) { + val userStr = + client.threadPool().threadContext.getTransient( + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + ) + log.debug("User and roles string from thread context: $userStr") + val user: User? = User.parse(userStr) - client.threadPool().threadContext.stashContext().use { - val executeMonitor = fun(monitor: Monitor) { - // Launch the coroutine with the clients threadContext. This is needed to preserve authentication information - // stored on the threadContext set by the security plugin when using the Alerting plugin with the Security plugin. 
- // runner.launch(ElasticThreadContextElement(client.threadPool().threadContext)) { - runner.launch { - val (periodStart, periodEnd) = if (execMonitorRequest.requestStart != null) { - Pair( - Instant.ofEpochMilli(execMonitorRequest.requestStart.millis), - Instant.ofEpochMilli(execMonitorRequest.requestEnd.millis) - ) - } else { - monitor.schedule.getPeriodEndingAt(Instant.ofEpochMilli(execMonitorRequest.requestEnd.millis)) - } - try { - log.info( - "Executing monitor from API - id: ${monitor.id}, type: ${monitor.monitorType}, " + - "periodStart: $periodStart, periodEnd: $periodEnd, dryrun: ${execMonitorRequest.dryrun}" - ) - val monitorRunResult = runner.runJob(monitor, periodStart, periodEnd, execMonitorRequest.dryrun, transportService) - withContext(Dispatchers.IO) { - actionListener.onResponse(ExecuteMonitorResponse(monitorRunResult)) - } - } catch (e: Exception) { - log.error("Unexpected error running monitor", e) - withContext(Dispatchers.IO) { - actionListener.onFailure(AlertingException.wrap(e)) + client.threadPool().threadContext.stashContext().use { + val executeMonitor = fun(monitor: Monitor) { + // Launch the coroutine with the clients threadContext. This is needed to preserve authentication information + // stored on the threadContext set by the security plugin when using the Alerting plugin with the Security plugin. 
+ // runner.launch(ElasticThreadContextElement(client.threadPool().threadContext)) { + runner.launch { + val (periodStart, periodEnd) = + if (execMonitorRequest.requestStart != null) { + Pair( + Instant.ofEpochMilli(execMonitorRequest.requestStart.millis), + Instant.ofEpochMilli(execMonitorRequest.requestEnd.millis), + ) + } else { + monitor.schedule.getPeriodEndingAt(Instant.ofEpochMilli(execMonitorRequest.requestEnd.millis)) + } + try { + log.info( + "Executing monitor from API - id: ${monitor.id}, type: ${monitor.monitorType}, " + + "periodStart: $periodStart, periodEnd: $periodEnd, dryrun: ${execMonitorRequest.dryrun}", + ) + val monitorRunResult = + runner.runJob( + monitor, + periodStart, + periodEnd, + execMonitorRequest.dryrun, + transportService, + ) + withContext(Dispatchers.IO) { + actionListener.onResponse(ExecuteMonitorResponse(monitorRunResult)) + } + } catch (e: Exception) { + log.error("Unexpected error running monitor", e) + withContext(Dispatchers.IO) { + actionListener.onFailure(AlertingException.wrap(e)) + } } } } - } - if (execMonitorRequest.monitorId != null) { - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX).id(execMonitorRequest.monitorId) - client.get( - getRequest, - object : ActionListener { - override fun onResponse(response: GetResponse) { - if (!response.isExists) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Can't find monitor with id: ${response.id}", RestStatus.NOT_FOUND) + if (execMonitorRequest.monitorId != null) { + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX).id(execMonitorRequest.monitorId) + client.get( + getRequest, + object : ActionListener { + override fun onResponse(response: GetResponse) { + if (!response.isExists) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Can't find monitor with id: ${response.id}", RestStatus.NOT_FOUND), + ), ) - ) - return - } - if (!response.isSourceEmpty) { - 
XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, XContentType.JSON - ).use { xcp -> - val scheduledJob = ScheduledJob.parse(xcp, response.id, response.version) - validateMonitorV1(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return - } - val monitor = scheduledJob as Monitor - executeMonitor(monitor) + return + } + if (!response.isSourceEmpty) { + XContentHelper + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, + XContentType.JSON, + ).use { xcp -> + val scheduledJob = ScheduledJob.parse(xcp, response.id, response.version) + validateMonitorV1(scheduledJob)?.let { + actionListener.onFailure(AlertingException.wrap(it)) + return + } + val monitor = scheduledJob as Monitor + executeMonitor(monitor) + } } } - } - override fun onFailure(t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + override fun onFailure(t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } + }, + ) + } else { + val monitor = + when (user?.name.isNullOrEmpty()) { + true -> execMonitorRequest.monitor as Monitor + false -> (execMonitorRequest.monitor as Monitor).copy(user = user) } - } - ) - } else { - val monitor = when (user?.name.isNullOrEmpty()) { - true -> execMonitorRequest.monitor as Monitor - false -> (execMonitorRequest.monitor as Monitor).copy(user = user) - } - if ( - monitor.isMonitorOfStandardType() && - Monitor.MonitorType.valueOf(monitor.monitorType.uppercase(Locale.ROOT)) == Monitor.MonitorType.DOC_LEVEL_MONITOR - ) { - try { - scope.launch { - if (!docLevelMonitorQueries.docLevelQueryIndexExists(monitor.dataSources)) { - docLevelMonitorQueries.initDocLevelQueryIndex(monitor.dataSources) - log.info("Central Percolation index ${ScheduledJob.DOC_LEVEL_QUERIES_INDEX} created") + if ( + monitor.isMonitorOfStandardType() && + Monitor.MonitorType.valueOf(monitor.monitorType.uppercase(Locale.ROOT)) == 
Monitor.MonitorType.DOC_LEVEL_MONITOR + ) { + try { + scope.launch { + if (!docLevelMonitorQueries.docLevelQueryIndexExists(monitor.dataSources)) { + docLevelMonitorQueries.initDocLevelQueryIndex(monitor.dataSources) + log.info("Central Percolation index ${ScheduledJob.DOC_LEVEL_QUERIES_INDEX} created") + } + val (metadata, _) = MonitorMetadataService.getOrCreateMetadata(monitor, skipIndex = true) + docLevelMonitorQueries.indexDocLevelQueries( + monitor, + monitor.id, + metadata, + WriteRequest.RefreshPolicy.IMMEDIATE, + indexTimeout, + ) + log.info("Queries inserted into Percolate index ${ScheduledJob.DOC_LEVEL_QUERIES_INDEX}") + executeMonitor(monitor) } - val (metadata, _) = MonitorMetadataService.getOrCreateMetadata(monitor, skipIndex = true) - docLevelMonitorQueries.indexDocLevelQueries( - monitor, - monitor.id, - metadata, - WriteRequest.RefreshPolicy.IMMEDIATE, - indexTimeout - ) - log.info("Queries inserted into Percolate index ${ScheduledJob.DOC_LEVEL_QUERIES_INDEX}") - executeMonitor(monitor) + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) } - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + } else { + executeMonitor(monitor) } - } else { - executeMonitor(monitor) } } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt index aa3c5af1d..d90bbfab6 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportExecuteWorkflowAction.kt @@ -39,112 +39,125 @@ import java.time.Instant private val log = LogManager.getLogger(TransportExecuteWorkflowAction::class.java) -class TransportExecuteWorkflowAction @Inject constructor( - private val transportService: TransportService, - private val client: Client, - private val runner: 
MonitorRunnerService, - actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry -) : HandledTransportAction( - ExecuteWorkflowAction.NAME, transportService, actionFilters, ::ExecuteWorkflowRequest -) { - override fun doExecute( - task: Task, - execWorkflowRequest: ExecuteWorkflowRequest, - actionListener: ActionListener, - ) { - val userStr = client.threadPool().threadContext.getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT) - log.debug("User and roles string from thread context: $userStr") - val user: User? = User.parse(userStr) +class TransportExecuteWorkflowAction + @Inject + constructor( + private val transportService: TransportService, + private val client: Client, + private val runner: MonitorRunnerService, + actionFilters: ActionFilters, + val xContentRegistry: NamedXContentRegistry, + ) : HandledTransportAction( + ExecuteWorkflowAction.NAME, + transportService, + actionFilters, + ::ExecuteWorkflowRequest, + ) { + override fun doExecute( + task: Task, + execWorkflowRequest: ExecuteWorkflowRequest, + actionListener: ActionListener, + ) { + val userStr = + client.threadPool().threadContext.getTransient( + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + ) + log.debug("User and roles string from thread context: $userStr") + val user: User? 
= User.parse(userStr) - client.threadPool().threadContext.stashContext().use { - val executeWorkflow = fun(workflow: Workflow) { - runner.launch { - val (periodStart, periodEnd) = if (execWorkflowRequest.requestStart != null) { - Pair( - Instant.ofEpochMilli(execWorkflowRequest.requestStart.millis), - Instant.ofEpochMilli(execWorkflowRequest.requestEnd.millis) - ) - } else { - workflow.schedule.getPeriodEndingAt(Instant.ofEpochMilli(execWorkflowRequest.requestEnd.millis)) - } - try { - log.info( - "Executing workflow from API - id: ${workflow.id}, periodStart: $periodStart, periodEnd: $periodEnd, " + - "dryrun: ${execWorkflowRequest.dryrun}" - ) - val workflowRunResult = - MonitorRunnerService.runJob( - workflow, - periodStart, - periodEnd, - execWorkflowRequest.dryrun, - transportService = transportService - ) - withContext(Dispatchers.IO, { - actionListener.onResponse( - ExecuteWorkflowResponse( - workflowRunResult + client.threadPool().threadContext.stashContext().use { + val executeWorkflow = fun(workflow: Workflow) { + runner.launch { + val (periodStart, periodEnd) = + if (execWorkflowRequest.requestStart != null) { + Pair( + Instant.ofEpochMilli(execWorkflowRequest.requestStart.millis), + Instant.ofEpochMilli(execWorkflowRequest.requestEnd.millis), ) + } else { + workflow.schedule.getPeriodEndingAt(Instant.ofEpochMilli(execWorkflowRequest.requestEnd.millis)) + } + try { + log.info( + "Executing workflow from API - id: ${workflow.id}, periodStart: $periodStart, periodEnd: $periodEnd, " + + "dryrun: ${execWorkflowRequest.dryrun}", ) - }) - } catch (e: Exception) { - log.error("Unexpected error running workflow", e) - withContext(Dispatchers.IO) { - actionListener.onFailure(AlertingException.wrap(e)) + val workflowRunResult = + MonitorRunnerService.runJob( + workflow, + periodStart, + periodEnd, + execWorkflowRequest.dryrun, + transportService = transportService, + ) + withContext(Dispatchers.IO, { + actionListener.onResponse( + ExecuteWorkflowResponse( + 
workflowRunResult, + ), + ) + }) + } catch (e: Exception) { + log.error("Unexpected error running workflow", e) + withContext(Dispatchers.IO) { + actionListener.onFailure(AlertingException.wrap(e)) + } } } } - } - if (execWorkflowRequest.workflowId != null) { - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX).id(execWorkflowRequest.workflowId) - client.get( - getRequest, - object : ActionListener { - override fun onResponse(response: GetResponse) { - if (!response.isExists) { - log.error("Can't find workflow with id: ${response.id}") - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Can't find workflow with id: ${response.id}", - RestStatus.NOT_FOUND - ) + if (execWorkflowRequest.workflowId != null) { + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX).id(execWorkflowRequest.workflowId) + client.get( + getRequest, + object : ActionListener { + override fun onResponse(response: GetResponse) { + if (!response.isExists) { + log.error("Can't find workflow with id: ${response.id}") + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Can't find workflow with id: ${response.id}", + RestStatus.NOT_FOUND, + ), + ), ) - ) - return - } - if (!response.isSourceEmpty) { - XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, XContentType.JSON - ).use { xcp -> - val scheduledJob = ScheduledJob.parse(xcp, response.id, response.version) - validateMonitorV1(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return - } - val workflow = scheduledJob as Workflow - executeWorkflow(workflow) + return + } + if (!response.isSourceEmpty) { + XContentHelper + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, + XContentType.JSON, + ).use { xcp -> + val scheduledJob = ScheduledJob.parse(xcp, response.id, response.version) + validateMonitorV1(scheduledJob)?.let 
{ + actionListener.onFailure(AlertingException.wrap(it)) + return + } + val workflow = scheduledJob as Workflow + executeWorkflow(workflow) + } } } - } - override fun onFailure(t: Exception) { - log.error("Error getting workflow ${execWorkflowRequest.workflowId}", t) - actionListener.onFailure(AlertingException.wrap(t)) + override fun onFailure(t: Exception) { + log.error("Error getting workflow ${execWorkflowRequest.workflowId}", t) + actionListener.onFailure(AlertingException.wrap(t)) + } + }, + ) + } else { + val workflow = + when (user?.name.isNullOrEmpty()) { + true -> execWorkflowRequest.workflow as Workflow + false -> (execWorkflowRequest.workflow as Workflow).copy(user = user) } - } - ) - } else { - val workflow = when (user?.name.isNullOrEmpty()) { - true -> execWorkflowRequest.workflow as Workflow - false -> (execWorkflowRequest.workflow as Workflow).copy(user = user) - } - executeWorkflow(workflow) + executeWorkflow(workflow) + } } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetAlertsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetAlertsAction.kt index 00e47a000..8c2eaa56d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetAlertsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetAlertsAction.kt @@ -56,226 +56,246 @@ import java.io.IOException private val log = LogManager.getLogger(TransportGetAlertsAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportGetAlertsAction @Inject constructor( - transportService: TransportService, - val client: Client, - clusterService: ClusterService, - actionFilters: ActionFilters, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, - val namedWriteableRegistry: NamedWriteableRegistry -) : HandledTransportAction( - AlertingActions.GET_ALERTS_ACTION_NAME, - transportService, - actionFilters, - ::GetAlertsRequest 
-), - SecureTransportAction { +class TransportGetAlertsAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + clusterService: ClusterService, + actionFilters: ActionFilters, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + val namedWriteableRegistry: NamedWriteableRegistry, + ) : HandledTransportAction( + AlertingActions.GET_ALERTS_ACTION_NAME, + transportService, + actionFilters, + ::GetAlertsRequest, + ), + SecureTransportAction { + @Volatile + override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - @Volatile - override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - listenFilterBySettingChange(clusterService) - } + init { + listenFilterBySettingChange(clusterService) + } - override fun doExecute( - task: Task, - request: ActionRequest, - actionListener: ActionListener, - ) { - val getAlertsRequest = request as? GetAlertsRequest - ?: recreateObject(request, namedWriteableRegistry) { GetAlertsRequest(it) } - val user = readUserFromThreadContext(client) + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, + ) { + val getAlertsRequest = + request as? 
GetAlertsRequest + ?: recreateObject(request, namedWriteableRegistry) { GetAlertsRequest(it) } + val user = readUserFromThreadContext(client) - val tableProp = getAlertsRequest.table - val sortBuilder = SortBuilders - .fieldSort(tableProp.sortString) - .order(SortOrder.fromString(tableProp.sortOrder)) - if (!tableProp.missing.isNullOrBlank()) { - sortBuilder.missing(tableProp.missing) - } + val tableProp = getAlertsRequest.table + val sortBuilder = + SortBuilders + .fieldSort(tableProp.sortString) + .order(SortOrder.fromString(tableProp.sortOrder)) + if (!tableProp.missing.isNullOrBlank()) { + sortBuilder.missing(tableProp.missing) + } - val queryBuilder = getAlertsRequest.boolQueryBuilder ?: QueryBuilders.boolQuery() + val queryBuilder = getAlertsRequest.boolQueryBuilder ?: QueryBuilders.boolQuery() - if (getAlertsRequest.severityLevel != "ALL") { - queryBuilder.filter(QueryBuilders.termQuery("severity", getAlertsRequest.severityLevel)) - } + if (getAlertsRequest.severityLevel != "ALL") { + queryBuilder.filter(QueryBuilders.termQuery("severity", getAlertsRequest.severityLevel)) + } - if (getAlertsRequest.alertState == "ALL") { - // alerting dashboards expects chained alerts and individually executed monitors' alerts to be returned from this api - // when invoked with state=ALL. They require that audit alerts are NOT returned in this page - // and only be shown in "associated alerts" field under get workflow_alerts API. 
- // But if the API is called with query_params: state=AUDIT,monitor_id=<123>,workflow_id=, this api - // will return audit alerts generated by delegate monitor <123> in workflow - queryBuilder.filter(QueryBuilders.boolQuery().mustNot(QueryBuilders.termsQuery(Alert.STATE_FIELD, Alert.State.AUDIT.name))) - } else { - queryBuilder.filter(QueryBuilders.termQuery("state", getAlertsRequest.alertState)) - } + if (getAlertsRequest.alertState == "ALL") { + // alerting dashboards expects chained alerts and individually executed monitors' alerts to be returned from this api + // when invoked with state=ALL. They require that audit alerts are NOT returned in this page + // and only be shown in "associated alerts" field under get workflow_alerts API. + // But if the API is called with query_params: state=AUDIT,monitor_id=<123>,workflow_id=, this api + // will return audit alerts generated by delegate monitor <123> in workflow + queryBuilder.filter(QueryBuilders.boolQuery().mustNot(QueryBuilders.termsQuery(Alert.STATE_FIELD, Alert.State.AUDIT.name))) + } else { + queryBuilder.filter(QueryBuilders.termQuery("state", getAlertsRequest.alertState)) + } - if (getAlertsRequest.alertIds.isNullOrEmpty() == false) { - queryBuilder.filter(QueryBuilders.termsQuery("_id", getAlertsRequest.alertIds)) - } + if (getAlertsRequest.alertIds.isNullOrEmpty() == false) { + queryBuilder.filter(QueryBuilders.termsQuery("_id", getAlertsRequest.alertIds)) + } - if (getAlertsRequest.monitorId != null) { - queryBuilder.filter(QueryBuilders.termQuery("monitor_id", getAlertsRequest.monitorId)) - addWorkflowIdNullOrEmptyCheck(getAlertsRequest, queryBuilder) - } else if (getAlertsRequest.monitorIds.isNullOrEmpty() == false) { - queryBuilder.filter(QueryBuilders.termsQuery("monitor_id", getAlertsRequest.monitorIds)) - addWorkflowIdNullOrEmptyCheck(getAlertsRequest, queryBuilder) - } - if ( - getAlertsRequest.workflowIds.isNullOrEmpty() == false && - !(getAlertsRequest.workflowIds!!.size == 1 && 
getAlertsRequest.workflowIds!![0] == "") - ) { - queryBuilder.must(QueryBuilders.termsQuery("workflow_id", getAlertsRequest.workflowIds)) - } - if (!tableProp.searchString.isNullOrBlank()) { - queryBuilder - .must( - QueryBuilders - .queryStringQuery(tableProp.searchString) - .defaultOperator(Operator.AND) - .field("monitor_name") - .field("trigger_name") - ) - } - val searchSourceBuilder = SearchSourceBuilder() - .version(true) - .seqNoAndPrimaryTerm(true) - .query(queryBuilder) - .sort(sortBuilder) - .size(tableProp.size) - .from(tableProp.startIndex) + if (getAlertsRequest.monitorId != null) { + queryBuilder.filter(QueryBuilders.termQuery("monitor_id", getAlertsRequest.monitorId)) + addWorkflowIdNullOrEmptyCheck(getAlertsRequest, queryBuilder) + } else if (getAlertsRequest.monitorIds.isNullOrEmpty() == false) { + queryBuilder.filter(QueryBuilders.termsQuery("monitor_id", getAlertsRequest.monitorIds)) + addWorkflowIdNullOrEmptyCheck(getAlertsRequest, queryBuilder) + } + if ( + getAlertsRequest.workflowIds.isNullOrEmpty() == false && + !(getAlertsRequest.workflowIds!!.size == 1 && getAlertsRequest.workflowIds!![0] == "") + ) { + queryBuilder.must(QueryBuilders.termsQuery("workflow_id", getAlertsRequest.workflowIds)) + } + if (!tableProp.searchString.isNullOrBlank()) { + queryBuilder + .must( + QueryBuilders + .queryStringQuery(tableProp.searchString) + .defaultOperator(Operator.AND) + .field("monitor_name") + .field("trigger_name"), + ) + } + val searchSourceBuilder = + SearchSourceBuilder() + .version(true) + .seqNoAndPrimaryTerm(true) + .query(queryBuilder) + .sort(sortBuilder) + .size(tableProp.size) + .from(tableProp.startIndex) - client.threadPool().threadContext.stashContext().use { - scope.launch { - try { - val alertIndex = resolveAlertsIndexName(getAlertsRequest) - getAlerts(alertIndex, searchSourceBuilder, actionListener, user) - } catch (t: Exception) { - log.error("Failed to get alerts", t) - if (t is AlertingException) { - actionListener.onFailure(t) 
- } else { - actionListener.onFailure(AlertingException.wrap(t)) + client.threadPool().threadContext.stashContext().use { + scope.launch { + try { + val alertIndex = resolveAlertsIndexName(getAlertsRequest) + getAlerts(alertIndex, searchSourceBuilder, actionListener, user) + } catch (t: Exception) { + log.error("Failed to get alerts", t) + if (t is AlertingException) { + actionListener.onFailure(t) + } else { + actionListener.onFailure(AlertingException.wrap(t)) + } } } } } - } - // we add this check when we want to fetch alerts for monitors not generated as part of a workflow i.e. non-delegate monitor alerts - private fun addWorkflowIdNullOrEmptyCheck( - getAlertsRequest: GetAlertsRequest, - queryBuilder: BoolQueryBuilder, - ) { - if ( - getAlertsRequest.workflowIds != null && getAlertsRequest.workflowIds!!.size == 1 && getAlertsRequest.workflowIds!![0] == "" + // we add this check when we want to fetch alerts for monitors not generated as part of a workflow i.e. non-delegate monitor alerts + private fun addWorkflowIdNullOrEmptyCheck( + getAlertsRequest: GetAlertsRequest, + queryBuilder: BoolQueryBuilder, ) { - val noWorkflowIdQuery = QueryBuilders.boolQuery() - .should(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(Alert.WORKFLOW_ID_FIELD))) - .should(QueryBuilders.termsQuery(Alert.WORKFLOW_ID_FIELD, "")) - queryBuilder.must(noWorkflowIdQuery) + if ( + getAlertsRequest.workflowIds != null && getAlertsRequest.workflowIds!!.size == 1 && getAlertsRequest.workflowIds!![0] == "" + ) { + val noWorkflowIdQuery = + QueryBuilders + .boolQuery() + .should(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(Alert.WORKFLOW_ID_FIELD))) + .should(QueryBuilders.termsQuery(Alert.WORKFLOW_ID_FIELD, "")) + queryBuilder.must(noWorkflowIdQuery) + } } - } - /** Precedence order for resolving alert index to be queried: - 1. alertIndex param. - 2. alert index mentioned in monitor data sources. - 3. 
Default alert indices pattern - */ - suspend fun resolveAlertsIndexName(getAlertsRequest: GetAlertsRequest): String { - var alertIndex = AlertIndices.ALL_ALERT_INDEX_PATTERN - if (getAlertsRequest.alertIndex.isNullOrEmpty() == false) { - alertIndex = getAlertsRequest.alertIndex!! - } else if (getAlertsRequest.monitorId.isNullOrEmpty() == false) { - val retrievedMonitor = getMonitor(getAlertsRequest) - if (retrievedMonitor != null) { - alertIndex = retrievedMonitor.dataSources.alertsIndex + /** Precedence order for resolving alert index to be queried: + 1. alertIndex param. + 2. alert index mentioned in monitor data sources. + 3. Default alert indices pattern + */ + suspend fun resolveAlertsIndexName(getAlertsRequest: GetAlertsRequest): String { + var alertIndex = AlertIndices.ALL_ALERT_INDEX_PATTERN + if (getAlertsRequest.alertIndex.isNullOrEmpty() == false) { + alertIndex = getAlertsRequest.alertIndex!! + } else if (getAlertsRequest.monitorId.isNullOrEmpty() == false) { + val retrievedMonitor = getMonitor(getAlertsRequest) + if (retrievedMonitor != null) { + alertIndex = retrievedMonitor.dataSources.alertsIndex + } + } + return if (alertIndex == AlertIndices.ALERT_INDEX) { + AlertIndices.ALL_ALERT_INDEX_PATTERN + } else { + alertIndex } } - return if (alertIndex == AlertIndices.ALERT_INDEX) - AlertIndices.ALL_ALERT_INDEX_PATTERN - else - alertIndex - } - private suspend fun getMonitor(getAlertsRequest: GetAlertsRequest): Monitor? { - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, getAlertsRequest.monitorId!!) - try { - val getResponse: GetResponse = client.suspendUntil { client.get(getRequest, it) } - if (!getResponse.isExists) { + private suspend fun getMonitor(getAlertsRequest: GetAlertsRequest): Monitor? { + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, getAlertsRequest.monitorId!!) 
+ try { + val getResponse: GetResponse = client.suspendUntil { client.get(getRequest, it) } + if (!getResponse.isExists) { + return null + } + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, + XContentType.JSON, + ) + val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) + validateMonitorV1(scheduledJob)?.let { + throw it + } + return scheduledJob as Monitor + } catch (t: Exception) { + log.error("Failure in fetching monitor ${getAlertsRequest.monitorId} to resolve alert index in get alerts action", t) return null } - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, XContentType.JSON - ) - val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) - validateMonitorV1(scheduledJob)?.let { - throw it - } - return scheduledJob as Monitor - } catch (t: Exception) { - log.error("Failure in fetching monitor ${getAlertsRequest.monitorId} to resolve alert index in get alerts action", t) - return null } - } - fun getAlerts( - alertIndex: String, - searchSourceBuilder: SearchSourceBuilder, - actionListener: ActionListener, - user: User?, - ) { - // user is null when: 1/ security is disabled. 2/when user is super-admin. - if (user == null) { + fun getAlerts( + alertIndex: String, + searchSourceBuilder: SearchSourceBuilder, + actionListener: ActionListener, + user: User?, + ) { // user is null when: 1/ security is disabled. 2/when user is super-admin. - search(alertIndex, searchSourceBuilder, actionListener) - } else if (!doFilterForUser(user)) { - // security is enabled and filterby is disabled. - search(alertIndex, searchSourceBuilder, actionListener) - } else { - // security is enabled and filterby is enabled. 
- try { - log.info("Filtering result by: ${user.backendRoles}") - addFilter(user, searchSourceBuilder, "monitor_user.backend_roles.keyword") + if (user == null) { + // user is null when: 1/ security is disabled. 2/when user is super-admin. search(alertIndex, searchSourceBuilder, actionListener) - } catch (ex: IOException) { - actionListener.onFailure(AlertingException.wrap(ex)) + } else if (!doFilterForUser(user)) { + // security is enabled and filterby is disabled. + search(alertIndex, searchSourceBuilder, actionListener) + } else { + // security is enabled and filterby is enabled. + try { + log.info("Filtering result by: ${user.backendRoles}") + addFilter(user, searchSourceBuilder, "monitor_user.backend_roles.keyword") + search(alertIndex, searchSourceBuilder, actionListener) + } catch (ex: IOException) { + actionListener.onFailure(AlertingException.wrap(ex)) + } } } - } - fun search(alertIndex: String, searchSourceBuilder: SearchSourceBuilder, actionListener: ActionListener) { - val searchRequest = SearchRequest() - .indices(alertIndex) - .source(searchSourceBuilder) + fun search( + alertIndex: String, + searchSourceBuilder: SearchSourceBuilder, + actionListener: ActionListener, + ) { + val searchRequest = + SearchRequest() + .indices(alertIndex) + .source(searchSourceBuilder) - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(response: SearchResponse) { - val totalAlertCount = response.hits.totalHits?.value?.toInt() - val alerts = response.hits.map { hit -> - val xcp = XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val alert = Alert.parse(xcp, hit.id, hit.version) - alert + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(response: SearchResponse) { + val totalAlertCount = + response.hits.totalHits + ?.value + 
?.toInt() + val alerts = + response.hits.map { hit -> + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val alert = Alert.parse(xcp, hit.id, hit.version) + alert + } + actionListener.onResponse(GetAlertsResponse(alerts, totalAlertCount)) } - actionListener.onResponse(GetAlertsResponse(alerts, totalAlertCount)) - } - override fun onFailure(t: Exception) { - actionListener.onFailure(t) - } - } - ) + override fun onFailure(t: Exception) { + actionListener.onFailure(t) + } + }, + ) + } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetDestinationsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetDestinationsAction.kt index 60e5edb9d..6ef43fc7d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetDestinationsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetDestinationsAction.kt @@ -44,124 +44,143 @@ import java.io.IOException private val log = LogManager.getLogger(TransportGetDestinationsAction::class.java) -class TransportGetDestinationsAction @Inject constructor( - transportService: TransportService, - val client: Client, - clusterService: ClusterService, - actionFilters: ActionFilters, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry -) : HandledTransportAction ( - GetDestinationsAction.NAME, transportService, actionFilters, ::GetDestinationsRequest -), - SecureTransportAction { - - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - listenFilterBySettingChange(clusterService) - } - - override fun doExecute( - task: Task, - getDestinationsRequest: GetDestinationsRequest, - actionListener: ActionListener - ) { - val user = readUserFromThreadContext(client) - 
val tableProp = getDestinationsRequest.table - - val sortBuilder = SortBuilders - .fieldSort(tableProp.sortString) - .order(SortOrder.fromString(tableProp.sortOrder)) - if (!tableProp.missing.isNullOrBlank()) { - sortBuilder.missing(tableProp.missing) +class TransportGetDestinationsAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + clusterService: ClusterService, + actionFilters: ActionFilters, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + ) : HandledTransportAction( + GetDestinationsAction.NAME, + transportService, + actionFilters, + ::GetDestinationsRequest, + ), + SecureTransportAction { + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + listenFilterBySettingChange(clusterService) } - val searchSourceBuilder = SearchSourceBuilder() - .sort(sortBuilder) - .size(tableProp.size) - .from(tableProp.startIndex) - .fetchSource(FetchSourceContext(true, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY)) - .seqNoAndPrimaryTerm(true) - .version(true) - val queryBuilder = QueryBuilders.boolQuery() - .must(QueryBuilders.existsQuery("destination")) + override fun doExecute( + task: Task, + getDestinationsRequest: GetDestinationsRequest, + actionListener: ActionListener, + ) { + val user = readUserFromThreadContext(client) + val tableProp = getDestinationsRequest.table + + val sortBuilder = + SortBuilders + .fieldSort(tableProp.sortString) + .order(SortOrder.fromString(tableProp.sortOrder)) + if (!tableProp.missing.isNullOrBlank()) { + sortBuilder.missing(tableProp.missing) + } - if (!getDestinationsRequest.destinationId.isNullOrBlank()) - queryBuilder.filter(QueryBuilders.termQuery("_id", getDestinationsRequest.destinationId)) + val searchSourceBuilder = + SearchSourceBuilder() + .sort(sortBuilder) + .size(tableProp.size) + .from(tableProp.startIndex) + .fetchSource(FetchSourceContext(true, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY)) + 
.seqNoAndPrimaryTerm(true) + .version(true) + val queryBuilder = + QueryBuilders + .boolQuery() + .must(QueryBuilders.existsQuery("destination")) + + if (!getDestinationsRequest.destinationId.isNullOrBlank()) { + queryBuilder.filter(QueryBuilders.termQuery("_id", getDestinationsRequest.destinationId)) + } - if (getDestinationsRequest.destinationType != "ALL") - queryBuilder.filter(QueryBuilders.termQuery("destination.type", getDestinationsRequest.destinationType)) + if (getDestinationsRequest.destinationType != "ALL") { + queryBuilder.filter(QueryBuilders.termQuery("destination.type", getDestinationsRequest.destinationType)) + } - if (!tableProp.searchString.isNullOrBlank()) { - queryBuilder - .must( - QueryBuilders - .queryStringQuery(tableProp.searchString) - .defaultOperator(Operator.AND) - .field("destination.type") - .field("destination.name") - ) - } - searchSourceBuilder.query(queryBuilder) + if (!tableProp.searchString.isNullOrBlank()) { + queryBuilder + .must( + QueryBuilders + .queryStringQuery(tableProp.searchString) + .defaultOperator(Operator.AND) + .field("destination.type") + .field("destination.name"), + ) + } + searchSourceBuilder.query(queryBuilder) - client.threadPool().threadContext.stashContext().use { - resolve(searchSourceBuilder, actionListener, user) + client.threadPool().threadContext.stashContext().use { + resolve(searchSourceBuilder, actionListener, user) + } } - } - fun resolve( - searchSourceBuilder: SearchSourceBuilder, - actionListener: ActionListener, - user: User? - ) { - if (user == null) { - // user is null when: 1/ security is disabled. 2/when user is super-admin. - search(searchSourceBuilder, actionListener) - } else if (!doFilterForUser(user)) { - // security is enabled and filterby is disabled. - search(searchSourceBuilder, actionListener) - } else { - // security is enabled and filterby is enabled. 
- try { - log.info("Filtering result by: ${user.backendRoles}") - addFilter(user, searchSourceBuilder, "destination.user.backend_roles.keyword") + fun resolve( + searchSourceBuilder: SearchSourceBuilder, + actionListener: ActionListener, + user: User?, + ) { + if (user == null) { + // user is null when: 1/ security is disabled. 2/when user is super-admin. + search(searchSourceBuilder, actionListener) + } else if (!doFilterForUser(user)) { + // security is enabled and filterby is disabled. search(searchSourceBuilder, actionListener) - } catch (ex: IOException) { - actionListener.onFailure(AlertingException.wrap(ex)) + } else { + // security is enabled and filterby is enabled. + try { + log.info("Filtering result by: ${user.backendRoles}") + addFilter(user, searchSourceBuilder, "destination.user.backend_roles.keyword") + search(searchSourceBuilder, actionListener) + } catch (ex: IOException) { + actionListener.onFailure(AlertingException.wrap(ex)) + } } } - } - fun search(searchSourceBuilder: SearchSourceBuilder, actionListener: ActionListener) { - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(response: SearchResponse) { - val totalDestinationCount = response.hits.totalHits?.value?.toInt() - val destinations = mutableListOf() - for (hit in response.hits) { - val id = hit.id - val version = hit.version - val seqNo = hit.seqNo.toInt() - val primaryTerm = hit.primaryTerm.toInt() - val xcp = XContentType.JSON.xContent() - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, xcp.nextToken(), xcp) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - 
destinations.add(Destination.parse(xcp, id, version, seqNo, primaryTerm)) + fun search( + searchSourceBuilder: SearchSourceBuilder, + actionListener: ActionListener, + ) { + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(response: SearchResponse) { + val totalDestinationCount = + response.hits.totalHits + ?.value + ?.toInt() + val destinations = mutableListOf() + for (hit in response.hits) { + val id = hit.id + val version = hit.version + val seqNo = hit.seqNo.toInt() + val primaryTerm = hit.primaryTerm.toInt() + val xcp = + XContentType.JSON + .xContent() + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, xcp.nextToken(), xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + destinations.add(Destination.parse(xcp, id, version, seqNo, primaryTerm)) + } + actionListener.onResponse(GetDestinationsResponse(RestStatus.OK, totalDestinationCount, destinations)) } - actionListener.onResponse(GetDestinationsResponse(RestStatus.OK, totalDestinationCount, destinations)) - } - override fun onFailure(t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) - } - } - ) + override fun onFailure(t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } + }, + ) + } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailAccountAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailAccountAction.kt index bbcf02cfb..209cbe86e 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailAccountAction.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailAccountAction.kt @@ -35,81 +35,92 @@ import org.opensearch.transport.client.Client private val log = LogManager.getLogger(TransportGetEmailAccountAction::class.java) -class TransportGetEmailAccountAction @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val clusterService: ClusterService, - settings: Settings, - val xContentRegistry: NamedXContentRegistry -) : HandledTransportAction( - GetEmailAccountAction.NAME, transportService, actionFilters, ::GetEmailAccountRequest -) { +class TransportGetEmailAccountAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val clusterService: ClusterService, + settings: Settings, + val xContentRegistry: NamedXContentRegistry, + ) : HandledTransportAction( + GetEmailAccountAction.NAME, + transportService, + actionFilters, + ::GetEmailAccountRequest, + ) { + @Volatile private var allowList = ALLOW_LIST.get(settings) - @Volatile private var allowList = ALLOW_LIST.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } - } - - override fun doExecute( - task: Task, - getEmailAccountRequest: GetEmailAccountRequest, - actionListener: ActionListener - ) { + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } + } - if (!allowList.contains(DestinationType.EMAIL.value)) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "This API is blocked since Destination type [${DestinationType.EMAIL}] is not allowed", - RestStatus.FORBIDDEN - ) + override fun doExecute( + task: Task, + getEmailAccountRequest: GetEmailAccountRequest, + actionListener: ActionListener, + ) { + if (!allowList.contains(DestinationType.EMAIL.value)) { + actionListener.onFailure( + AlertingException.wrap( + 
OpenSearchStatusException( + "This API is blocked since Destination type [${DestinationType.EMAIL}] is not allowed", + RestStatus.FORBIDDEN, + ), + ), ) - ) - return - } + return + } - val getRequest = GetRequest(SCHEDULED_JOBS_INDEX, getEmailAccountRequest.emailAccountID) - .version(getEmailAccountRequest.version) - .fetchSourceContext(getEmailAccountRequest.srcContext) - client.threadPool().threadContext.stashContext().use { - client.get( - getRequest, - object : ActionListener { - override fun onResponse(response: GetResponse) { - if (!response.isExists) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Email Account not found.", RestStatus.NOT_FOUND) + val getRequest = + GetRequest(SCHEDULED_JOBS_INDEX, getEmailAccountRequest.emailAccountID) + .version(getEmailAccountRequest.version) + .fetchSourceContext(getEmailAccountRequest.srcContext) + client.threadPool().threadContext.stashContext().use { + client.get( + getRequest, + object : ActionListener { + override fun onResponse(response: GetResponse) { + if (!response.isExists) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Email Account not found.", RestStatus.NOT_FOUND), + ), ) - ) - return - } + return + } - var emailAccount: EmailAccount? = null - if (!response.isSourceEmpty) { - XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, XContentType.JSON - ).use { xcp -> - emailAccount = EmailAccount.parseWithType(xcp, response.id, response.version) + var emailAccount: EmailAccount? 
= null + if (!response.isSourceEmpty) { + XContentHelper + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, + XContentType.JSON, + ).use { xcp -> + emailAccount = EmailAccount.parseWithType(xcp, response.id, response.version) + } } - } - actionListener.onResponse( - GetEmailAccountResponse( - response.id, response.version, response.seqNo, response.primaryTerm, - RestStatus.OK, emailAccount + actionListener.onResponse( + GetEmailAccountResponse( + response.id, + response.version, + response.seqNo, + response.primaryTerm, + RestStatus.OK, + emailAccount, + ), ) - ) - } + } - override fun onFailure(e: Exception) { - actionListener.onFailure(e) - } - } - ) + override fun onFailure(e: Exception) { + actionListener.onFailure(e) + } + }, + ) + } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailGroupAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailGroupAction.kt index 88becd6ba..03c20316c 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailGroupAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetEmailGroupAction.kt @@ -35,81 +35,92 @@ import org.opensearch.transport.client.Client private val log = LogManager.getLogger(TransportGetEmailGroupAction::class.java) -class TransportGetEmailGroupAction @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val clusterService: ClusterService, - settings: Settings, - val xContentRegistry: NamedXContentRegistry -) : HandledTransportAction( - GetEmailGroupAction.NAME, transportService, actionFilters, ::GetEmailGroupRequest -) { +class TransportGetEmailGroupAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val clusterService: ClusterService, + settings: Settings, + val xContentRegistry: 
NamedXContentRegistry, + ) : HandledTransportAction( + GetEmailGroupAction.NAME, + transportService, + actionFilters, + ::GetEmailGroupRequest, + ) { + @Volatile private var allowList = ALLOW_LIST.get(settings) - @Volatile private var allowList = ALLOW_LIST.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } - } - - override fun doExecute( - task: Task, - getEmailGroupRequest: GetEmailGroupRequest, - actionListener: ActionListener - ) { + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } + } - if (!allowList.contains(DestinationType.EMAIL.value)) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "This API is blocked since Destination type [${DestinationType.EMAIL}] is not allowed", - RestStatus.FORBIDDEN - ) + override fun doExecute( + task: Task, + getEmailGroupRequest: GetEmailGroupRequest, + actionListener: ActionListener, + ) { + if (!allowList.contains(DestinationType.EMAIL.value)) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "This API is blocked since Destination type [${DestinationType.EMAIL}] is not allowed", + RestStatus.FORBIDDEN, + ), + ), ) - ) - return - } + return + } - val getRequest = GetRequest(SCHEDULED_JOBS_INDEX, getEmailGroupRequest.emailGroupID) - .version(getEmailGroupRequest.version) - .fetchSourceContext(getEmailGroupRequest.srcContext) - client.threadPool().threadContext.stashContext().use { - client.get( - getRequest, - object : ActionListener { - override fun onResponse(response: GetResponse) { - if (!response.isExists) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Email Group not found.", RestStatus.NOT_FOUND) + val getRequest = + GetRequest(SCHEDULED_JOBS_INDEX, getEmailGroupRequest.emailGroupID) + .version(getEmailGroupRequest.version) + .fetchSourceContext(getEmailGroupRequest.srcContext) + 
client.threadPool().threadContext.stashContext().use { + client.get( + getRequest, + object : ActionListener { + override fun onResponse(response: GetResponse) { + if (!response.isExists) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Email Group not found.", RestStatus.NOT_FOUND), + ), ) - ) - return - } + return + } - var emailGroup: EmailGroup? = null - if (!response.isSourceEmpty) { - XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, XContentType.JSON - ).use { xcp -> - emailGroup = EmailGroup.parseWithType(xcp, response.id, response.version) + var emailGroup: EmailGroup? = null + if (!response.isSourceEmpty) { + XContentHelper + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, + XContentType.JSON, + ).use { xcp -> + emailGroup = EmailGroup.parseWithType(xcp, response.id, response.version) + } } - } - actionListener.onResponse( - GetEmailGroupResponse( - response.id, response.version, response.seqNo, response.primaryTerm, - RestStatus.OK, emailGroup + actionListener.onResponse( + GetEmailGroupResponse( + response.id, + response.version, + response.seqNo, + response.primaryTerm, + RestStatus.OK, + emailGroup, + ), ) - ) - } + } - override fun onFailure(e: Exception) { - actionListener.onFailure(e) - } - } - ) + override fun onFailure(e: Exception) { + actionListener.onFailure(e) + } + }, + ) + } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetFindingsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetFindingsAction.kt index c4a1f2dbb..9b513688b 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetFindingsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetFindingsAction.kt @@ -54,174 +54,190 @@ import org.opensearch.tasks.Task import 
org.opensearch.transport.TransportService import org.opensearch.transport.client.Client -private val log = LogManager.getLogger(TransportGetFindingsSearchAction::class.java) +private val log = LogManager.getLogger(TransportGetFindingsAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportGetFindingsSearchAction @Inject constructor( - transportService: TransportService, - val client: Client, - clusterService: ClusterService, - actionFilters: ActionFilters, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, - val namedWriteableRegistry: NamedWriteableRegistry -) : HandledTransportAction ( - AlertingActions.GET_FINDINGS_ACTION_NAME, transportService, actionFilters, ::GetFindingsRequest -), - SecureTransportAction { - - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - listenFilterBySettingChange(clusterService) - } - - override fun doExecute( - task: Task, - request: ActionRequest, - actionListener: ActionListener - ) { - val getFindingsRequest = request as? 
GetFindingsRequest - ?: recreateObject(request, namedWriteableRegistry) { GetFindingsRequest(it) } - val tableProp = getFindingsRequest.table - - val sortBuilder = SortBuilders - .fieldSort(tableProp.sortString) - .order(SortOrder.fromString(tableProp.sortOrder)) - if (!tableProp.missing.isNullOrBlank()) { - sortBuilder.missing(tableProp.missing) +class TransportGetFindingsAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + clusterService: ClusterService, + actionFilters: ActionFilters, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + val namedWriteableRegistry: NamedWriteableRegistry, + ) : HandledTransportAction( + AlertingActions.GET_FINDINGS_ACTION_NAME, + transportService, + actionFilters, + ::GetFindingsRequest, + ), + SecureTransportAction { + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + listenFilterBySettingChange(clusterService) } - val searchSourceBuilder = SearchSourceBuilder() - .sort(sortBuilder) - .size(tableProp.size) - .from(tableProp.startIndex) - .fetchSource(FetchSourceContext(true, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY)) - .seqNoAndPrimaryTerm(true) - .version(true) - - val queryBuilder = getFindingsRequest.boolQueryBuilder ?: QueryBuilders.boolQuery() - - if (!getFindingsRequest.findingId.isNullOrBlank()) - queryBuilder.filter(QueryBuilders.termQuery("_id", getFindingsRequest.findingId)) - if (getFindingsRequest.monitorId != null) { - queryBuilder.filter(QueryBuilders.termQuery("monitor_id", getFindingsRequest.monitorId)) - } else if (getFindingsRequest.monitorIds.isNullOrEmpty() == false) { - queryBuilder.filter(QueryBuilders.termsQuery("monitor_id", getFindingsRequest.monitorIds)) - } + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, + ) { + val getFindingsRequest = + request as? 
GetFindingsRequest + ?: recreateObject(request, namedWriteableRegistry) { GetFindingsRequest(it) } + val tableProp = getFindingsRequest.table + + val sortBuilder = + SortBuilders + .fieldSort(tableProp.sortString) + .order(SortOrder.fromString(tableProp.sortOrder)) + if (!tableProp.missing.isNullOrBlank()) { + sortBuilder.missing(tableProp.missing) + } + + val searchSourceBuilder = + SearchSourceBuilder() + .sort(sortBuilder) + .size(tableProp.size) + .from(tableProp.startIndex) + .fetchSource(FetchSourceContext(true, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY)) + .seqNoAndPrimaryTerm(true) + .version(true) + + val queryBuilder = getFindingsRequest.boolQueryBuilder ?: QueryBuilders.boolQuery() - if (!tableProp.searchString.isNullOrBlank()) { - queryBuilder - .should( - QueryBuilders - .queryStringQuery(tableProp.searchString) - ) - .should( - QueryBuilders.nestedQuery( - "queries", - QueryBuilders.boolQuery() - .must( - QueryBuilders - .queryStringQuery(tableProp.searchString) - .defaultOperator(Operator.AND) - .field("queries.tags") - .field("queries.name") - ), - ScoreMode.Avg + if (!getFindingsRequest.findingId.isNullOrBlank()) { + queryBuilder.filter(QueryBuilders.termQuery("_id", getFindingsRequest.findingId)) + } + if (getFindingsRequest.monitorId != null) { + queryBuilder.filter(QueryBuilders.termQuery("monitor_id", getFindingsRequest.monitorId)) + } else if (getFindingsRequest.monitorIds.isNullOrEmpty() == false) { + queryBuilder.filter(QueryBuilders.termsQuery("monitor_id", getFindingsRequest.monitorIds)) + } + + if (!tableProp.searchString.isNullOrBlank()) { + queryBuilder + .should( + QueryBuilders + .queryStringQuery(tableProp.searchString), + ).should( + QueryBuilders.nestedQuery( + "queries", + QueryBuilders + .boolQuery() + .must( + QueryBuilders + .queryStringQuery(tableProp.searchString) + .defaultOperator(Operator.AND) + .field("queries.tags") + .field("queries.name"), + ), + ScoreMode.Avg, + ), ) - ) - } - 
searchSourceBuilder.query(queryBuilder).trackTotalHits(true) - client.threadPool().threadContext.stashContext().use { - scope.launch { - try { - val indexName = resolveFindingsIndexName(getFindingsRequest) - val getFindingsResponse = search(searchSourceBuilder, indexName) - actionListener.onResponse(getFindingsResponse) - } catch (t: AlertingException) { - actionListener.onFailure(t) - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + } + searchSourceBuilder.query(queryBuilder).trackTotalHits(true) + client.threadPool().threadContext.stashContext().use { + scope.launch { + try { + val indexName = resolveFindingsIndexName(getFindingsRequest) + val getFindingsResponse = search(searchSourceBuilder, indexName) + actionListener.onResponse(getFindingsResponse) + } catch (t: AlertingException) { + actionListener.onFailure(t) + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } } } } - } - suspend fun resolveFindingsIndexName(findingsRequest: GetFindingsRequest): String { - var indexName = ALL_FINDING_INDEX_PATTERN - - if (findingsRequest.findingIndex.isNullOrEmpty() == false) { - // findingIndex has highest priority, so use that if available - indexName = findingsRequest.findingIndex!! - } else if (findingsRequest.monitorId.isNullOrEmpty() == false) { - // second best is monitorId. 
- // We will use it to fetch monitor and then read indexName from dataSources field of monitor - withContext(Dispatchers.IO) { - val getMonitorRequest = GetMonitorRequest( - findingsRequest.monitorId!!, - -3L, - RestRequest.Method.GET, - FetchSourceContext.FETCH_SOURCE - ) - val getMonitorResponse: GetMonitorResponse = - this@TransportGetFindingsSearchAction.client.suspendUntil { - execute(AlertingActions.GET_MONITOR_ACTION_TYPE, getMonitorRequest, it) - } - indexName = getMonitorResponse.monitor?.dataSources?.findingsIndex ?: ALL_FINDING_INDEX_PATTERN + suspend fun resolveFindingsIndexName(findingsRequest: GetFindingsRequest): String { + var indexName = ALL_FINDING_INDEX_PATTERN + + if (findingsRequest.findingIndex.isNullOrEmpty() == false) { + // findingIndex has highest priority, so use that if available + indexName = findingsRequest.findingIndex!! + } else if (findingsRequest.monitorId.isNullOrEmpty() == false) { + // second best is monitorId. + // We will use it to fetch monitor and then read indexName from dataSources field of monitor + withContext(Dispatchers.IO) { + val getMonitorRequest = + GetMonitorRequest( + findingsRequest.monitorId!!, + -3L, + RestRequest.Method.GET, + FetchSourceContext.FETCH_SOURCE, + ) + val getMonitorResponse: GetMonitorResponse = + this@TransportGetFindingsAction.client.suspendUntil { + execute(AlertingActions.GET_MONITOR_ACTION_TYPE, getMonitorRequest, it) + } + indexName = getMonitorResponse.monitor?.dataSources?.findingsIndex ?: ALL_FINDING_INDEX_PATTERN + } } + return indexName } - return indexName - } - suspend fun search(searchSourceBuilder: SearchSourceBuilder, indexName: String): GetFindingsResponse { - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(indexName) - val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } - val totalFindingCount = searchResponse.hits.totalHits?.value?.toInt() - val mgetRequest = MultiGetRequest() - val findingsWithDocs = 
mutableListOf() - val findings = mutableListOf() - for (hit in searchResponse.hits) { - val xcp = XContentType.JSON.xContent() - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val finding = Finding.parse(xcp) - findings.add(finding) - val documentIds = finding.relatedDocIds - // Add getRequests to mget request - documentIds.forEach { docId -> - mgetRequest.add(MultiGetRequest.Item(finding.index, docId)) + suspend fun search( + searchSourceBuilder: SearchSourceBuilder, + indexName: String, + ): GetFindingsResponse { + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(indexName) + val searchResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + val totalFindingCount = + searchResponse.hits.totalHits + ?.value + ?.toInt() + val mgetRequest = MultiGetRequest() + val findingsWithDocs = mutableListOf() + val findings = mutableListOf() + for (hit in searchResponse.hits) { + val xcp = + XContentType.JSON + .xContent() + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val finding = Finding.parse(xcp) + findings.add(finding) + val documentIds = finding.relatedDocIds + // Add getRequests to mget request + documentIds.forEach { docId -> + mgetRequest.add(MultiGetRequest.Item(finding.index, docId)) + } } - } - val documents = if (mgetRequest.items.isEmpty()) mutableMapOf() else searchDocument(mgetRequest) - findings.forEach { - val documentIds = it.relatedDocIds - val relatedDocs = mutableListOf() - for (docId in documentIds) { - val key = "${it.index}|$docId" - documents[key]?.let { document -> relatedDocs.add(document) } + val documents = if (mgetRequest.items.isEmpty()) mutableMapOf() else searchDocument(mgetRequest) + findings.forEach { 
+ val documentIds = it.relatedDocIds + val relatedDocs = mutableListOf() + for (docId in documentIds) { + val key = "${it.index}|$docId" + documents[key]?.let { document -> relatedDocs.add(document) } + } + findingsWithDocs.add(FindingWithDocs(it, relatedDocs)) } - findingsWithDocs.add(FindingWithDocs(it, relatedDocs)) + + return GetFindingsResponse(searchResponse.status(), totalFindingCount, findingsWithDocs) } - return GetFindingsResponse(searchResponse.status(), totalFindingCount, findingsWithDocs) - } + // TODO: Verify what happens if indices are closed/deleted + suspend fun searchDocument(mgetRequest: MultiGetRequest): Map { + val response: MultiGetResponse = client.suspendUntil { client.multiGet(mgetRequest, it) } + val documents: MutableMap = mutableMapOf() + response.responses.forEach { + val key = "${it.index}|${it.id}" + val isDocFound = !(it.isFailed || it.response.sourceAsString == null) + val docData = if (isDocFound) it.response.sourceAsString else "" + val findingDocument = FindingDocument(it.index, it.id, isDocFound, docData) + documents[key] = findingDocument + } - // TODO: Verify what happens if indices are closed/deleted - suspend fun searchDocument( - mgetRequest: MultiGetRequest - ): Map { - val response: MultiGetResponse = client.suspendUntil { client.multiGet(mgetRequest, it) } - val documents: MutableMap = mutableMapOf() - response.responses.forEach { - val key = "${it.index}|${it.id}" - val isDocFound = !(it.isFailed || it.response.sourceAsString == null) - val docData = if (isDocFound) it.response.sourceAsString else "" - val findingDocument = FindingDocument(it.index, it.id, isDocFound, docData) - documents[key] = findingDocument + return documents } - - return documents } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetMonitorAction.kt index ad8c8d021..bfa68aa23 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetMonitorAction.kt @@ -52,43 +52,50 @@ import org.opensearch.transport.client.Client private val log = LogManager.getLogger(TransportGetMonitorAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportGetMonitorAction @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry, - val clusterService: ClusterService, - settings: Settings, -) : HandledTransportAction( - AlertingActions.GET_MONITOR_ACTION_NAME, - transportService, - actionFilters, - ::GetMonitorRequest -), - SecureTransportAction { - - @Volatile - override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - listenFilterBySettingChange(clusterService) - } +class TransportGetMonitorAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val xContentRegistry: NamedXContentRegistry, + val clusterService: ClusterService, + settings: Settings, + ) : HandledTransportAction( + AlertingActions.GET_MONITOR_ACTION_NAME, + transportService, + actionFilters, + ::GetMonitorRequest, + ), + SecureTransportAction { + @Volatile + override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + listenFilterBySettingChange(clusterService) + } - override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { - val transformedRequest = request as? GetMonitorRequest - ?: recreateObject(request) { - GetMonitorRequest(it) - } + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, + ) { + val transformedRequest = + request as? 
GetMonitorRequest + ?: recreateObject(request) { + GetMonitorRequest(it) + } - val user = readUserFromThreadContext(client) + val user = readUserFromThreadContext(client) - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, transformedRequest.monitorId) - .version(transformedRequest.version) - .fetchSourceContext(transformedRequest.srcContext) + val getRequest = + GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, transformedRequest.monitorId) + .version(transformedRequest.version) + .fetchSourceContext(transformedRequest.srcContext) - if (!validateUserBackendRoles(user, actionListener)) { - return - } + if (!validateUserBackendRoles(user, actionListener)) { + return + } /* * Remove security context before you call elasticsearch api's. By this time, permissions required @@ -96,119 +103,124 @@ class TransportGetMonitorAction @Inject constructor( * Once system-indices [https://github.com/opendistro-for-elasticsearch/security/issues/666] is done, we * might further improve this logic. Also change try to kotlin-use for auto-closable. */ - client.threadPool().threadContext.stashContext().use { - client.get( - getRequest, - object : ActionListener { - override fun onResponse(response: GetResponse) { - if (!response.isExists) { - actionListener.onFailure( - AlertingException.wrap(OpenSearchStatusException("Monitor not found.", RestStatus.NOT_FOUND)) - ) - return - } - - var monitor: Monitor? 
= null - if (!response.isSourceEmpty) { - XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, - XContentType.JSON - ).use { xcp -> - val scheduledJob = ScheduledJob.parse(xcp, response.id, response.version) - - validateMonitorV1(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return - } - - monitor = scheduledJob as Monitor + client.threadPool().threadContext.stashContext().use { + client.get( + getRequest, + object : ActionListener { + override fun onResponse(response: GetResponse) { + if (!response.isExists) { + actionListener.onFailure( + AlertingException.wrap(OpenSearchStatusException("Monitor not found.", RestStatus.NOT_FOUND)), + ) + return + } - // security is enabled and filterby is enabled - if (!checkUserPermissionsWithResource( - user, - monitor?.user, - actionListener, - "monitor", - transformedRequest.monitorId - ) - ) { - return - } + var monitor: Monitor? = null + if (!response.isSourceEmpty) { + XContentHelper + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, + XContentType.JSON, + ).use { xcp -> + val scheduledJob = ScheduledJob.parse(xcp, response.id, response.version) + + validateMonitorV1(scheduledJob)?.let { + actionListener.onFailure(AlertingException.wrap(it)) + return + } + + monitor = scheduledJob as Monitor + + // security is enabled and filterby is enabled + if (!checkUserPermissionsWithResource( + user, + monitor?.user, + actionListener, + "monitor", + transformedRequest.monitorId, + ) + ) { + return + } + } } - } - try { - scope.launch { - val associatedCompositeMonitors = getAssociatedWorkflows(response.id) - actionListener.onResponse( - GetMonitorResponse( - response.id, - response.version, - response.seqNo, - response.primaryTerm, - monitor, - associatedCompositeMonitors + try { + scope.launch { + val associatedCompositeMonitors = getAssociatedWorkflows(response.id) + 
actionListener.onResponse( + GetMonitorResponse( + response.id, + response.version, + response.seqNo, + response.primaryTerm, + monitor, + associatedCompositeMonitors, + ), ) - ) + } + } catch (e: Exception) { + log.error("Failed to get associate workflows in get monitor action", e) } - } catch (e: Exception) { - log.error("Failed to get associate workflows in get monitor action", e) } - } - override fun onFailure(ex: Exception) { - if (isIndexNotFoundException(ex)) { - log.error("Index not found while getting monitor", ex) - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Monitor not found. Backing index is missing.", RestStatus.NOT_FOUND, ex) + override fun onFailure(ex: Exception) { + if (isIndexNotFoundException(ex)) { + log.error("Index not found while getting monitor", ex) + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Monitor not found. Backing index is missing.", RestStatus.NOT_FOUND, ex), + ), ) - ) - } else { - log.error("Unexpected error while getting monitor", ex) - actionListener.onFailure(AlertingException.wrap(ex)) + } else { + log.error("Unexpected error while getting monitor", ex) + actionListener.onFailure(AlertingException.wrap(ex)) + } } - } - } - ) + }, + ) + } } - } - private suspend fun getAssociatedWorkflows(id: String): List { - try { - val associatedWorkflows = mutableListOf() - val queryBuilder = QueryBuilders.nestedQuery( - WORKFLOW_DELEGATE_PATH, - QueryBuilders.boolQuery().must( - QueryBuilders.matchQuery( - WORKFLOW_MONITOR_PATH, - id + private suspend fun getAssociatedWorkflows(id: String): List { + try { + val associatedWorkflows = mutableListOf() + val queryBuilder = + QueryBuilders.nestedQuery( + WORKFLOW_DELEGATE_PATH, + QueryBuilders.boolQuery().must( + QueryBuilders.matchQuery( + WORKFLOW_MONITOR_PATH, + id, + ), + ), + ScoreMode.None, ) - ), - ScoreMode.None - ) - val searchRequest = SearchRequest() - .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) - 
.source(SearchSourceBuilder().query(queryBuilder).fetchField("_id")) - val response: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } - - for (hit in response.hits) { - XContentType.JSON.xContent().createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - hit.sourceAsString - ).use { hitsParser -> - val workflow = ScheduledJob.parse(hitsParser, hit.id, hit.version) - if (workflow is Workflow) { - associatedWorkflows.add(AssociatedWorkflow(hit.id, workflow.name)) - } + val searchRequest = + SearchRequest() + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + .source(SearchSourceBuilder().query(queryBuilder).fetchField("_id")) + val response: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + + for (hit in response.hits) { + XContentType.JSON + .xContent() + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceAsString, + ).use { hitsParser -> + val workflow = ScheduledJob.parse(hitsParser, hit.id, hit.version) + if (workflow is Workflow) { + associatedWorkflows.add(AssociatedWorkflow(hit.id, workflow.name)) + } + } } + return associatedWorkflows + } catch (e: java.lang.Exception) { + log.error("failed to fetch associated workflows for monitor $id", e) + return emptyList() } - return associatedWorkflows - } catch (e: java.lang.Exception) { - log.error("failed to fetch associated workflows for monitor $id", e) - return emptyList() } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetRemoteIndexesAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetRemoteIndexesAction.kt index b106903a8..158550099 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetRemoteIndexesAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetRemoteIndexesAction.kt @@ -45,157 +45,167 @@ import java.time.Instant private val log = 
LogManager.getLogger(TransportGetRemoteIndexesAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportGetRemoteIndexesAction @Inject constructor( - val transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry, - val clusterService: ClusterService, - settings: Settings, -) : HandledTransportAction( - GetRemoteIndexesAction.NAME, - transportService, - actionFilters, - ::GetRemoteIndexesRequest -), - SecureTransportAction { - - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - @Volatile private var remoteMonitoringEnabled = CROSS_CLUSTER_MONITORING_ENABLED.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(CROSS_CLUSTER_MONITORING_ENABLED) { remoteMonitoringEnabled = it } - listenFilterBySettingChange(clusterService) - } - - override fun doExecute( - task: Task, - request: GetRemoteIndexesRequest, - actionListener: ActionListener - ) { - log.debug("Remote monitoring enabled: {}", remoteMonitoringEnabled) - if (!remoteMonitoringEnabled) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Remote monitoring is not enabled.", RestStatus.FORBIDDEN) - ) - ) - return +class TransportGetRemoteIndexesAction + @Inject + constructor( + val transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val xContentRegistry: NamedXContentRegistry, + val clusterService: ClusterService, + settings: Settings, + ) : HandledTransportAction( + GetRemoteIndexesAction.NAME, + transportService, + actionFilters, + ::GetRemoteIndexesRequest, + ), + SecureTransportAction { + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + @Volatile private var remoteMonitoringEnabled = CROSS_CLUSTER_MONITORING_ENABLED.get(settings) + + init { + 
clusterService.clusterSettings.addSettingsUpdateConsumer(CROSS_CLUSTER_MONITORING_ENABLED) { remoteMonitoringEnabled = it } + listenFilterBySettingChange(clusterService) } - val user = readUserFromThreadContext(client) - if (!validateUserBackendRoles(user, actionListener)) return - - if (!request.isValid()) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException(GetRemoteIndexesRequest.INVALID_PATTERN_MESSAGE, RestStatus.BAD_REQUEST) + override fun doExecute( + task: Task, + request: GetRemoteIndexesRequest, + actionListener: ActionListener, + ) { + log.debug("Remote monitoring enabled: {}", remoteMonitoringEnabled) + if (!remoteMonitoringEnabled) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Remote monitoring is not enabled.", RestStatus.FORBIDDEN), + ), ) - ) - return - } - - client.threadPool().threadContext.stashContext().use { - scope.launch { - val singleThreadContext = newSingleThreadContext("GetRemoteIndexesActionThread") - withContext(singleThreadContext) { - it.restore() - val clusterIndexesList = mutableListOf() - - var resolveIndexResponse: ResolveIndexAction.Response? 
= null - try { - resolveIndexResponse = getRemoteClusters(request.indexes) - } catch (e: Exception) { - log.error("Failed to retrieve indexes for request $request", e) - actionListener.onFailure(AlertingException.wrap(e)) - } + return + } - val resolvedIndexes: MutableList = mutableListOf() - if (resolveIndexResponse != null) { - resolveIndexResponse.indices.forEach { resolvedIndexes.add(it.name) } - resolveIndexResponse.aliases.forEach { resolvedIndexes.add(it.name) } - } + val user = readUserFromThreadContext(client) + if (!validateUserBackendRoles(user, actionListener)) return - val clusterIndexesMap = CrossClusterMonitorUtils.separateClusterIndexes(resolvedIndexes, clusterService) + if (!request.isValid()) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException(GetRemoteIndexesRequest.INVALID_PATTERN_MESSAGE, RestStatus.BAD_REQUEST), + ), + ) + return + } - clusterIndexesMap.forEach { (clusterName, indexes) -> - val targetClient = CrossClusterMonitorUtils.getClientForCluster(clusterName, client, clusterService) + client.threadPool().threadContext.stashContext().use { + scope.launch { + val singleThreadContext = newSingleThreadContext("GetRemoteIndexesActionThread") + withContext(singleThreadContext) { + it.restore() + val clusterIndexesList = mutableListOf() - val startTime = Instant.now() - var clusterHealthResponse: ClusterHealthResponse? = null + var resolveIndexResponse: ResolveIndexAction.Response? = null try { - clusterHealthResponse = getHealthStatuses(targetClient, indexes) + resolveIndexResponse = getRemoteClusters(request.indexes) } catch (e: Exception) { - log.error("Failed to retrieve health statuses for request $request", e) + log.error("Failed to retrieve indexes for request $request", e) actionListener.onFailure(AlertingException.wrap(e)) } - val endTime = Instant.now() - val latency = Duration.between(startTime, endTime).toMillis() - var mappingsResponse: GetMappingsResponse? 
= null - if (request.includeMappings) { + val resolvedIndexes: MutableList = mutableListOf() + if (resolveIndexResponse != null) { + resolveIndexResponse.indices.forEach { resolvedIndexes.add(it.name) } + resolveIndexResponse.aliases.forEach { resolvedIndexes.add(it.name) } + } + + val clusterIndexesMap = CrossClusterMonitorUtils.separateClusterIndexes(resolvedIndexes, clusterService) + + clusterIndexesMap.forEach { (clusterName, indexes) -> + val targetClient = CrossClusterMonitorUtils.getClientForCluster(clusterName, client, clusterService) + + val startTime = Instant.now() + var clusterHealthResponse: ClusterHealthResponse? = null try { - mappingsResponse = getIndexMappings(targetClient, indexes) + clusterHealthResponse = getHealthStatuses(targetClient, indexes) } catch (e: Exception) { - log.error("Failed to retrieve mappings for request $request", e) + log.error("Failed to retrieve health statuses for request $request", e) actionListener.onFailure(AlertingException.wrap(e)) } - } + val endTime = Instant.now() + val latency = Duration.between(startTime, endTime).toMillis() + + var mappingsResponse: GetMappingsResponse? 
= null + if (request.includeMappings) { + try { + mappingsResponse = getIndexMappings(targetClient, indexes) + } catch (e: Exception) { + log.error("Failed to retrieve mappings for request $request", e) + actionListener.onFailure(AlertingException.wrap(e)) + } + } - val clusterIndexList = mutableListOf() - if (clusterHealthResponse != null) { - indexes.forEach { - clusterIndexList.add( - ClusterIndex( - indexName = it, - indexHealth = clusterHealthResponse.indices[it]?.status, - mappings = mappingsResponse?.mappings?.get(it) + val clusterIndexList = mutableListOf() + if (clusterHealthResponse != null) { + indexes.forEach { + clusterIndexList.add( + ClusterIndex( + indexName = it, + indexHealth = clusterHealthResponse.indices[it]?.status, + mappings = mappingsResponse?.mappings?.get(it), + ), ) - ) + } } - } - clusterIndexesList.add( - ClusterIndexes( - clusterName = clusterName, - clusterHealth = clusterHealthResponse?.status, - hubCluster = clusterName == clusterService.clusterName.value(), - indexes = clusterIndexList, - latency = latency + clusterIndexesList.add( + ClusterIndexes( + clusterName = clusterName, + clusterHealth = clusterHealthResponse?.status, + hubCluster = clusterName == clusterService.clusterName.value(), + indexes = clusterIndexList, + latency = latency, + ), ) - ) + } + actionListener.onResponse(GetRemoteIndexesResponse(clusterIndexes = clusterIndexesList)) } - actionListener.onResponse(GetRemoteIndexesResponse(clusterIndexes = clusterIndexesList)) } } } - } - private suspend fun getRemoteClusters(parsedIndexes: List): ResolveIndexAction.Response { - val resolveRequest = ResolveIndexAction.Request( - parsedIndexes.toTypedArray(), - ResolveIndexAction.Request.DEFAULT_INDICES_OPTIONS - ) + private suspend fun getRemoteClusters(parsedIndexes: List): ResolveIndexAction.Response { + val resolveRequest = + ResolveIndexAction.Request( + parsedIndexes.toTypedArray(), + ResolveIndexAction.Request.DEFAULT_INDICES_OPTIONS, + ) - return 
client.suspendUntil { - admin().indices().resolveIndex(resolveRequest, it) + return client.suspendUntil { + admin().indices().resolveIndex(resolveRequest, it) + } } - } - private suspend fun getHealthStatuses(targetClient: Client, parsedIndexesNames: List): ClusterHealthResponse { - val clusterHealthRequest = ClusterHealthRequest() - .indices(*parsedIndexesNames.toTypedArray()) - .indicesOptions(IndicesOptions.lenientExpandHidden()) - return targetClient.suspendUntil { - admin().cluster().health(clusterHealthRequest, it) + private suspend fun getHealthStatuses( + targetClient: Client, + parsedIndexesNames: List, + ): ClusterHealthResponse { + val clusterHealthRequest = + ClusterHealthRequest() + .indices(*parsedIndexesNames.toTypedArray()) + .indicesOptions(IndicesOptions.lenientExpandHidden()) + + return targetClient.suspendUntil { + admin().cluster().health(clusterHealthRequest, it) + } } - } - private suspend fun getIndexMappings(targetClient: Client, parsedIndexNames: List): GetMappingsResponse { - val getMappingsRequest = GetMappingsRequest().indices(*parsedIndexNames.toTypedArray()) - return targetClient.suspendUntil { - admin().indices().getMappings(getMappingsRequest, it) + private suspend fun getIndexMappings( + targetClient: Client, + parsedIndexNames: List, + ): GetMappingsResponse { + val getMappingsRequest = GetMappingsRequest().indices(*parsedIndexNames.toTypedArray()) + return targetClient.suspendUntil { + admin().indices().getMappings(getMappingsRequest, it) + } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt index d0d3e45f7..384916f6c 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAction.kt @@ -33,117 +33,128 @@ import org.opensearch.tasks.Task import 
org.opensearch.transport.TransportService import org.opensearch.transport.client.Client -class TransportGetWorkflowAction @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry, - val clusterService: ClusterService, - settings: Settings -) : HandledTransportAction( - AlertingActions.GET_WORKFLOW_ACTION_NAME, transportService, actionFilters, ::GetWorkflowRequest -), - SecureTransportAction { +class TransportGetWorkflowAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val xContentRegistry: NamedXContentRegistry, + val clusterService: ClusterService, + settings: Settings, + ) : HandledTransportAction( + AlertingActions.GET_WORKFLOW_ACTION_NAME, + transportService, + actionFilters, + ::GetWorkflowRequest, + ), + SecureTransportAction { + private val log = LogManager.getLogger(javaClass) - private val log = LogManager.getLogger(javaClass) + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - listenFilterBySettingChange(clusterService) - } + init { + listenFilterBySettingChange(clusterService) + } - override fun doExecute(task: Task, getWorkflowRequest: GetWorkflowRequest, actionListener: ActionListener) { - val user = readUserFromThreadContext(client) + override fun doExecute( + task: Task, + getWorkflowRequest: GetWorkflowRequest, + actionListener: ActionListener, + ) { + val user = readUserFromThreadContext(client) - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, getWorkflowRequest.workflowId) + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, getWorkflowRequest.workflowId) - if (!validateUserBackendRoles(user, actionListener)) { - return - } + if (!validateUserBackendRoles(user, actionListener)) { + return + 
} - client.threadPool().threadContext.stashContext().use { - client.get( - getRequest, - object : ActionListener { - override fun onResponse(response: GetResponse) { - if (!response.isExists) { - log.error("Workflow with ${getWorkflowRequest.workflowId} not found") - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Workflow not found.", - RestStatus.NOT_FOUND - ) + client.threadPool().threadContext.stashContext().use { + client.get( + getRequest, + object : ActionListener { + override fun onResponse(response: GetResponse) { + if (!response.isExists) { + log.error("Workflow with ${getWorkflowRequest.workflowId} not found") + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Workflow not found.", + RestStatus.NOT_FOUND, + ), + ), ) - ) - return - } + return + } - var workflow: Workflow? = null - if (!response.isSourceEmpty) { - XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, XContentType.JSON - ).use { xcp -> - val compositeMonitor = ScheduledJob.parse(xcp, response.id, response.version) - if (compositeMonitor is Workflow) { - workflow = compositeMonitor - } else { - log.error("Wrong monitor type returned") - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Workflow not found.", - RestStatus.NOT_FOUND + var workflow: Workflow? 
= null + if (!response.isSourceEmpty) { + XContentHelper + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, + XContentType.JSON, + ).use { xcp -> + val compositeMonitor = ScheduledJob.parse(xcp, response.id, response.version) + if (compositeMonitor is Workflow) { + workflow = compositeMonitor + } else { + log.error("Wrong monitor type returned") + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Workflow not found.", + RestStatus.NOT_FOUND, + ), + ), ) - ) - ) - return - } + return + } - // security is enabled and filterby is enabled - if (!checkUserPermissionsWithResource( - user, - workflow?.user, - actionListener, - "workflow", - getWorkflowRequest.workflowId - ) - ) { - return - } + // security is enabled and filterby is enabled + if (!checkUserPermissionsWithResource( + user, + workflow?.user, + actionListener, + "workflow", + getWorkflowRequest.workflowId, + ) + ) { + return + } + } } - } - actionListener.onResponse( - GetWorkflowResponse( - response.id, - response.version, - response.seqNo, - response.primaryTerm, - RestStatus.OK, - workflow + actionListener.onResponse( + GetWorkflowResponse( + response.id, + response.version, + response.seqNo, + response.primaryTerm, + RestStatus.OK, + workflow, + ), ) - ) - } + } - override fun onFailure(t: Exception) { - log.error("Getting the workflow failed", t) + override fun onFailure(t: Exception) { + log.error("Getting the workflow failed", t) - if (t is IndexNotFoundException) { - actionListener.onFailure( - OpenSearchStatusException( - "Workflow not found", - RestStatus.NOT_FOUND + if (t is IndexNotFoundException) { + actionListener.onFailure( + OpenSearchStatusException( + "Workflow not found", + RestStatus.NOT_FOUND, + ), ) - ) - } else { - actionListener.onFailure(AlertingException.wrap(t)) + } else { + actionListener.onFailure(AlertingException.wrap(t)) + } } - } - } - ) + }, + ) + } } } -} diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAlertsAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAlertsAction.kt index dd26bf032..bf488afd9 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAlertsAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportGetWorkflowAlertsAction.kt @@ -51,225 +51,251 @@ import java.io.IOException private val log = LogManager.getLogger(TransportGetAlertsAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportGetWorkflowAlertsAction @Inject constructor( - transportService: TransportService, - val client: Client, - clusterService: ClusterService, - actionFilters: ActionFilters, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, -) : HandledTransportAction( - AlertingActions.GET_WORKFLOW_ALERTS_ACTION_NAME, - transportService, - actionFilters, - ::GetAlertsRequest -), - SecureTransportAction { +class TransportGetWorkflowAlertsAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + clusterService: ClusterService, + actionFilters: ActionFilters, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + ) : HandledTransportAction( + AlertingActions.GET_WORKFLOW_ALERTS_ACTION_NAME, + transportService, + actionFilters, + ::GetAlertsRequest, + ), + SecureTransportAction { + @Volatile + override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - @Volatile - override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + @Volatile + private var isAlertHistoryEnabled = AlertingSettings.ALERT_HISTORY_ENABLED.get(settings) - @Volatile - private var isAlertHistoryEnabled = AlertingSettings.ALERT_HISTORY_ENABLED.get(settings) - - init { - 
clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.ALERT_HISTORY_ENABLED) { isAlertHistoryEnabled = it } - listenFilterBySettingChange(clusterService) - } + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.ALERT_HISTORY_ENABLED) { isAlertHistoryEnabled = it } + listenFilterBySettingChange(clusterService) + } - override fun doExecute( - task: Task, - request: ActionRequest, - actionListener: ActionListener, - ) { - val getWorkflowAlertsRequest = request as? GetWorkflowAlertsRequest - ?: recreateObject(request) { GetWorkflowAlertsRequest(it) } - val user = readUserFromThreadContext(client) + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, + ) { + val getWorkflowAlertsRequest = + request as? GetWorkflowAlertsRequest + ?: recreateObject(request) { GetWorkflowAlertsRequest(it) } + val user = readUserFromThreadContext(client) - val tableProp = getWorkflowAlertsRequest.table - val sortBuilder = SortBuilders.fieldSort(tableProp.sortString) - .order(SortOrder.fromString(tableProp.sortOrder)) - if (!tableProp.missing.isNullOrBlank()) { - sortBuilder.missing(tableProp.missing) - } + val tableProp = getWorkflowAlertsRequest.table + val sortBuilder = + SortBuilders + .fieldSort(tableProp.sortString) + .order(SortOrder.fromString(tableProp.sortOrder)) + if (!tableProp.missing.isNullOrBlank()) { + sortBuilder.missing(tableProp.missing) + } - val queryBuilder = QueryBuilders.boolQuery() + val queryBuilder = QueryBuilders.boolQuery() - if (getWorkflowAlertsRequest.severityLevel != "ALL") { - queryBuilder.filter(QueryBuilders.termQuery("severity", getWorkflowAlertsRequest.severityLevel)) - } + if (getWorkflowAlertsRequest.severityLevel != "ALL") { + queryBuilder.filter(QueryBuilders.termQuery("severity", getWorkflowAlertsRequest.severityLevel)) + } - if (getWorkflowAlertsRequest.alertState == "ALL") { - QueryBuilders.boolQuery() - 
.filter(QueryBuilders.boolQuery().mustNot(QueryBuilders.termsQuery(Alert.STATE_FIELD, Alert.State.AUDIT.name))) - } else { - queryBuilder.filter(QueryBuilders.termQuery(Alert.STATE_FIELD, getWorkflowAlertsRequest.alertState)) - } + if (getWorkflowAlertsRequest.alertState == "ALL") { + QueryBuilders + .boolQuery() + .filter(QueryBuilders.boolQuery().mustNot(QueryBuilders.termsQuery(Alert.STATE_FIELD, Alert.State.AUDIT.name))) + } else { + queryBuilder.filter(QueryBuilders.termQuery(Alert.STATE_FIELD, getWorkflowAlertsRequest.alertState)) + } - if (getWorkflowAlertsRequest.alertIds.isNullOrEmpty() == false) { - queryBuilder.filter(QueryBuilders.termsQuery("_id", getWorkflowAlertsRequest.alertIds)) - } + if (getWorkflowAlertsRequest.alertIds.isNullOrEmpty() == false) { + queryBuilder.filter(QueryBuilders.termsQuery("_id", getWorkflowAlertsRequest.alertIds)) + } - if (getWorkflowAlertsRequest.monitorIds.isNullOrEmpty() == false) { - queryBuilder.filter(QueryBuilders.termsQuery("monitor_id", getWorkflowAlertsRequest.monitorIds)) - } - if (getWorkflowAlertsRequest.workflowIds.isNullOrEmpty() == false) { - queryBuilder.must(QueryBuilders.termsQuery("workflow_id", getWorkflowAlertsRequest.workflowIds)) - queryBuilder.must(QueryBuilders.termQuery("monitor_id", "")) - } - if (!tableProp.searchString.isNullOrBlank()) { - queryBuilder - .must( - QueryBuilders.queryStringQuery(tableProp.searchString) - .defaultOperator(Operator.AND) - .field("monitor_name") - .field("trigger_name") - ) - } - // if alert id is mentioned we cannot set "from" field as it may not return id. 
we would be using it to paginate associated alerts - val from = if (getWorkflowAlertsRequest.alertIds.isNullOrEmpty()) - tableProp.startIndex - else 0 + if (getWorkflowAlertsRequest.monitorIds.isNullOrEmpty() == false) { + queryBuilder.filter(QueryBuilders.termsQuery("monitor_id", getWorkflowAlertsRequest.monitorIds)) + } + if (getWorkflowAlertsRequest.workflowIds.isNullOrEmpty() == false) { + queryBuilder.must(QueryBuilders.termsQuery("workflow_id", getWorkflowAlertsRequest.workflowIds)) + queryBuilder.must(QueryBuilders.termQuery("monitor_id", "")) + } + if (!tableProp.searchString.isNullOrBlank()) { + queryBuilder + .must( + QueryBuilders + .queryStringQuery(tableProp.searchString) + .defaultOperator(Operator.AND) + .field("monitor_name") + .field("trigger_name"), + ) + } + // if alert id is mentioned we cannot set "from" field as it may not return id. we would be using it to paginate associated alerts + val from = + if (getWorkflowAlertsRequest.alertIds.isNullOrEmpty()) { + tableProp.startIndex + } else { + 0 + } - val searchSourceBuilder = SearchSourceBuilder() - .version(true) - .seqNoAndPrimaryTerm(true) - .query(queryBuilder) - .sort(sortBuilder) - .size(tableProp.size) - .from(from) + val searchSourceBuilder = + SearchSourceBuilder() + .version(true) + .seqNoAndPrimaryTerm(true) + .query(queryBuilder) + .sort(sortBuilder) + .size(tableProp.size) + .from(from) - client.threadPool().threadContext.stashContext().use { - scope.launch { - try { - val alertIndex = resolveAlertsIndexName(getWorkflowAlertsRequest) - getAlerts(getWorkflowAlertsRequest, alertIndex, searchSourceBuilder, actionListener, user) - } catch (t: Exception) { - log.error("Failed to get alerts", t) - if (t is AlertingException) { - actionListener.onFailure(t) - } else { - actionListener.onFailure(AlertingException.wrap(t)) + client.threadPool().threadContext.stashContext().use { + scope.launch { + try { + val alertIndex = resolveAlertsIndexName(getWorkflowAlertsRequest) + 
getAlerts(getWorkflowAlertsRequest, alertIndex, searchSourceBuilder, actionListener, user) + } catch (t: Exception) { + log.error("Failed to get alerts", t) + if (t is AlertingException) { + actionListener.onFailure(t) + } else { + actionListener.onFailure(AlertingException.wrap(t)) + } } } } } - } - fun resolveAlertsIndexName(getAlertsRequest: GetWorkflowAlertsRequest): String { - var alertIndex = AlertIndices.ALL_ALERT_INDEX_PATTERN - if (getAlertsRequest.alertIndex.isNullOrEmpty() == false) { - alertIndex = getAlertsRequest.alertIndex!! + fun resolveAlertsIndexName(getAlertsRequest: GetWorkflowAlertsRequest): String { + var alertIndex = AlertIndices.ALL_ALERT_INDEX_PATTERN + if (getAlertsRequest.alertIndex.isNullOrEmpty() == false) { + alertIndex = getAlertsRequest.alertIndex!! + } + return if (alertIndex == AlertIndices.ALERT_INDEX) { + AlertIndices.ALL_ALERT_INDEX_PATTERN + } else { + alertIndex + } } - return if (alertIndex == AlertIndices.ALERT_INDEX) - AlertIndices.ALL_ALERT_INDEX_PATTERN - else - alertIndex - } - fun resolveAssociatedAlertsIndexName(getAlertsRequest: GetWorkflowAlertsRequest): String { - return if (getAlertsRequest.alertIndex.isNullOrEmpty()) AlertIndices.ALL_ALERT_INDEX_PATTERN - else getAlertsRequest.associatedAlertsIndex!! - } + fun resolveAssociatedAlertsIndexName(getAlertsRequest: GetWorkflowAlertsRequest): String = + if (getAlertsRequest.alertIndex.isNullOrEmpty()) { + AlertIndices.ALL_ALERT_INDEX_PATTERN + } else { + getAlertsRequest.associatedAlertsIndex!! + } - suspend fun getAlerts( - getWorkflowAlertsRequest: GetWorkflowAlertsRequest, - alertIndex: String, - searchSourceBuilder: SearchSourceBuilder, - actionListener: ActionListener, - user: User?, - ) { - // user is null when: 1/ security is disabled. 2/when user is super-admin. 
- if (user == null) { + suspend fun getAlerts( + getWorkflowAlertsRequest: GetWorkflowAlertsRequest, + alertIndex: String, + searchSourceBuilder: SearchSourceBuilder, + actionListener: ActionListener, + user: User?, + ) { // user is null when: 1/ security is disabled. 2/when user is super-admin. - search(getWorkflowAlertsRequest, alertIndex, searchSourceBuilder, actionListener) - } else if (!doFilterForUser(user)) { - // security is enabled and filterby is disabled. - search(getWorkflowAlertsRequest, alertIndex, searchSourceBuilder, actionListener) - } else { - // security is enabled and filterby is enabled. - try { - log.info("Filtering result by: ${user.backendRoles}") - addFilter(user, searchSourceBuilder, "monitor_user.backend_roles.keyword") + if (user == null) { + // user is null when: 1/ security is disabled. 2/when user is super-admin. + search(getWorkflowAlertsRequest, alertIndex, searchSourceBuilder, actionListener) + } else if (!doFilterForUser(user)) { + // security is enabled and filterby is disabled. search(getWorkflowAlertsRequest, alertIndex, searchSourceBuilder, actionListener) - } catch (ex: IOException) { - actionListener.onFailure(AlertingException.wrap(ex)) + } else { + // security is enabled and filterby is enabled. 
+ try { + log.info("Filtering result by: ${user.backendRoles}") + addFilter(user, searchSourceBuilder, "monitor_user.backend_roles.keyword") + search(getWorkflowAlertsRequest, alertIndex, searchSourceBuilder, actionListener) + } catch (ex: IOException) { + actionListener.onFailure(AlertingException.wrap(ex)) + } } } - } - suspend fun search( - getWorkflowAlertsRequest: GetWorkflowAlertsRequest, - alertIndex: String, - searchSourceBuilder: SearchSourceBuilder, - actionListener: ActionListener, - ) { - try { - val searchRequest = SearchRequest() - .indices(alertIndex) - .source(searchSourceBuilder) - val alerts = mutableListOf() - val associatedAlerts = mutableListOf() + suspend fun search( + getWorkflowAlertsRequest: GetWorkflowAlertsRequest, + alertIndex: String, + searchSourceBuilder: SearchSourceBuilder, + actionListener: ActionListener, + ) { + try { + val searchRequest = + SearchRequest() + .indices(alertIndex) + .source(searchSourceBuilder) + val alerts = mutableListOf() + val associatedAlerts = mutableListOf() - val response: SearchResponse = client.suspendUntil { search(searchRequest, it) } - val totalAlertCount = response.hits.totalHits?.value?.toInt() - alerts.addAll( - parseAlertsFromSearchResponse(response) - ) - if (alerts.isNotEmpty() && getWorkflowAlertsRequest.getAssociatedAlerts == true) - getAssociatedAlerts( - associatedAlerts, - alerts, - resolveAssociatedAlertsIndexName(getWorkflowAlertsRequest), - getWorkflowAlertsRequest + val response: SearchResponse = client.suspendUntil { search(searchRequest, it) } + val totalAlertCount = + response.hits.totalHits + ?.value + ?.toInt() + alerts.addAll( + parseAlertsFromSearchResponse(response), ) - actionListener.onResponse(GetWorkflowAlertsResponse(alerts, associatedAlerts, totalAlertCount)) - } catch (e: Exception) { - actionListener.onFailure(AlertingException("Failed to get alerts", RestStatus.INTERNAL_SERVER_ERROR, e)) + if (alerts.isNotEmpty() && getWorkflowAlertsRequest.getAssociatedAlerts == true) 
{ + getAssociatedAlerts( + associatedAlerts, + alerts, + resolveAssociatedAlertsIndexName(getWorkflowAlertsRequest), + getWorkflowAlertsRequest, + ) + } + actionListener.onResponse(GetWorkflowAlertsResponse(alerts, associatedAlerts, totalAlertCount)) + } catch (e: Exception) { + actionListener.onFailure(AlertingException("Failed to get alerts", RestStatus.INTERNAL_SERVER_ERROR, e)) + } } - } - private suspend fun getAssociatedAlerts( - associatedAlerts: MutableList, - alerts: MutableList, - alertIndex: String, - getWorkflowAlertsRequest: GetWorkflowAlertsRequest, - ) { - try { - val associatedAlertIds = mutableSetOf() - alerts.forEach { associatedAlertIds.addAll(it.associatedAlertIds) } - if (associatedAlertIds.isEmpty()) return - val queryBuilder = QueryBuilders.boolQuery() - val searchRequest = SearchRequest(alertIndex) - // if chained alert id param is non-null, paginate the associated alerts. - if (getWorkflowAlertsRequest.alertIds.isNullOrEmpty() == false) { - val tableProp = getWorkflowAlertsRequest.table - val sortBuilder = SortBuilders.fieldSort(tableProp.sortString) - .order(SortOrder.fromString(tableProp.sortOrder)) - if (!tableProp.missing.isNullOrBlank()) { - sortBuilder.missing(tableProp.missing) + private suspend fun getAssociatedAlerts( + associatedAlerts: MutableList, + alerts: MutableList, + alertIndex: String, + getWorkflowAlertsRequest: GetWorkflowAlertsRequest, + ) { + try { + val associatedAlertIds = mutableSetOf() + alerts.forEach { associatedAlertIds.addAll(it.associatedAlertIds) } + if (associatedAlertIds.isEmpty()) return + val queryBuilder = QueryBuilders.boolQuery() + val searchRequest = SearchRequest(alertIndex) + // if chained alert id param is non-null, paginate the associated alerts. 
+ if (getWorkflowAlertsRequest.alertIds.isNullOrEmpty() == false) { + val tableProp = getWorkflowAlertsRequest.table + val sortBuilder = + SortBuilders + .fieldSort(tableProp.sortString) + .order(SortOrder.fromString(tableProp.sortOrder)) + if (!tableProp.missing.isNullOrBlank()) { + sortBuilder.missing(tableProp.missing) + } + searchRequest + .source() + .sort(sortBuilder) + .size(tableProp.size) + .from(tableProp.startIndex) } - searchRequest.source().sort(sortBuilder).size(tableProp.size).from(tableProp.startIndex) + queryBuilder.must(QueryBuilders.termsQuery("_id", associatedAlertIds)) + queryBuilder.must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.AUDIT.name)) + searchRequest.source().query(queryBuilder) + val response: SearchResponse = client.suspendUntil { search(searchRequest, it) } + associatedAlerts.addAll(parseAlertsFromSearchResponse(response)) + } catch (e: Exception) { + log.error("Failed to get associated alerts in get workflow alerts action", e) } - queryBuilder.must(QueryBuilders.termsQuery("_id", associatedAlertIds)) - queryBuilder.must(QueryBuilders.termQuery(Alert.STATE_FIELD, Alert.State.AUDIT.name)) - searchRequest.source().query(queryBuilder) - val response: SearchResponse = client.suspendUntil { search(searchRequest, it) } - associatedAlerts.addAll(parseAlertsFromSearchResponse(response)) - } catch (e: Exception) { - log.error("Failed to get associated alerts in get workflow alerts action", e) } - } - private fun parseAlertsFromSearchResponse(response: SearchResponse) = response.hits.map { hit -> - val xcp = XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val alert = Alert.parse(xcp, hit.id, hit.version) - alert + private fun parseAlertsFromSearchResponse(response: SearchResponse) = + response.hits.map { hit -> + val xcp = + XContentHelper.createParser( 
+ xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val alert = Alert.parse(xcp, hit.id, hit.version) + alert + } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexAlertingCommentAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexAlertingCommentAction.kt index 8592c505c..8007ac814 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexAlertingCommentAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexAlertingCommentAction.kt @@ -62,327 +62,334 @@ private val log = LogManager.getLogger(TransportIndexMonitorAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) class TransportIndexAlertingCommentAction -@Inject -constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val commentsIndices: CommentsIndices, - val clusterService: ClusterService, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, - val namedWriteableRegistry: NamedWriteableRegistry, -) : HandledTransportAction( - AlertingActions.INDEX_COMMENT_ACTION_NAME, - transportService, - actionFilters, - ::IndexCommentRequest, -), - SecureTransportAction { - - @Volatile private var alertingCommentsEnabled = ALERTING_COMMENTS_ENABLED.get(settings) - @Volatile private var commentsMaxContentSize = COMMENTS_MAX_CONTENT_SIZE.get(settings) - @Volatile private var maxCommentsPerAlert = MAX_COMMENTS_PER_ALERT.get(settings) - @Volatile private var indexTimeout = INDEX_TIMEOUT.get(settings) - - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_COMMENTS_ENABLED) { alertingCommentsEnabled = it } - 
clusterService.clusterSettings.addSettingsUpdateConsumer(COMMENTS_MAX_CONTENT_SIZE) { commentsMaxContentSize = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(MAX_COMMENTS_PER_ALERT) { maxCommentsPerAlert = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(INDEX_TIMEOUT) { indexTimeout = it } - listenFilterBySettingChange(clusterService) - } + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val commentsIndices: CommentsIndices, + val clusterService: ClusterService, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + val namedWriteableRegistry: NamedWriteableRegistry, + ) : HandledTransportAction( + AlertingActions.INDEX_COMMENT_ACTION_NAME, + transportService, + actionFilters, + ::IndexCommentRequest, + ), + SecureTransportAction { + @Volatile private var alertingCommentsEnabled = ALERTING_COMMENTS_ENABLED.get(settings) + + @Volatile private var commentsMaxContentSize = COMMENTS_MAX_CONTENT_SIZE.get(settings) + + @Volatile private var maxCommentsPerAlert = MAX_COMMENTS_PER_ALERT.get(settings) + + @Volatile private var indexTimeout = INDEX_TIMEOUT.get(settings) + + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_COMMENTS_ENABLED) { alertingCommentsEnabled = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(COMMENTS_MAX_CONTENT_SIZE) { commentsMaxContentSize = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(MAX_COMMENTS_PER_ALERT) { maxCommentsPerAlert = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(INDEX_TIMEOUT) { indexTimeout = it } + listenFilterBySettingChange(clusterService) + } - override fun doExecute( - task: Task, - request: ActionRequest, - actionListener: ActionListener, - ) { - // validate feature flag enabled - if (!alertingCommentsEnabled) { - 
actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Comments for Alerting is currently disabled", RestStatus.FORBIDDEN), + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, + ) { + // validate feature flag enabled + if (!alertingCommentsEnabled) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Comments for Alerting is currently disabled", RestStatus.FORBIDDEN), + ), ) - ) - return - } + return + } - val transformedRequest = - request as? IndexCommentRequest - ?: recreateObject(request, namedWriteableRegistry) { - IndexCommentRequest(it) - } + val transformedRequest = + request as? IndexCommentRequest + ?: recreateObject(request, namedWriteableRegistry) { + IndexCommentRequest(it) + } - // validate comment content size - if (transformedRequest.content.length > commentsMaxContentSize) { - actionListener.onFailure( - AlertingException.wrap( - IllegalArgumentException("Comment content exceeds max length of $commentsMaxContentSize characters"), + // validate comment content size + if (transformedRequest.content.length > commentsMaxContentSize) { + actionListener.onFailure( + AlertingException.wrap( + IllegalArgumentException("Comment content exceeds max length of $commentsMaxContentSize characters"), + ), ) - ) - return - } + return + } - // validate the request is for the correct entity type - if (transformedRequest.entityType != "alert") { - actionListener.onFailure( - AlertingException.wrap( - IllegalArgumentException( - "Index comment request is for wrong entity type, expected alert, got ${transformedRequest.entityType}" - ) + // validate the request is for the correct entity type + if (transformedRequest.entityType != "alert") { + actionListener.onFailure( + AlertingException.wrap( + IllegalArgumentException( + "Index comment request is for wrong entity type, expected alert, got ${transformedRequest.entityType}", + ), + ), ) - ) - return - } + return + } 
- val user = readUserFromThreadContext(client) + val user = readUserFromThreadContext(client) - client.threadPool().threadContext.stashContext().use { - scope.launch { - IndexCommentHandler(client, actionListener, transformedRequest, user).start() + client.threadPool().threadContext.stashContext().use { + scope.launch { + IndexCommentHandler(client, actionListener, transformedRequest, user).start() + } } } - } - inner class IndexCommentHandler( - private val client: Client, - private val actionListener: ActionListener, - private val request: IndexCommentRequest, - private val user: User?, - ) { - suspend fun start() { - commentsIndices.createOrUpdateInitialCommentsHistoryIndex() - if (request.method == RestRequest.Method.PUT) { - updateComment() - } else { - indexComment() + inner class IndexCommentHandler( + private val client: Client, + private val actionListener: ActionListener, + private val request: IndexCommentRequest, + private val user: User?, + ) { + suspend fun start() { + commentsIndices.createOrUpdateInitialCommentsHistoryIndex() + if (request.method == RestRequest.Method.PUT) { + updateComment() + } else { + indexComment() + } } - } - - private suspend fun indexComment() { - val alert = getAlert() ?: return - val numCommentsOnThisAlert = CommentsUtils.getCommentIDsByAlertIDs(client, listOf(alert.id)).size - if (numCommentsOnThisAlert >= maxCommentsPerAlert) { - actionListener.onFailure( - AlertingException.wrap( - IllegalArgumentException( - "This request would create more than the allowed number of Comments" + - "for this Alert: $maxCommentsPerAlert" - ) - ) - ) - return - } + private suspend fun indexComment() { + val alert = getAlert() ?: return - log.debug("checking user permissions in index comment") - checkUserPermissionsWithResource(user, alert.monitorUser, actionListener, "monitor", alert.monitorId) - - val comment = Comment( - entityId = request.entityId, - entityType = request.entityType, - content = request.content, - createdTime = 
Instant.now(), - user = user - ) - - val indexRequest = - IndexRequest(COMMENTS_HISTORY_WRITE_INDEX) - .source(comment.toXContentWithUser(XContentFactory.jsonBuilder())) - .setIfSeqNo(request.seqNo) - .setIfPrimaryTerm(request.primaryTerm) - .timeout(indexTimeout) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - - log.debug("Creating new comment: ${comment.toXContentWithUser(XContentFactory.jsonBuilder())}") - - try { - val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } - val failureReasons = checkShardsFailure(indexResponse) - if (failureReasons != null) { + val numCommentsOnThisAlert = CommentsUtils.getCommentIDsByAlertIDs(client, listOf(alert.id)).size + if (numCommentsOnThisAlert >= maxCommentsPerAlert) { actionListener.onFailure( - AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())), + AlertingException.wrap( + IllegalArgumentException( + "This request would create more than the allowed number of Comments" + + "for this Alert: $maxCommentsPerAlert", + ), + ), ) return } - actionListener.onResponse( - IndexCommentResponse(indexResponse.id, indexResponse.seqNo, indexResponse.primaryTerm, comment) - ) - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) - } - } + log.debug("checking user permissions in index comment") + checkUserPermissionsWithResource(user, alert.monitorUser, actionListener, "monitor", alert.monitorId) - private suspend fun updateComment() { - val currentComment = getComment() ?: return + val comment = + Comment( + entityId = request.entityId, + entityType = request.entityType, + content = request.content, + createdTime = Instant.now(), + user = user, + ) - // check that the user has permissions to edit the comment. 
user can edit comment if - // - user is Admin - // - user is the author of the comment - if (user != null && !isAdmin(user) && user.name != currentComment.user?.name) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Comment can only be edited by Admin or author of comment", - RestStatus.FORBIDDEN, - ), - ), - ) - return + val indexRequest = + IndexRequest(COMMENTS_HISTORY_WRITE_INDEX) + .source(comment.toXContentWithUser(XContentFactory.jsonBuilder())) + .setIfSeqNo(request.seqNo) + .setIfPrimaryTerm(request.primaryTerm) + .timeout(indexTimeout) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + + log.debug("Creating new comment: ${comment.toXContentWithUser(XContentFactory.jsonBuilder())}") + + try { + val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } + val failureReasons = checkShardsFailure(indexResponse) + if (failureReasons != null) { + actionListener.onFailure( + AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())), + ) + return + } + + actionListener.onResponse( + IndexCommentResponse(indexResponse.id, indexResponse.seqNo, indexResponse.primaryTerm, comment), + ) + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } } - // retains everything from the original comment except content and lastUpdatedTime - val requestComment = currentComment.copy(content = request.content, lastUpdatedTime = Instant.now()) - - val indexRequest = - IndexRequest(COMMENTS_HISTORY_WRITE_INDEX) - .source(requestComment.toXContentWithUser(XContentFactory.jsonBuilder())) - .id(requestComment.id) - .setIfSeqNo(request.seqNo) - .setIfPrimaryTerm(request.primaryTerm) - .timeout(indexTimeout) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - - log.debug( - "Updating comment, ${currentComment.id}, from: " + - "${currentComment.content} to: " + - requestComment.content, - ) - - try { - val indexResponse: IndexResponse 
= client.suspendUntil { client.index(indexRequest, it) } - val failureReasons = checkShardsFailure(indexResponse) - if (failureReasons != null) { + private suspend fun updateComment() { + val currentComment = getComment() ?: return + + // check that the user has permissions to edit the comment. user can edit comment if + // - user is Admin + // - user is the author of the comment + if (user != null && !isAdmin(user) && user.name != currentComment.user?.name) { actionListener.onFailure( - AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())), + AlertingException.wrap( + OpenSearchStatusException( + "Comment can only be edited by Admin or author of comment", + RestStatus.FORBIDDEN, + ), + ), ) return } - actionListener.onResponse( - IndexCommentResponse( - indexResponse.id, - indexResponse.seqNo, - indexResponse.primaryTerm, - requestComment, - ), - ) - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) - } - } - - private suspend fun getAlert(): Alert? { - // need to validate the existence of the Alert that user is trying to add Comment to. - // Also need to check if user has permissions to add a Comment to the passed in Alert. 
To do this, - // we retrieve the Alert to get its associated monitor user, and use that to - // check if they have permissions to the Monitor that generated the Alert - val queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.termsQuery("_id", listOf(request.entityId))) - val searchSourceBuilder = - SearchSourceBuilder() - .version(true) - .seqNoAndPrimaryTerm(true) - .query(queryBuilder) - - // search all alerts, since user might want to create a comment - // on a completed alert - val searchRequest = - SearchRequest() - .indices(AlertIndices.ALL_ALERT_INDEX_PATTERN) - .source(searchSourceBuilder) - - val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } - val alerts = searchResponse.hits.map { hit -> - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON + // retains everything from the original comment except content and lastUpdatedTime + val requestComment = currentComment.copy(content = request.content, lastUpdatedTime = Instant.now()) + + val indexRequest = + IndexRequest(COMMENTS_HISTORY_WRITE_INDEX) + .source(requestComment.toXContentWithUser(XContentFactory.jsonBuilder())) + .id(requestComment.id) + .setIfSeqNo(request.seqNo) + .setIfPrimaryTerm(request.primaryTerm) + .timeout(indexTimeout) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + + log.debug( + "Updating comment, ${currentComment.id}, from: " + + "${currentComment.content} to: " + + requestComment.content, ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val alert = Alert.parse(xcp, hit.id, hit.version) - alert - } - if (alerts.isEmpty()) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Alert not found", RestStatus.NOT_FOUND), + try { + val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } + val failureReasons = 
checkShardsFailure(indexResponse) + if (failureReasons != null) { + actionListener.onFailure( + AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())), + ) + return + } + + actionListener.onResponse( + IndexCommentResponse( + indexResponse.id, + indexResponse.seqNo, + indexResponse.primaryTerm, + requestComment, + ), ) - ) - return null - } else if (alerts.size > 1) { - actionListener.onFailure( - AlertingException.wrap(IllegalStateException("Multiple alerts were found with the same ID")), - ) - return null + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } } - return alerts[0] - } + private suspend fun getAlert(): Alert? { + // need to validate the existence of the Alert that user is trying to add Comment to. + // Also need to check if user has permissions to add a Comment to the passed in Alert. To do this, + // we retrieve the Alert to get its associated monitor user, and use that to + // check if they have permissions to the Monitor that generated the Alert + val queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.termsQuery("_id", listOf(request.entityId))) + val searchSourceBuilder = + SearchSourceBuilder() + .version(true) + .seqNoAndPrimaryTerm(true) + .query(queryBuilder) + + // search all alerts, since user might want to create a comment + // on a completed alert + val searchRequest = + SearchRequest() + .indices(AlertIndices.ALL_ALERT_INDEX_PATTERN) + .source(searchSourceBuilder) + + val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + val alerts = + searchResponse.hits.map { hit -> + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val alert = Alert.parse(xcp, hit.id, hit.version) + alert + } + + if (alerts.isEmpty()) { + 
actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Alert not found", RestStatus.NOT_FOUND), + ), + ) + return null + } else if (alerts.size > 1) { + actionListener.onFailure( + AlertingException.wrap(IllegalStateException("Multiple alerts were found with the same ID")), + ) + return null + } - private suspend fun getComment(): Comment? { - // need to validate the existence of the Alert that user is trying to add Comment to. - // Also need to check if user has permissions to add a Comment to the passed in Alert. To do this, - // we retrieve the Alert to get its associated monitor user, and use that to - // check if they have permissions to the Monitor that generated the Alert - val queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.termsQuery("_id", listOf(request.commentId))) - val searchSourceBuilder = - SearchSourceBuilder() - .version(true) - .seqNoAndPrimaryTerm(true) - .query(queryBuilder) - - // search all alerts, since user might want to create a comment - // on a completed alert - val searchRequest = - SearchRequest() - .indices(CommentsIndices.ALL_COMMENTS_INDEX_PATTERN) - .source(searchSourceBuilder) - - val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } - val comments = searchResponse.hits.map { hit -> - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val comment = Comment.parse(xcp, hit.id) - comment + return alerts[0] } - if (comments.isEmpty()) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Comment not found", RestStatus.NOT_FOUND), - ), - ) - return null - } else if (comments.size > 1) { - actionListener.onFailure( - AlertingException.wrap(IllegalStateException("Multiple comments were found with the same ID")), - ) - return null - } + 
private suspend fun getComment(): Comment? { + // need to validate the existence of the Alert that user is trying to add Comment to. + // Also need to check if user has permissions to add a Comment to the passed in Alert. To do this, + // we retrieve the Alert to get its associated monitor user, and use that to + // check if they have permissions to the Monitor that generated the Alert + val queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.termsQuery("_id", listOf(request.commentId))) + val searchSourceBuilder = + SearchSourceBuilder() + .version(true) + .seqNoAndPrimaryTerm(true) + .query(queryBuilder) + + // search all alerts, since user might want to create a comment + // on a completed alert + val searchRequest = + SearchRequest() + .indices(CommentsIndices.ALL_COMMENTS_INDEX_PATTERN) + .source(searchSourceBuilder) + + val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + val comments = + searchResponse.hits.map { hit -> + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val comment = Comment.parse(xcp, hit.id) + comment + } + + if (comments.isEmpty()) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Comment not found", RestStatus.NOT_FOUND), + ), + ) + return null + } else if (comments.size > 1) { + actionListener.onFailure( + AlertingException.wrap(IllegalStateException("Multiple comments were found with the same ID")), + ) + return null + } - return comments[0] - } + return comments[0] + } - private fun checkShardsFailure(response: IndexResponse): String? 
{ - val failureReasons = StringBuilder() - if (response.shardInfo.failed > 0) { - response.shardInfo.failures.forEach { entry -> - failureReasons.append(entry.reason()) + private fun checkShardsFailure(response: IndexResponse): String? { + val failureReasons = StringBuilder() + if (response.shardInfo.failed > 0) { + response.shardInfo.failures.forEach { entry -> + failureReasons.append(entry.reason()) + } + return failureReasons.toString() } - return failureReasons.toString() + return null } - return null } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt index 320b82ccc..f5ace9716 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexMonitorAction.kt @@ -91,676 +91,757 @@ import java.util.Locale private val log = LogManager.getLogger(TransportIndexMonitorAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportIndexMonitorAction @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val scheduledJobIndices: ScheduledJobIndices, - val docLevelMonitorQueries: DocLevelMonitorQueries, - val clusterService: ClusterService, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, - val namedWriteableRegistry: NamedWriteableRegistry, -) : HandledTransportAction( - AlertingActions.INDEX_MONITOR_ACTION_NAME, transportService, actionFilters, ::IndexMonitorRequest -), - SecureTransportAction { - - @Volatile private var maxMonitors = ALERTING_MAX_MONITORS.get(settings) - @Volatile private var requestTimeout = REQUEST_TIMEOUT.get(settings) - @Volatile private var indexTimeout = INDEX_TIMEOUT.get(settings) - @Volatile private var maxActionThrottle = 
MAX_ACTION_THROTTLE_VALUE.get(settings) - @Volatile private var allowList = ALLOW_LIST.get(settings) - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_MAX_MONITORS) { maxMonitors = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(REQUEST_TIMEOUT) { requestTimeout = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(INDEX_TIMEOUT) { indexTimeout = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(MAX_ACTION_THROTTLE_VALUE) { maxActionThrottle = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } - listenFilterBySettingChange(clusterService) - } +class TransportIndexMonitorAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val scheduledJobIndices: ScheduledJobIndices, + val docLevelMonitorQueries: DocLevelMonitorQueries, + val clusterService: ClusterService, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + val namedWriteableRegistry: NamedWriteableRegistry, + ) : HandledTransportAction( + AlertingActions.INDEX_MONITOR_ACTION_NAME, + transportService, + actionFilters, + ::IndexMonitorRequest, + ), + SecureTransportAction { + @Volatile private var maxMonitors = ALERTING_MAX_MONITORS.get(settings) + + @Volatile private var requestTimeout = REQUEST_TIMEOUT.get(settings) + + @Volatile private var indexTimeout = INDEX_TIMEOUT.get(settings) + + @Volatile private var maxActionThrottle = MAX_ACTION_THROTTLE_VALUE.get(settings) + + @Volatile private var allowList = ALLOW_LIST.get(settings) + + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_MAX_MONITORS) { maxMonitors = it } + 
clusterService.clusterSettings.addSettingsUpdateConsumer(REQUEST_TIMEOUT) { requestTimeout = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(INDEX_TIMEOUT) { indexTimeout = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(MAX_ACTION_THROTTLE_VALUE) { maxActionThrottle = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } + listenFilterBySettingChange(clusterService) + } - override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { - val transformedRequest = request as? IndexMonitorRequest - ?: recreateObject(request, namedWriteableRegistry) { - IndexMonitorRequest(it) - } + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, + ) { + val transformedRequest = + request as? IndexMonitorRequest + ?: recreateObject(request, namedWriteableRegistry) { + IndexMonitorRequest(it) + } - val user = readUserFromThreadContext(client) + val user = readUserFromThreadContext(client) - if (!validateUserBackendRoles(user, actionListener)) { - return - } + if (!validateUserBackendRoles(user, actionListener)) { + return + } - if ( - user != null && - !isAdmin(user) && - transformedRequest.rbacRoles != null - ) { - if (transformedRequest.rbacRoles?.stream()?.anyMatch { !user.backendRoles.contains(it) } == true) { - log.debug( - "User specified backend roles, ${transformedRequest.rbacRoles}, " + - "that they don' have access to. User backend roles: ${user.backendRoles}" - ) - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "User specified backend roles that they don't have access to. 
Contact administrator", RestStatus.FORBIDDEN - ) + if ( + user != null && + !isAdmin(user) && + transformedRequest.rbacRoles != null + ) { + if (transformedRequest.rbacRoles?.stream()?.anyMatch { !user.backendRoles.contains(it) } == true) { + log.debug( + "User specified backend roles, ${transformedRequest.rbacRoles}, " + + "that they don' have access to. User backend roles: ${user.backendRoles}", ) - ) - return - } else if (transformedRequest.rbacRoles?.isEmpty() == true) { - log.debug( - "Non-admin user are not allowed to specify an empty set of backend roles. " + - "Please don't pass in the parameter or pass in at least one backend role." - ) - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Non-admin user are not allowed to specify an empty set of backend roles.", RestStatus.FORBIDDEN - ) + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "User specified backend roles that they don't have access to. Contact administrator", + RestStatus.FORBIDDEN, + ), + ), ) - ) - return + return + } else if (transformedRequest.rbacRoles?.isEmpty() == true) { + log.debug( + "Non-admin user are not allowed to specify an empty set of backend roles. " + + "Please don't pass in the parameter or pass in at least one backend role.", + ) + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Non-admin user are not allowed to specify an empty set of backend roles.", + RestStatus.FORBIDDEN, + ), + ), + ) + return + } } - } - - if (!isADMonitor(transformedRequest.monitor)) { - checkIndicesAndExecute(client, actionListener, transformedRequest, user) - } else { - // check if user has access to any anomaly detector for AD monitor - checkAnomalyDetectorAndExecute(client, actionListener, transformedRequest, user) - } - } - /** - * Check if user has permissions to read the configured indices on the monitor and - * then create monitor. 
- */ - fun checkIndicesAndExecute( - client: Client, - actionListener: ActionListener, - request: IndexMonitorRequest, - user: User?, - ) { - val indices = mutableListOf() - // todo: for doc level alerting: check if index is present before monitor is created. - val searchInputs = request.monitor.inputs.filter { - it.name() == SearchInput.SEARCH_FIELD || - it.name() == DOC_LEVEL_INPUT_FIELD || - it.name() == REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD - } - searchInputs.forEach { - val inputIndices = if (it.name() == SearchInput.SEARCH_FIELD) (it as SearchInput).indices - else if (it.name() == DOC_LEVEL_INPUT_FIELD) (it as DocLevelMonitorInput).indices - else (it as RemoteDocLevelMonitorInput).docLevelMonitorInput.indices - indices.addAll(inputIndices) - } - val updatedIndices = indices.map { index -> - if (IndexUtils.isAlias(index, clusterService.state()) || IndexUtils.isDataStream(index, clusterService.state())) { - val metadata = clusterService.state().metadata.indicesLookup[index]?.writeIndex - metadata?.index?.name ?: index + if (!isADMonitor(transformedRequest.monitor)) { + checkIndicesAndExecute(client, actionListener, transformedRequest, user) } else { - index + // check if user has access to any anomaly detector for AD monitor + checkAnomalyDetectorAndExecute(client, actionListener, transformedRequest, user) } } - val searchRequest = SearchRequest().indices(*updatedIndices.toTypedArray()) - .source(SearchSourceBuilder.searchSource().size(1).query(QueryBuilders.matchAllQuery())) - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(searchResponse: SearchResponse) { - // User has read access to configured indices in the monitor, now create monitor with out user context. - client.threadPool().threadContext.stashContext().use { - IndexMonitorHandler(client, actionListener, request, user).resolveUserAndStart() - } - } - // Due to below issue with security plugin, we get security_exception when invalid index name is mentioned. 
- // https://github.com/opendistro-for-elasticsearch/security/issues/718 - override fun onFailure(t: Exception) { - actionListener.onFailure( - AlertingException.wrap( - when (t is OpenSearchSecurityException) { - true -> OpenSearchStatusException( - "User doesn't have read permissions for one or more configured index " + - "$indices", - RestStatus.FORBIDDEN - ) - false -> t - } - ) - ) + /** + * Check if user has permissions to read the configured indices on the monitor and + * then create monitor. + */ + fun checkIndicesAndExecute( + client: Client, + actionListener: ActionListener, + request: IndexMonitorRequest, + user: User?, + ) { + val indices = mutableListOf() + // todo: for doc level alerting: check if index is present before monitor is created. + val searchInputs = + request.monitor.inputs.filter { + it.name() == SearchInput.SEARCH_FIELD || + it.name() == DOC_LEVEL_INPUT_FIELD || + it.name() == REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD } + searchInputs.forEach { + val inputIndices = + if (it.name() == SearchInput.SEARCH_FIELD) { + (it as SearchInput).indices + } else if (it.name() == DOC_LEVEL_INPUT_FIELD) { + (it as DocLevelMonitorInput).indices + } else { + (it as RemoteDocLevelMonitorInput).docLevelMonitorInput.indices + } + indices.addAll(inputIndices) } - ) - } + val updatedIndices = + indices.map { index -> + if (IndexUtils.isAlias(index, clusterService.state()) || IndexUtils.isDataStream(index, clusterService.state())) { + val metadata = + clusterService + .state() + .metadata.indicesLookup[index] + ?.writeIndex + metadata?.index?.name ?: index + } else { + index + } + } + val searchRequest = + SearchRequest() + .indices(*updatedIndices.toTypedArray()) + .source(SearchSourceBuilder.searchSource().size(1).query(QueryBuilders.matchAllQuery())) + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(searchResponse: SearchResponse) { + // User has read access to configured indices in the monitor, now create monitor with out 
user context. + client.threadPool().threadContext.stashContext().use { + IndexMonitorHandler(client, actionListener, request, user).resolveUserAndStart() + } + } + + // Due to below issue with security plugin, we get security_exception when invalid index name is mentioned. + // https://github.com/opendistro-for-elasticsearch/security/issues/718 + override fun onFailure(t: Exception) { + actionListener.onFailure( + AlertingException.wrap( + when (t is OpenSearchSecurityException) { + true -> { + OpenSearchStatusException( + "User doesn't have read permissions for one or more configured index " + + "$indices", + RestStatus.FORBIDDEN, + ) + } - /** - * It's no reasonable to create AD monitor if the user has no access to any detector. Otherwise - * the monitor will not get any anomaly result. So we will check user has access to at least 1 - * anomaly detector if they need to create AD monitor. - * As anomaly detector index is system index, common user has no permission to query. So we need - * to send REST API call to AD REST API. - */ - fun checkAnomalyDetectorAndExecute( - client: Client, - actionListener: ActionListener, - request: IndexMonitorRequest, - user: User?, - ) { - client.threadPool().threadContext.stashContext().use { - IndexMonitorHandler(client, actionListener, request, user).resolveUserAndStartForAD() + false -> { + t + } + }, + ), + ) + } + }, + ) } - } - inner class IndexMonitorHandler( - private val client: Client, - private val actionListener: ActionListener, - private val request: IndexMonitorRequest, - private val user: User?, - ) { - - fun resolveUserAndStart() { - if (user == null) { - // Security is disabled, add empty user to Monitor. user is null for older versions. 
- request.monitor = request.monitor - .copy(user = User("", listOf(), listOf(), mapOf())) - start() - } else { - request.monitor = request.monitor - .copy(user = User(user.name, user.backendRoles, user.roles, user.customAttributes)) - start() + /** + * It's no reasonable to create AD monitor if the user has no access to any detector. Otherwise + * the monitor will not get any anomaly result. So we will check user has access to at least 1 + * anomaly detector if they need to create AD monitor. + * As anomaly detector index is system index, common user has no permission to query. So we need + * to send REST API call to AD REST API. + */ + fun checkAnomalyDetectorAndExecute( + client: Client, + actionListener: ActionListener, + request: IndexMonitorRequest, + user: User?, + ) { + client.threadPool().threadContext.stashContext().use { + IndexMonitorHandler(client, actionListener, request, user).resolveUserAndStartForAD() } } - fun resolveUserAndStartForAD() { - if (user == null) { - // Security is disabled, add empty user to Monitor. user is null for older versions. - request.monitor = request.monitor - .copy(user = User("", listOf(), listOf(), mapOf())) - start() - } else { - try { - request.monitor = request.monitor - .copy(user = User(user.name, user.backendRoles, user.roles, user.customAttributes)) - val searchSourceBuilder = SearchSourceBuilder().size(0) - if (getRoleFilterEnabled(clusterService, settings, "plugins.anomaly_detection.filter_by_backend_roles")) { - addUserBackendRolesFilter(user, searchSourceBuilder) + inner class IndexMonitorHandler( + private val client: Client, + private val actionListener: ActionListener, + private val request: IndexMonitorRequest, + private val user: User?, + ) { + fun resolveUserAndStart() { + if (user == null) { + // Security is disabled, add empty user to Monitor. user is null for older versions. 
+ request.monitor = + request.monitor + .copy(user = User("", listOf(), listOf(), mapOf())) + start() + } else { + request.monitor = + request.monitor + .copy(user = User(user.name, user.backendRoles, user.roles, user.customAttributes)) + start() + } + } + + fun resolveUserAndStartForAD() { + if (user == null) { + // Security is disabled, add empty user to Monitor. user is null for older versions. + request.monitor = + request.monitor + .copy(user = User("", listOf(), listOf(), mapOf())) + start() + } else { + try { + request.monitor = + request.monitor + .copy(user = User(user.name, user.backendRoles, user.roles, user.customAttributes)) + val searchSourceBuilder = SearchSourceBuilder().size(0) + if (getRoleFilterEnabled(clusterService, settings, "plugins.anomaly_detection.filter_by_backend_roles")) { + addUserBackendRolesFilter(user, searchSourceBuilder) + } + val searchRequest = SearchRequest().indices(".opendistro-anomaly-detectors").source(searchSourceBuilder) + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(response: SearchResponse?) { + val totalHits = response?.hits?.totalHits?.value + if (totalHits != null && totalHits > 0L) { + start() + } else { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("User has no available detectors", RestStatus.NOT_FOUND), + ), + ) + } + } + + override fun onFailure(t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } + }, + ) + } catch (ex: IOException) { + actionListener.onFailure(AlertingException.wrap(ex)) } - val searchRequest = SearchRequest().indices(".opendistro-anomaly-detectors").source(searchSourceBuilder) - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(response: SearchResponse?) 
{ - val totalHits = response?.hits?.totalHits?.value - if (totalHits != null && totalHits > 0L) { - start() + } + } + + fun start() { + if (!scheduledJobIndices.scheduledJobIndexExists()) { + scheduledJobIndices.initScheduledJobIndex( + object : ActionListener { + override fun onResponse(response: CreateIndexResponse) { + onCreateMappingsResponse(response.isAcknowledged) + } + + override fun onFailure(t: Exception) { + // https://github.com/opensearch-project/alerting/issues/646 + if (ExceptionsHelper.unwrapCause(t) is ResourceAlreadyExistsException) { + scope.launch { + // Wait for the yellow status + val request = + ClusterHealthRequest() + .indices(SCHEDULED_JOBS_INDEX) + .waitForYellowStatus() + val response: ClusterHealthResponse = + client.suspendUntil { + execute(ClusterHealthAction.INSTANCE, request, it) + } + if (response.isTimedOut) { + actionListener.onFailure( + OpenSearchException("Cannot determine that the $SCHEDULED_JOBS_INDEX index is healthy"), + ) + } + // Retry mapping of monitor + onCreateMappingsResponse(true) + } } else { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("User has no available detectors", RestStatus.NOT_FOUND) - ) - ) + actionListener.onFailure(AlertingException.wrap(t)) } } + }, + ) + } else if (!IndexUtils.scheduledJobIndexUpdated) { + IndexUtils.updateIndexMapping( + SCHEDULED_JOBS_INDEX, + ScheduledJobIndices.scheduledJobMappings(), + clusterService.state(), + client.admin().indices(), + object : ActionListener { + override fun onResponse(response: AcknowledgedResponse) { + onUpdateMappingsResponse(response) + } override fun onFailure(t: Exception) { actionListener.onFailure(AlertingException.wrap(t)) } - } + }, ) - } catch (ex: IOException) { - actionListener.onFailure(AlertingException.wrap(ex)) + } else { + prepareMonitorIndexing() } } - } - fun start() { - if (!scheduledJobIndices.scheduledJobIndexExists()) { - scheduledJobIndices.initScheduledJobIndex(object : ActionListener { - 
override fun onResponse(response: CreateIndexResponse) { - onCreateMappingsResponse(response.isAcknowledged) - } - override fun onFailure(t: Exception) { - // https://github.com/opensearch-project/alerting/issues/646 - if (ExceptionsHelper.unwrapCause(t) is ResourceAlreadyExistsException) { - scope.launch { - // Wait for the yellow status - val request = ClusterHealthRequest() - .indices(SCHEDULED_JOBS_INDEX) - .waitForYellowStatus() - val response: ClusterHealthResponse = client.suspendUntil { - execute(ClusterHealthAction.INSTANCE, request, it) - } - if (response.isTimedOut) { - actionListener.onFailure( - OpenSearchException("Cannot determine that the $SCHEDULED_JOBS_INDEX index is healthy") - ) - } - // Retry mapping of monitor - onCreateMappingsResponse(true) - } - } else { - actionListener.onFailure(AlertingException.wrap(t)) - } - } - }) - } else if (!IndexUtils.scheduledJobIndexUpdated) { - IndexUtils.updateIndexMapping( - SCHEDULED_JOBS_INDEX, - ScheduledJobIndices.scheduledJobMappings(), clusterService.state(), client.admin().indices(), - object : ActionListener { - override fun onResponse(response: AcknowledgedResponse) { - onUpdateMappingsResponse(response) - } - override fun onFailure(t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) - } - } - ) - } else { - prepareMonitorIndexing() - } - } + /** + * This function prepares for indexing a new monitor. + * If this is an update request we can simply update the monitor. Otherwise we first check to see how many monitors already exist, + * and compare this to the [maxMonitorCount]. Requests that breach this threshold will be rejected. + */ + private fun prepareMonitorIndexing() { + // Below check needs to be async operations and needs to be refactored issue#269 + // checkForDisallowedDestinations(allowList) - /** - * This function prepares for indexing a new monitor. - * If this is an update request we can simply update the monitor. 
Otherwise we first check to see how many monitors already exist, - * and compare this to the [maxMonitorCount]. Requests that breach this threshold will be rejected. - */ - private fun prepareMonitorIndexing() { + try { + validateActionThrottle(request.monitor, maxActionThrottle, TimeValue.timeValueMinutes(1)) + } catch (e: RuntimeException) { + actionListener.onFailure(AlertingException.wrap(e)) + return + } - // Below check needs to be async operations and needs to be refactored issue#269 - // checkForDisallowedDestinations(allowList) + if (request.method == RestRequest.Method.PUT) { + scope.launch { + updateMonitor() + } + } else { + val query = + QueryBuilders.boolQuery().filter( + QueryBuilders.termQuery("${Monitor.MONITOR_TYPE}.type", Monitor.MONITOR_TYPE), + ) + val searchSource = SearchSourceBuilder().query(query).timeout(requestTimeout) + val searchRequest = SearchRequest(SCHEDULED_JOBS_INDEX).source(searchSource) - try { - validateActionThrottle(request.monitor, maxActionThrottle, TimeValue.timeValueMinutes(1)) - } catch (e: RuntimeException) { - actionListener.onFailure(AlertingException.wrap(e)) - return - } + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(searchResponse: SearchResponse) { + onSearchResponse(searchResponse) + } - if (request.method == RestRequest.Method.PUT) { - scope.launch { - updateMonitor() + override fun onFailure(t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } + }, + ) } - } else { - val query = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery("${Monitor.MONITOR_TYPE}.type", Monitor.MONITOR_TYPE)) - val searchSource = SearchSourceBuilder().query(query).timeout(requestTimeout) - val searchRequest = SearchRequest(SCHEDULED_JOBS_INDEX).source(searchSource) - - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(searchResponse: SearchResponse) { - onSearchResponse(searchResponse) - } + } - override fun onFailure(t: Exception) { - 
actionListener.onFailure(AlertingException.wrap(t)) + private fun validateActionThrottle( + monitor: Monitor, + maxValue: TimeValue, + minValue: TimeValue, + ) { + monitor.triggers.forEach { trigger -> + trigger.actions.forEach { action -> + if (action.throttle != null) { + require( + TimeValue(Duration.of(action.throttle!!.value.toLong(), action.throttle!!.unit).toMillis()) + .compareTo(maxValue) <= 0, + { "Can only set throttle period less than or equal to $maxValue" }, + ) + require( + TimeValue(Duration.of(action.throttle!!.value.toLong(), action.throttle!!.unit).toMillis()) + .compareTo(minValue) >= 0, + { "Can only set throttle period greater than or equal to $minValue" }, + ) } } - ) + } } - } - private fun validateActionThrottle(monitor: Monitor, maxValue: TimeValue, minValue: TimeValue) { - monitor.triggers.forEach { trigger -> - trigger.actions.forEach { action -> - if (action.throttle != null) { - require( - TimeValue(Duration.of(action.throttle!!.value.toLong(), action.throttle!!.unit).toMillis()) - .compareTo(maxValue) <= 0, - { "Can only set throttle period less than or equal to $maxValue" } - ) - require( - TimeValue(Duration.of(action.throttle!!.value.toLong(), action.throttle!!.unit).toMillis()) - .compareTo(minValue) >= 0, - { "Can only set throttle period greater than or equal to $minValue" } - ) + /** + * After searching for all existing monitors we validate the system can support another monitor to be created. 
+ */ + private fun onSearchResponse(response: SearchResponse) { + val totalHits = response.hits.totalHits?.value + if (totalHits != null && totalHits >= maxMonitors) { + log.info("This request would create more than the allowed monitors [$maxMonitors].") + actionListener.onFailure( + AlertingException.wrap( + IllegalArgumentException( + "This request would create more than the allowed monitors [$maxMonitors].", + ), + ), + ) + } else { + scope.launch { + indexMonitor() } } } - } - /** - * After searching for all existing monitors we validate the system can support another monitor to be created. - */ - private fun onSearchResponse(response: SearchResponse) { - val totalHits = response.hits.totalHits?.value - if (totalHits != null && totalHits >= maxMonitors) { - log.info("This request would create more than the allowed monitors [$maxMonitors].") - actionListener.onFailure( - AlertingException.wrap( - IllegalArgumentException( - "This request would create more than the allowed monitors [$maxMonitors]." 
- ) + private fun onCreateMappingsResponse(isAcknowledged: Boolean) { + if (isAcknowledged) { + log.info("Created $SCHEDULED_JOBS_INDEX with mappings.") + prepareMonitorIndexing() + IndexUtils.scheduledJobIndexUpdated() + } else { + log.info("Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged", + RestStatus.INTERNAL_SERVER_ERROR, + ), + ), ) - ) - } else { - scope.launch { - indexMonitor() } } - } - private fun onCreateMappingsResponse(isAcknowledged: Boolean) { - if (isAcknowledged) { - log.info("Created $SCHEDULED_JOBS_INDEX with mappings.") - prepareMonitorIndexing() - IndexUtils.scheduledJobIndexUpdated() - } else { - log.info("Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged", RestStatus.INTERNAL_SERVER_ERROR - ) + private fun onUpdateMappingsResponse(response: AcknowledgedResponse) { + if (response.isAcknowledged) { + log.info("Updated ${ScheduledJob.SCHEDULED_JOBS_INDEX} with mappings.") + IndexUtils.scheduledJobIndexUpdated() + prepareMonitorIndexing() + } else { + log.info("Update ${ScheduledJob.SCHEDULED_JOBS_INDEX} mappings call not acknowledged.") + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Updated ${ScheduledJob.SCHEDULED_JOBS_INDEX} mappings call not acknowledged.", + RestStatus.INTERNAL_SERVER_ERROR, + ), + ), ) - ) + } } - } - private fun onUpdateMappingsResponse(response: AcknowledgedResponse) { - if (response.isAcknowledged) { - log.info("Updated ${ScheduledJob.SCHEDULED_JOBS_INDEX} with mappings.") - IndexUtils.scheduledJobIndexUpdated() - prepareMonitorIndexing() - } else { - log.info("Update ${ScheduledJob.SCHEDULED_JOBS_INDEX} mappings call not acknowledged.") - actionListener.onFailure( - 
AlertingException.wrap( - OpenSearchStatusException( - "Updated ${ScheduledJob.SCHEDULED_JOBS_INDEX} mappings call not acknowledged.", - RestStatus.INTERNAL_SERVER_ERROR - ) - ) - ) - } - } + private suspend fun indexMonitor() { + if (user != null) { + // Use the backend roles which is an intersection of the requested backend roles and the user's backend roles. + // Admins can pass in any backend role. Also if no backend role is passed in, all the user's backend roles are used. + val rbacRoles = + if (request.rbacRoles == null) { + user.backendRoles.toSet() + } else if (!isAdmin(user)) { + request.rbacRoles?.intersect(user.backendRoles)?.toSet() + } else { + request.rbacRoles + } - private suspend fun indexMonitor() { - if (user != null) { - // Use the backend roles which is an intersection of the requested backend roles and the user's backend roles. - // Admins can pass in any backend role. Also if no backend role is passed in, all the user's backend roles are used. - val rbacRoles = if (request.rbacRoles == null) user.backendRoles.toSet() - else if (!isAdmin(user)) request.rbacRoles?.intersect(user.backendRoles)?.toSet() - else request.rbacRoles + request.monitor = + request.monitor.copy( + user = User(user.name, rbacRoles.orEmpty().toList(), user.roles, user.customAttributes), + ) + log.debug("Created monitor's backend roles: $rbacRoles") + } - request.monitor = request.monitor.copy( - user = User(user.name, rbacRoles.orEmpty().toList(), user.roles, user.customAttributes) + val indexRequest = + IndexRequest(SCHEDULED_JOBS_INDEX) + .setRefreshPolicy(request.refreshPolicy) + .source(request.monitor.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) + .setIfSeqNo(request.seqNo) + .setIfPrimaryTerm(request.primaryTerm) + .timeout(indexTimeout) + + log.info( + "Creating new monitor: ${request.monitor.toXContentWithUser( + jsonBuilder(), + ToXContent.MapParams(mapOf("with_type" to "true")), + )}", ) - log.debug("Created monitor's 
backend roles: $rbacRoles") - } - - val indexRequest = IndexRequest(SCHEDULED_JOBS_INDEX) - .setRefreshPolicy(request.refreshPolicy) - .source(request.monitor.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) - .setIfSeqNo(request.seqNo) - .setIfPrimaryTerm(request.primaryTerm) - .timeout(indexTimeout) - - log.info( - "Creating new monitor: ${request.monitor.toXContentWithUser( - jsonBuilder(), - ToXContent.MapParams(mapOf("with_type" to "true")) - )}" - ) - try { - val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } - val failureReasons = checkShardsFailure(indexResponse) - if (failureReasons != null) { - log.info(failureReasons.toString()) - actionListener.onFailure( - AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())) - ) - return - } - var metadata: MonitorMetadata? - try { // delete monitor if metadata creation fails, log the right error and re-throw the error to fail listener - request.monitor = request.monitor.copy(id = indexResponse.id) - var (monitorMetadata: MonitorMetadata, created: Boolean) = MonitorMetadataService.getOrCreateMetadata(request.monitor) - if (created == false) { - log.warn("Metadata doc id:${monitorMetadata.id} exists, but it shouldn't!") - } - metadata = monitorMetadata - } catch (t: Exception) { - log.error("failed to create metadata for monitor ${indexResponse.id}. 
deleting monitor") - cleanupMonitorAfterPartialFailure(request.monitor, indexResponse) - throw t - } try { - if ( - request.monitor.isMonitorOfStandardType() && - Monitor.MonitorType.valueOf(request.monitor.monitorType.uppercase(Locale.ROOT)) == - Monitor.MonitorType.DOC_LEVEL_MONITOR - ) { - indexDocLevelMonitorQueries(request.monitor, indexResponse.id, metadata, request.refreshPolicy) + val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } + val failureReasons = checkShardsFailure(indexResponse) + if (failureReasons != null) { + log.info(failureReasons.toString()) + actionListener.onFailure( + AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())), + ) + return + } + var metadata: MonitorMetadata? + try { + // delete monitor if metadata creation fails, log the right error and re-throw the error to fail listener + request.monitor = request.monitor.copy(id = indexResponse.id) + var (monitorMetadata: MonitorMetadata, created: Boolean) = + MonitorMetadataService.getOrCreateMetadata( + request.monitor, + ) + if (created == false) { + log.warn("Metadata doc id:${monitorMetadata.id} exists, but it shouldn't!") + } + metadata = monitorMetadata + } catch (t: Exception) { + log.error("failed to create metadata for monitor ${indexResponse.id}. deleting monitor") + cleanupMonitorAfterPartialFailure(request.monitor, indexResponse) + throw t + } + try { + if ( + request.monitor.isMonitorOfStandardType() && + Monitor.MonitorType.valueOf(request.monitor.monitorType.uppercase(Locale.ROOT)) == + Monitor.MonitorType.DOC_LEVEL_MONITOR + ) { + indexDocLevelMonitorQueries(request.monitor, indexResponse.id, metadata, request.refreshPolicy) + } + // When inserting queries in queryIndex we could update sourceToQueryIndexMapping + MonitorMetadataService.upsertMetadata(metadata, updating = true) + } catch (t: Exception) { + log.error("failed to index doc level queries monitor ${indexResponse.id}. 
deleting monitor", t) + cleanupMonitorAfterPartialFailure(request.monitor, indexResponse) + throw t } - // When inserting queries in queryIndex we could update sourceToQueryIndexMapping - MonitorMetadataService.upsertMetadata(metadata, updating = true) + + actionListener.onResponse( + IndexMonitorResponse( + indexResponse.id, + indexResponse.version, + indexResponse.seqNo, + indexResponse.primaryTerm, + request.monitor, + ), + ) } catch (t: Exception) { - log.error("failed to index doc level queries monitor ${indexResponse.id}. deleting monitor", t) - cleanupMonitorAfterPartialFailure(request.monitor, indexResponse) - throw t + actionListener.onFailure(AlertingException.wrap(t)) } + } - actionListener.onResponse( - IndexMonitorResponse( - indexResponse.id, indexResponse.version, indexResponse.seqNo, - indexResponse.primaryTerm, request.monitor + private suspend fun cleanupMonitorAfterPartialFailure( + monitor: Monitor, + indexMonitorResponse: IndexResponse, + ) { + // we simply log the success (debug log) or failure (error log) when we try clean up partially failed monitor creation request + try { + DeleteMonitorService.deleteMonitor( + monitor = monitor, + RefreshPolicy.IMMEDIATE, ) - ) - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + log.debug( + "Cleaned up monitor related resources after monitor creation request partial failure. 
" + + "Monitor id : ${indexMonitorResponse.id}", + ) + } catch (e: Exception) { + log.error("Failed to clean up monitor after monitor creation request partial failure", e) + } } - } - private suspend fun cleanupMonitorAfterPartialFailure(monitor: Monitor, indexMonitorResponse: IndexResponse) { - // we simply log the success (debug log) or failure (error log) when we try clean up partially failed monitor creation request - try { - DeleteMonitorService.deleteMonitor( - monitor = monitor, - RefreshPolicy.IMMEDIATE - ) - log.debug( - "Cleaned up monitor related resources after monitor creation request partial failure. " + - "Monitor id : ${indexMonitorResponse.id}" + @Suppress("UNCHECKED_CAST") + private suspend fun indexDocLevelMonitorQueries( + monitor: Monitor, + monitorId: String, + monitorMetadata: MonitorMetadata, + refreshPolicy: RefreshPolicy, + ) { + val queryIndex = monitor.dataSources.queryIndex + if (!docLevelMonitorQueries.docLevelQueryIndexExists(monitor.dataSources)) { + docLevelMonitorQueries.initDocLevelQueryIndex(monitor.dataSources) + log.info("Central Percolation index $queryIndex created") + } + docLevelMonitorQueries.indexDocLevelQueries( + monitor, + monitorId, + monitorMetadata, + refreshPolicy, + indexTimeout, ) - } catch (e: Exception) { - log.error("Failed to clean up monitor after monitor creation request partial failure", e) - } - } - - @Suppress("UNCHECKED_CAST") - private suspend fun indexDocLevelMonitorQueries( - monitor: Monitor, - monitorId: String, - monitorMetadata: MonitorMetadata, - refreshPolicy: RefreshPolicy - ) { - val queryIndex = monitor.dataSources.queryIndex - if (!docLevelMonitorQueries.docLevelQueryIndexExists(monitor.dataSources)) { - docLevelMonitorQueries.initDocLevelQueryIndex(monitor.dataSources) - log.info("Central Percolation index $queryIndex created") + log.debug("Queries inserted into Percolate index $queryIndex") } - docLevelMonitorQueries.indexDocLevelQueries( - monitor, - monitorId, - monitorMetadata, - 
refreshPolicy, - indexTimeout - ) - log.debug("Queries inserted into Percolate index $queryIndex") - } - private suspend fun updateMonitor() { - val getRequest = GetRequest(SCHEDULED_JOBS_INDEX, request.monitorId) - try { - val getResponse: GetResponse = client.suspendUntil { client.get(getRequest, it) } - if (!getResponse.isExists) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Monitor with ${request.monitorId} is not found", RestStatus.NOT_FOUND) + private suspend fun updateMonitor() { + val getRequest = GetRequest(SCHEDULED_JOBS_INDEX, request.monitorId) + try { + val getResponse: GetResponse = client.suspendUntil { client.get(getRequest, it) } + if (!getResponse.isExists) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Monitor with ${request.monitorId} is not found", RestStatus.NOT_FOUND), + ), ) - ) - return - } - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, XContentType.JSON - ) - val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) + return + } + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, + XContentType.JSON, + ) + val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) - validateMonitorV1(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return - } + validateMonitorV1(scheduledJob)?.let { + actionListener.onFailure(AlertingException.wrap(it)) + return + } - val monitor = scheduledJob as Monitor + val monitor = scheduledJob as Monitor - onGetResponse(monitor) - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + onGetResponse(monitor) + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } } - } - private suspend fun onGetResponse(currentMonitor: Monitor) { - if 
(!checkUserPermissionsWithResource(user, currentMonitor.user, actionListener, "monitor", request.monitorId)) { - return - } + private suspend fun onGetResponse(currentMonitor: Monitor) { + if (!checkUserPermissionsWithResource(user, currentMonitor.user, actionListener, "monitor", request.monitorId)) { + return + } - // If both are enabled, use the current existing monitor enabled time, otherwise the next execution will be - // incorrect. - if (request.monitor.enabled && currentMonitor.enabled) - request.monitor = request.monitor.copy(enabledTime = currentMonitor.enabledTime) + // If both are enabled, use the current existing monitor enabled time, otherwise the next execution will be + // incorrect. + if (request.monitor.enabled && currentMonitor.enabled) { + request.monitor = request.monitor.copy(enabledTime = currentMonitor.enabledTime) + } - /** - * On update monitor check which backend roles to associate to the monitor. - * Below are 2 examples of how the logic works - * - * Example 1, say we have a Monitor with backend roles [a, b, c, d] associated with it. - * If I'm User A (non-admin user) and I have backend roles [a, b, c] associated with me and I make a request to update - * the Monitor's backend roles to [a, b]. This would mean that the roles to remove are [c] and the roles to add are [a, b]. - * The Monitor's backend roles would then be [a, b, d]. - * - * Example 2, say we have a Monitor with backend roles [a, b, c, d] associated with it. - * If I'm User A (admin user) and I have backend roles [a, b, c] associated with me and I make a request to update - * the Monitor's backend roles to [a, b]. This would mean that the roles to remove are [c, d] and the roles to add are [a, b]. - * The Monitor's backend roles would then be [a, b]. 
- */ - if (user != null) { - if (request.rbacRoles != null) { - if (isAdmin(user)) { - request.monitor = request.monitor.copy( - user = User(user.name, request.rbacRoles, user.roles, user.customAttributes) - ) + /* + * On update monitor check which backend roles to associate to the monitor. + * Below are 2 examples of how the logic works + * + * Example 1, say we have a Monitor with backend roles [a, b, c, d] associated with it. + * If I'm User A (non-admin user) and I have backend roles [a, b, c] associated with me and I make a request to update + * the Monitor's backend roles to [a, b]. This would mean that the roles to remove are [c] and the roles to add are [a, b]. + * The Monitor's backend roles would then be [a, b, d]. + * + * Example 2, say we have a Monitor with backend roles [a, b, c, d] associated with it. + * If I'm User A (admin user) and I have backend roles [a, b, c] associated with me and I make a request to update + * the Monitor's backend roles to [a, b]. This would mean that the roles to remove are [c, d] and the roles to add are [a, b]. + * The Monitor's backend roles would then be [a, b]. 
+ */ + if (user != null) { + if (request.rbacRoles != null) { + if (isAdmin(user)) { + request.monitor = + request.monitor.copy( + user = User(user.name, request.rbacRoles, user.roles, user.customAttributes), + ) + } else { + // rolesToRemove: these are the backend roles to remove from the monitor + val rolesToRemove = user.backendRoles - request.rbacRoles.orEmpty() + // remove the monitor's roles with rolesToRemove and add any roles passed into the request.rbacRoles + val updatedRbac = currentMonitor.user?.backendRoles.orEmpty() - rolesToRemove + request.rbacRoles.orEmpty() + request.monitor = + request.monitor.copy( + user = User(user.name, updatedRbac, user.roles, user.customAttributes), + ) + } } else { - // rolesToRemove: these are the backend roles to remove from the monitor - val rolesToRemove = user.backendRoles - request.rbacRoles.orEmpty() - // remove the monitor's roles with rolesToRemove and add any roles passed into the request.rbacRoles - val updatedRbac = currentMonitor.user?.backendRoles.orEmpty() - rolesToRemove + request.rbacRoles.orEmpty() - request.monitor = request.monitor.copy( - user = User(user.name, updatedRbac, user.roles, user.customAttributes) - ) + request.monitor = + request.monitor + .copy(user = User(user.name, currentMonitor.user!!.backendRoles, user.roles, user.customAttributes)) } - } else { - request.monitor = request.monitor - .copy(user = User(user.name, currentMonitor.user!!.backendRoles, user.roles, user.customAttributes)) + log.debug("Update monitor backend roles to: ${request.monitor.user?.backendRoles}") } - log.debug("Update monitor backend roles to: ${request.monitor.user?.backendRoles}") - } - request.monitor = request.monitor.copy(schemaVersion = IndexUtils.scheduledJobIndexSchemaVersion) - val indexRequest = IndexRequest(SCHEDULED_JOBS_INDEX) - .setRefreshPolicy(request.refreshPolicy) - .source(request.monitor.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) - 
.id(request.monitorId) - .setIfSeqNo(request.seqNo) - .setIfPrimaryTerm(request.primaryTerm) - .timeout(indexTimeout) - - log.info( - "Updating monitor, ${currentMonitor.id}, from: ${currentMonitor.toXContentWithUser( - jsonBuilder(), - ToXContent.MapParams(mapOf("with_type" to "true")) - )} \n to: ${request.monitor.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))}" - ) + request.monitor = request.monitor.copy(schemaVersion = IndexUtils.scheduledJobIndexSchemaVersion) + val indexRequest = + IndexRequest(SCHEDULED_JOBS_INDEX) + .setRefreshPolicy(request.refreshPolicy) + .source(request.monitor.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) + .id(request.monitorId) + .setIfSeqNo(request.seqNo) + .setIfPrimaryTerm(request.primaryTerm) + .timeout(indexTimeout) + + log.info( + "Updating monitor, ${currentMonitor.id}, from: ${currentMonitor.toXContentWithUser( + jsonBuilder(), + ToXContent.MapParams(mapOf("with_type" to "true")), + )} \n to: ${request.monitor.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))}", + ) - try { - val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } - val failureReasons = checkShardsFailure(indexResponse) - if (failureReasons != null) { - actionListener.onFailure( - AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())) - ) - return - } - var isDocLevelMonitorRestarted = false - // Force re-creation of last run context if monitor is of type standard doc-level/threat-intel - // And monitor is re-enabled - if (request.monitor.enabled && !currentMonitor.enabled && - request.monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value) - ) { - isDocLevelMonitorRestarted = true - } + try { + val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } + val failureReasons = checkShardsFailure(indexResponse) + if 
(failureReasons != null) { + actionListener.onFailure( + AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())), + ) + return + } + var isDocLevelMonitorRestarted = false + // Force re-creation of last run context if monitor is of type standard doc-level/threat-intel + // And monitor is re-enabled + if (request.monitor.enabled && !currentMonitor.enabled && + request.monitor.monitorType.endsWith(Monitor.MonitorType.DOC_LEVEL_MONITOR.value) + ) { + isDocLevelMonitorRestarted = true + } - var updatedMetadata: MonitorMetadata - val (metadata, created) = MonitorMetadataService.getOrCreateMetadata( - request.monitor, - forceCreateLastRunContext = isDocLevelMonitorRestarted - ) + var updatedMetadata: MonitorMetadata + val (metadata, created) = + MonitorMetadataService.getOrCreateMetadata( + request.monitor, + forceCreateLastRunContext = isDocLevelMonitorRestarted, + ) + + // Recreate runContext if metadata exists + // Delete and insert all queries from/to queryIndex - // Recreate runContext if metadata exists - // Delete and insert all queries from/to queryIndex - - if (!created && - currentMonitor.isMonitorOfStandardType() && - Monitor.MonitorType.valueOf(currentMonitor.monitorType.uppercase(Locale.ROOT)) == Monitor.MonitorType.DOC_LEVEL_MONITOR - ) { - updatedMetadata = MonitorMetadataService.recreateRunContext(metadata, currentMonitor) - if (docLevelMonitorQueries.docLevelQueryIndexExists(currentMonitor.dataSources)) { - client.suspendUntil { - DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) - .source(currentMonitor.dataSources.queryIndex) - .filter(QueryBuilders.matchQuery("monitor_id", currentMonitor.id)) - .execute(it) + if (!created && + currentMonitor.isMonitorOfStandardType() && + Monitor.MonitorType.valueOf(currentMonitor.monitorType.uppercase(Locale.ROOT)) == + Monitor.MonitorType.DOC_LEVEL_MONITOR + ) { + updatedMetadata = MonitorMetadataService.recreateRunContext(metadata, currentMonitor) + if 
(docLevelMonitorQueries.docLevelQueryIndexExists(currentMonitor.dataSources)) { + client.suspendUntil { + DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) + .source(currentMonitor.dataSources.queryIndex) + .filter(QueryBuilders.matchQuery("monitor_id", currentMonitor.id)) + .execute(it) + } } + indexDocLevelMonitorQueries( + request.monitor, + currentMonitor.id, + updatedMetadata, + request.refreshPolicy, + ) + MonitorMetadataService.upsertMetadata(updatedMetadata, updating = true) } - indexDocLevelMonitorQueries( - request.monitor, - currentMonitor.id, - updatedMetadata, - request.refreshPolicy + actionListener.onResponse( + IndexMonitorResponse( + indexResponse.id, + indexResponse.version, + indexResponse.seqNo, + indexResponse.primaryTerm, + request.monitor, + ), ) - MonitorMetadataService.upsertMetadata(updatedMetadata, updating = true) + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) } - actionListener.onResponse( - IndexMonitorResponse( - indexResponse.id, indexResponse.version, indexResponse.seqNo, - indexResponse.primaryTerm, request.monitor - ) - ) - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) } - } - private fun checkShardsFailure(response: IndexResponse): String? { - val failureReasons = StringBuilder() - if (response.shardInfo.failed > 0) { - response.shardInfo.failures.forEach { - entry -> - failureReasons.append(entry.reason()) + private fun checkShardsFailure(response: IndexResponse): String? 
{ + val failureReasons = StringBuilder() + if (response.shardInfo.failed > 0) { + response.shardInfo.failures.forEach { entry -> + failureReasons.append(entry.reason()) + } + return failureReasons.toString() } - return failureReasons.toString() + return null } - return null } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexWorkflowAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexWorkflowAction.kt index ba1e17c57..639bdc7b1 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexWorkflowAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportIndexWorkflowAction.kt @@ -88,737 +88,804 @@ import java.util.stream.Collectors private val log = LogManager.getLogger(TransportIndexWorkflowAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportIndexWorkflowAction @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val scheduledJobIndices: ScheduledJobIndices, - val clusterService: ClusterService, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, - val namedWriteableRegistry: NamedWriteableRegistry, -) : HandledTransportAction( - AlertingActions.INDEX_WORKFLOW_ACTION_NAME, transportService, actionFilters, ::IndexWorkflowRequest -), - SecureTransportAction { - - @Volatile - private var maxMonitors = ALERTING_MAX_MONITORS.get(settings) - - @Volatile - private var requestTimeout = REQUEST_TIMEOUT.get(settings) - - @Volatile - private var indexTimeout = INDEX_TIMEOUT.get(settings) - - @Volatile - private var maxActionThrottle = MAX_ACTION_THROTTLE_VALUE.get(settings) - - @Volatile - private var allowList = ALLOW_LIST.get(settings) - - @Volatile - override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - 
clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_MAX_MONITORS) { maxMonitors = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(REQUEST_TIMEOUT) { requestTimeout = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(INDEX_TIMEOUT) { indexTimeout = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(MAX_ACTION_THROTTLE_VALUE) { maxActionThrottle = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } - listenFilterBySettingChange(clusterService) - } - - override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { - val transformedRequest = request as? IndexWorkflowRequest - ?: recreateObject(request, namedWriteableRegistry) { - IndexWorkflowRequest(it) - } - - val user = readUserFromThreadContext(client) - - if (!validateUserBackendRoles(user, actionListener)) { - return +class TransportIndexWorkflowAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val scheduledJobIndices: ScheduledJobIndices, + val clusterService: ClusterService, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + val namedWriteableRegistry: NamedWriteableRegistry, + ) : HandledTransportAction( + AlertingActions.INDEX_WORKFLOW_ACTION_NAME, + transportService, + actionFilters, + ::IndexWorkflowRequest, + ), + SecureTransportAction { + @Volatile + private var maxMonitors = ALERTING_MAX_MONITORS.get(settings) + + @Volatile + private var requestTimeout = REQUEST_TIMEOUT.get(settings) + + @Volatile + private var indexTimeout = INDEX_TIMEOUT.get(settings) + + @Volatile + private var maxActionThrottle = MAX_ACTION_THROTTLE_VALUE.get(settings) + + @Volatile + private var allowList = ALLOW_LIST.get(settings) + + @Volatile + override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + 
clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_MAX_MONITORS) { maxMonitors = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(REQUEST_TIMEOUT) { requestTimeout = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(INDEX_TIMEOUT) { indexTimeout = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(MAX_ACTION_THROTTLE_VALUE) { maxActionThrottle = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } + listenFilterBySettingChange(clusterService) } - if ( - user != null && - !isAdmin(user) && - transformedRequest.rbacRoles != null + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, ) { - if (transformedRequest.rbacRoles?.stream()?.anyMatch { !user.backendRoles.contains(it) } == true) { - log.error( - "User specified backend roles, ${transformedRequest.rbacRoles}, " + - "that they don' have access to. User backend roles: ${user.backendRoles}" - ) - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "User specified backend roles that they don't have access to. Contact administrator", - RestStatus.FORBIDDEN - ) - ) - ) - return - } else if (transformedRequest.rbacRoles?.isEmpty() == true) { - log.error( - "Non-admin user are not allowed to specify an empty set of backend roles. " + - "Please don't pass in the parameter or pass in at least one backend role." - ) - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Non-admin user are not allowed to specify an empty set of backend roles.", - RestStatus.FORBIDDEN - ) - ) - ) + val transformedRequest = + request as? 
IndexWorkflowRequest + ?: recreateObject(request, namedWriteableRegistry) { + IndexWorkflowRequest(it) + } + + val user = readUserFromThreadContext(client) + + if (!validateUserBackendRoles(user, actionListener)) { return } - } - - scope.launch { - try { - validateMonitorAccess( - transformedRequest, - user, - client, - object : ActionListener { - override fun onResponse(response: AcknowledgedResponse) { - // Stash the context and start the workflow creation - client.threadPool().threadContext.stashContext().use { - IndexWorkflowHandler(client, actionListener, transformedRequest, user).resolveUserAndStart() - } - } - override fun onFailure(e: Exception) { - log.error("Error indexing workflow", e) - actionListener.onFailure(e) - } - } - ) - } catch (e: Exception) { - log.error("Failed to create workflow", e) - if (e is IndexNotFoundException) { + if ( + user != null && + !isAdmin(user) && + transformedRequest.rbacRoles != null + ) { + if (transformedRequest.rbacRoles?.stream()?.anyMatch { !user.backendRoles.contains(it) } == true) { + log.error( + "User specified backend roles, ${transformedRequest.rbacRoles}, " + + "that they don' have access to. User backend roles: ${user.backendRoles}", + ) actionListener.onFailure( - OpenSearchStatusException( - "Monitors not found", - RestStatus.NOT_FOUND - ) + AlertingException.wrap( + OpenSearchStatusException( + "User specified backend roles that they don't have access to. Contact administrator", + RestStatus.FORBIDDEN, + ), + ), ) - } else { - actionListener.onFailure(e) + return + } else if (transformedRequest.rbacRoles?.isEmpty() == true) { + log.error( + "Non-admin user are not allowed to specify an empty set of backend roles. 
" + + "Please don't pass in the parameter or pass in at least one backend role.", + ) + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Non-admin user are not allowed to specify an empty set of backend roles.", + RestStatus.FORBIDDEN, + ), + ), + ) + return } } - } - } - inner class IndexWorkflowHandler( - private val client: Client, - private val actionListener: ActionListener, - private val request: IndexWorkflowRequest, - private val user: User?, - ) { - fun resolveUserAndStart() { scope.launch { - if (user == null) { - // Security is disabled, add empty user to Workflow. user is null for older versions. - request.workflow = request.workflow - .copy(user = User("", listOf(), listOf(), mapOf())) - start() - } else { - request.workflow = request.workflow - .copy(user = User(user.name, user.backendRoles, user.roles, user.customAttributes)) - start() - } - } - } - - fun start() { - if (!scheduledJobIndices.scheduledJobIndexExists()) { - scheduledJobIndices.initScheduledJobIndex(object : ActionListener { - override fun onResponse(response: CreateIndexResponse) { - onCreateMappingsResponse(response.isAcknowledged) - } - - override fun onFailure(t: Exception) { - // https://github.com/opensearch-project/alerting/issues/646 - if (ExceptionsHelper.unwrapCause(t) is ResourceAlreadyExistsException) { - scope.launch { - // Wait for the yellow status - val request = ClusterHealthRequest() - .indices(SCHEDULED_JOBS_INDEX) - .waitForYellowStatus() - val response: ClusterHealthResponse = client.suspendUntil { - execute(ClusterHealthAction.INSTANCE, request, it) - } - if (response.isTimedOut) { - log.error("Workflow creation timeout", t) - actionListener.onFailure( - OpenSearchException("Cannot determine that the $SCHEDULED_JOBS_INDEX index is healthy") - ) + try { + validateMonitorAccess( + transformedRequest, + user, + client, + object : ActionListener { + override fun onResponse(response: AcknowledgedResponse) { + // Stash the context and 
start the workflow creation + client.threadPool().threadContext.stashContext().use { + IndexWorkflowHandler(client, actionListener, transformedRequest, user).resolveUserAndStart() } - // Retry mapping of workflow - onCreateMappingsResponse(true) } - } else { - log.error("Failed to create workflow", t) - actionListener.onFailure(AlertingException.wrap(t)) - } - } - }) - } else if (!IndexUtils.scheduledJobIndexUpdated) { - IndexUtils.updateIndexMapping( - SCHEDULED_JOBS_INDEX, - ScheduledJobIndices.scheduledJobMappings(), clusterService.state(), client.admin().indices(), - object : ActionListener { - override fun onResponse(response: AcknowledgedResponse) { - onUpdateMappingsResponse(response) - } - override fun onFailure(t: Exception) { - log.error("Failed to create workflow", t) - actionListener.onFailure(AlertingException.wrap(t)) - } + override fun onFailure(e: Exception) { + log.error("Error indexing workflow", e) + actionListener.onFailure(e) + } + }, + ) + } catch (e: Exception) { + log.error("Failed to create workflow", e) + if (e is IndexNotFoundException) { + actionListener.onFailure( + OpenSearchStatusException( + "Monitors not found", + RestStatus.NOT_FOUND, + ), + ) + } else { + actionListener.onFailure(e) } - ) - } else { - prepareWorkflowIndexing() + } } } - /** - * This function prepares for indexing a new workflow. - * If this is an update request we can simply update the workflow. Otherwise we first check to see how many monitors already exist, - * and compare this to the [maxMonitorCount]. Requests that breach this threshold will be rejected. 
- */ - private fun prepareWorkflowIndexing() { - if (request.method == RestRequest.Method.PUT) { - scope.launch { - updateWorkflow() - } - } else { + inner class IndexWorkflowHandler( + private val client: Client, + private val actionListener: ActionListener, + private val request: IndexWorkflowRequest, + private val user: User?, + ) { + fun resolveUserAndStart() { scope.launch { - indexWorkflow() + if (user == null) { + // Security is disabled, add empty user to Workflow. user is null for older versions. + request.workflow = + request.workflow + .copy(user = User("", listOf(), listOf(), mapOf())) + start() + } else { + request.workflow = + request.workflow + .copy(user = User(user.name, user.backendRoles, user.roles, user.customAttributes)) + start() + } } } - } - private fun onCreateMappingsResponse(isAcknowledged: Boolean) { - if (isAcknowledged) { - log.info("Created $SCHEDULED_JOBS_INDEX with mappings.") - prepareWorkflowIndexing() - IndexUtils.scheduledJobIndexUpdated() - } else { - log.error("Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged", - RestStatus.INTERNAL_SERVER_ERROR - ) + fun start() { + if (!scheduledJobIndices.scheduledJobIndexExists()) { + scheduledJobIndices.initScheduledJobIndex( + object : ActionListener { + override fun onResponse(response: CreateIndexResponse) { + onCreateMappingsResponse(response.isAcknowledged) + } + + override fun onFailure(t: Exception) { + // https://github.com/opensearch-project/alerting/issues/646 + if (ExceptionsHelper.unwrapCause(t) is ResourceAlreadyExistsException) { + scope.launch { + // Wait for the yellow status + val request = + ClusterHealthRequest() + .indices(SCHEDULED_JOBS_INDEX) + .waitForYellowStatus() + val response: ClusterHealthResponse = + client.suspendUntil { + execute(ClusterHealthAction.INSTANCE, request, it) + } + if 
(response.isTimedOut) { + log.error("Workflow creation timeout", t) + actionListener.onFailure( + OpenSearchException("Cannot determine that the $SCHEDULED_JOBS_INDEX index is healthy"), + ) + } + // Retry mapping of workflow + onCreateMappingsResponse(true) + } + } else { + log.error("Failed to create workflow", t) + actionListener.onFailure(AlertingException.wrap(t)) + } + } + }, ) - ) - } - } + } else if (!IndexUtils.scheduledJobIndexUpdated) { + IndexUtils.updateIndexMapping( + SCHEDULED_JOBS_INDEX, + ScheduledJobIndices.scheduledJobMappings(), + clusterService.state(), + client.admin().indices(), + object : ActionListener { + override fun onResponse(response: AcknowledgedResponse) { + onUpdateMappingsResponse(response) + } - private fun onUpdateMappingsResponse(response: AcknowledgedResponse) { - if (response.isAcknowledged) { - log.info("Updated $SCHEDULED_JOBS_INDEX with mappings.") - IndexUtils.scheduledJobIndexUpdated() - prepareWorkflowIndexing() - } else { - log.error("Update $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Updated $SCHEDULED_JOBS_INDEX mappings call not acknowledged.", - RestStatus.INTERNAL_SERVER_ERROR - ) + override fun onFailure(t: Exception) { + log.error("Failed to create workflow", t) + actionListener.onFailure(AlertingException.wrap(t)) + } + }, ) - ) + } else { + prepareWorkflowIndexing() + } } - } - - private suspend fun indexWorkflow() { - if (user != null) { - val rbacRoles = if (request.rbacRoles == null) user.backendRoles.toSet() - else if (!isAdmin(user)) request.rbacRoles?.intersect(user.backendRoles)?.toSet() - else request.rbacRoles - request.workflow = request.workflow.copy( - user = User(user.name, rbacRoles.orEmpty().toList(), user.roles, user.customAttributes) - ) - log.debug("Created workflow's backend roles: $rbacRoles") + /** + * This function prepares for indexing a new workflow. 
+ * If this is an update request we can simply update the workflow. Otherwise we first check to see how many monitors already exist, + * and compare this to the [maxMonitorCount]. Requests that breach this threshold will be rejected. + */ + private fun prepareWorkflowIndexing() { + if (request.method == RestRequest.Method.PUT) { + scope.launch { + updateWorkflow() + } + } else { + scope.launch { + indexWorkflow() + } + } } - val indexRequest = IndexRequest(SCHEDULED_JOBS_INDEX) - .setRefreshPolicy(request.refreshPolicy) - .source( - request.workflow.toXContentWithUser( - jsonBuilder(), - ToXContent.MapParams(mapOf("with_type" to "true")) + private fun onCreateMappingsResponse(isAcknowledged: Boolean) { + if (isAcknowledged) { + log.info("Created $SCHEDULED_JOBS_INDEX with mappings.") + prepareWorkflowIndexing() + IndexUtils.scheduledJobIndexUpdated() + } else { + log.error("Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged", + RestStatus.INTERNAL_SERVER_ERROR, + ), + ), ) - ) - .setIfSeqNo(request.seqNo) - .setIfPrimaryTerm(request.primaryTerm) - .timeout(indexTimeout) + } + } - try { - val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } - val failureReasons = checkShardsFailure(indexResponse) - if (failureReasons != null) { - log.error("Failed to create workflow: $failureReasons") + private fun onUpdateMappingsResponse(response: AcknowledgedResponse) { + if (response.isAcknowledged) { + log.info("Updated $SCHEDULED_JOBS_INDEX with mappings.") + IndexUtils.scheduledJobIndexUpdated() + prepareWorkflowIndexing() + } else { + log.error("Update $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") actionListener.onFailure( AlertingException.wrap( OpenSearchStatusException( - failureReasons.toString(), - indexResponse.status() - ) - ) + "Updated $SCHEDULED_JOBS_INDEX 
mappings call not acknowledged.", + RestStatus.INTERNAL_SERVER_ERROR, + ), + ), ) - return } + } - val createdWorkflow = request.workflow.copy(id = indexResponse.id) - val executionId = CompositeWorkflowRunner.generateExecutionId(false, createdWorkflow) + private suspend fun indexWorkflow() { + if (user != null) { + val rbacRoles = + if (request.rbacRoles == null) { + user.backendRoles.toSet() + } else if (!isAdmin(user)) { + request.rbacRoles?.intersect(user.backendRoles)?.toSet() + } else { + request.rbacRoles + } - val (workflowMetadata, _) = WorkflowMetadataService.getOrCreateWorkflowMetadata( - workflow = createdWorkflow, - skipIndex = false, - executionId = executionId - ) + request.workflow = + request.workflow.copy( + user = User(user.name, rbacRoles.orEmpty().toList(), user.roles, user.customAttributes), + ) + log.debug("Created workflow's backend roles: $rbacRoles") + } - val delegates = (createdWorkflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } - val monitors = monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) + val indexRequest = + IndexRequest(SCHEDULED_JOBS_INDEX) + .setRefreshPolicy(request.refreshPolicy) + .source( + request.workflow.toXContentWithUser( + jsonBuilder(), + ToXContent.MapParams(mapOf("with_type" to "true")), + ), + ).setIfSeqNo(request.seqNo) + .setIfPrimaryTerm(request.primaryTerm) + .timeout(indexTimeout) + + try { + val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } + val failureReasons = checkShardsFailure(indexResponse) + if (failureReasons != null) { + log.error("Failed to create workflow: $failureReasons") + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + failureReasons.toString(), + indexResponse.status(), + ), + ), + ) + return + } - for (monitor in monitors) { - var (monitorMetadata, created) = MonitorMetadataService.getOrCreateMetadata( - monitor = monitor, - createWithRunContext = 
true, - workflowMetadataId = workflowMetadata.id - ) + val createdWorkflow = request.workflow.copy(id = indexResponse.id) + val executionId = CompositeWorkflowRunner.generateExecutionId(false, createdWorkflow) - if (created == false) { - log.warn("Metadata doc id:${monitorMetadata.id} exists, but it shouldn't!") - } + val (workflowMetadata, _) = + WorkflowMetadataService.getOrCreateWorkflowMetadata( + workflow = createdWorkflow, + skipIndex = false, + executionId = executionId, + ) - if ( - Monitor.MonitorType.valueOf(monitor.monitorType.uppercase(Locale.ROOT)) == Monitor.MonitorType.DOC_LEVEL_MONITOR - ) { - val oldMonitorMetadata = MonitorMetadataService.getMetadata(monitor) - monitorMetadata = monitorMetadata.copy(sourceToQueryIndexMapping = oldMonitorMetadata!!.sourceToQueryIndexMapping) - } - // When inserting queries in queryIndex we could update sourceToQueryIndexMapping - MonitorMetadataService.upsertMetadata(monitorMetadata, updating = true) - } - actionListener.onResponse( - IndexWorkflowResponse( - indexResponse.id, indexResponse.version, indexResponse.seqNo, - indexResponse.primaryTerm, request.workflow.copy(id = indexResponse.id) - ) - ) - } catch (t: Exception) { - log.error("Failed to index workflow", t) - actionListener.onFailure(AlertingException.wrap(t)) - } - } + val delegates = (createdWorkflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } + val monitors = monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) - private suspend fun updateWorkflow() { - val getRequest = GetRequest(SCHEDULED_JOBS_INDEX, request.workflowId) - try { - val getResponse: GetResponse = client.suspendUntil { client.get(getRequest, it) } - if (!getResponse.isExists) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Workflow with ${request.workflowId} is not found", - RestStatus.NOT_FOUND + for (monitor in monitors) { + var (monitorMetadata, created) = + 
MonitorMetadataService.getOrCreateMetadata( + monitor = monitor, + createWithRunContext = true, + workflowMetadataId = workflowMetadata.id, ) - ) + + if (created == false) { + log.warn("Metadata doc id:${monitorMetadata.id} exists, but it shouldn't!") + } + + if ( + Monitor.MonitorType.valueOf(monitor.monitorType.uppercase(Locale.ROOT)) == Monitor.MonitorType.DOC_LEVEL_MONITOR + ) { + val oldMonitorMetadata = MonitorMetadataService.getMetadata(monitor) + monitorMetadata = + monitorMetadata.copy(sourceToQueryIndexMapping = oldMonitorMetadata!!.sourceToQueryIndexMapping) + } + // When inserting queries in queryIndex we could update sourceToQueryIndexMapping + MonitorMetadataService.upsertMetadata(monitorMetadata, updating = true) + } + actionListener.onResponse( + IndexWorkflowResponse( + indexResponse.id, + indexResponse.version, + indexResponse.seqNo, + indexResponse.primaryTerm, + request.workflow.copy(id = indexResponse.id), + ), ) - return - } - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, XContentType.JSON - ) - val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) - validateMonitorV1(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return + } catch (t: Exception) { + log.error("Failed to index workflow", t) + actionListener.onFailure(AlertingException.wrap(t)) } - val workflow = scheduledJob as Workflow - onGetResponse(workflow) - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) } - } - - private suspend fun onGetResponse(currentWorkflow: Workflow) { - if (!checkUserPermissionsWithResource( - user, - currentWorkflow.user, - actionListener, - "workflow", - request.workflowId - ) - ) { - return - } - - // If both are enabled, use the current existing monitor enabled time, otherwise the next execution will be - // incorrect. 
- if (request.workflow.enabled && currentWorkflow.enabled) - request.workflow = request.workflow.copy(enabledTime = currentWorkflow.enabledTime) - /** - * On update workflow check which backend roles to associate to the workflow. - * Below are 2 examples of how the logic works - * - * Example 1, say we have a Workflow with backend roles [a, b, c, d] associated with it. - * If I'm User A (non-admin user) and I have backend roles [a, b, c] associated with me and I make a request to update - * the Workflow's backend roles to [a, b]. This would mean that the roles to remove are [c] and the roles to add are [a, b]. - * The Workflow's backend roles would then be [a, b, d]. - * - * Example 2, say we have a Workflow with backend roles [a, b, c, d] associated with it. - * If I'm User A (admin user) and I have backend roles [a, b, c] associated with me and I make a request to update - * the Workflow's backend roles to [a, b]. This would mean that the roles to remove are [c, d] and the roles to add are [a, b]. - * The Workflow's backend roles would then be [a, b]. 
- */ - if (user != null) { - if (request.rbacRoles != null) { - if (isAdmin(user)) { - request.workflow = request.workflow.copy( - user = User(user.name, request.rbacRoles, user.roles, user.customAttributes) - ) - } else { - // rolesToRemove: these are the backend roles to remove from the monitor - val rolesToRemove = user.backendRoles - request.rbacRoles.orEmpty() - // remove the monitor's roles with rolesToRemove and add any roles passed into the request.rbacRoles - val updatedRbac = - currentWorkflow.user?.backendRoles.orEmpty() - rolesToRemove + request.rbacRoles.orEmpty() - request.workflow = request.workflow.copy( - user = User(user.name, updatedRbac, user.roles, user.customAttributes) + private suspend fun updateWorkflow() { + val getRequest = GetRequest(SCHEDULED_JOBS_INDEX, request.workflowId) + try { + val getResponse: GetResponse = client.suspendUntil { client.get(getRequest, it) } + if (!getResponse.isExists) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Workflow with ${request.workflowId} is not found", + RestStatus.NOT_FOUND, + ), + ), ) + return } - } else { - request.workflow = request.workflow - .copy( - user = User( - user.name, - currentWorkflow.user!!.backendRoles, - user.roles, - user.customAttributes - ) + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, + XContentType.JSON, ) + val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) + validateMonitorV1(scheduledJob)?.let { + actionListener.onFailure(AlertingException.wrap(it)) + return + } + val workflow = scheduledJob as Workflow + onGetResponse(workflow) + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) } - log.debug("Update workflow backend roles to: ${request.workflow.user?.backendRoles}") } - request.workflow = request.workflow.copy(schemaVersion = IndexUtils.scheduledJobIndexSchemaVersion) - val indexRequest = 
IndexRequest(SCHEDULED_JOBS_INDEX) - .setRefreshPolicy(request.refreshPolicy) - .source( - request.workflow.toXContentWithUser( - jsonBuilder(), - ToXContent.MapParams(mapOf("with_type" to "true")) - ) - ) - .id(request.workflowId) - .setIfSeqNo(request.seqNo) - .setIfPrimaryTerm(request.primaryTerm) - .timeout(indexTimeout) - - try { - val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } - val failureReasons = checkShardsFailure(indexResponse) - if (failureReasons != null) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - failureReasons.toString(), - indexResponse.status() - ) - ) + private suspend fun onGetResponse(currentWorkflow: Workflow) { + if (!checkUserPermissionsWithResource( + user, + currentWorkflow.user, + actionListener, + "workflow", + request.workflowId, ) + ) { return } - val updatedWorkflow = request.workflow.copy(id = indexResponse.id) - val executionId = CompositeWorkflowRunner.generateExecutionId(false, updatedWorkflow) + // If both are enabled, use the current existing monitor enabled time, otherwise the next execution will be + // incorrect. + if (request.workflow.enabled && currentWorkflow.enabled) { + request.workflow = request.workflow.copy(enabledTime = currentWorkflow.enabledTime) + } - val (workflowMetadata, _) = WorkflowMetadataService.getOrCreateWorkflowMetadata( - workflow = updatedWorkflow, - skipIndex = false, - executionId = executionId - ) + /* + * On update workflow check which backend roles to associate to the workflow. + * Below are 2 examples of how the logic works + * + * Example 1, say we have a Workflow with backend roles [a, b, c, d] associated with it. + * If I'm User A (non-admin user) and I have backend roles [a, b, c] associated with me and I make a request to update + * the Workflow's backend roles to [a, b]. This would mean that the roles to remove are [c] and the roles to add are [a, b]. 
+ * The Workflow's backend roles would then be [a, b, d]. + * + * Example 2, say we have a Workflow with backend roles [a, b, c, d] associated with it. + * If I'm User A (admin user) and I have backend roles [a, b, c] associated with me and I make a request to update + * the Workflow's backend roles to [a, b]. This would mean that the roles to remove are [c, d] and the roles to add are [a, b]. + * The Workflow's backend roles would then be [a, b]. + */ + if (user != null) { + if (request.rbacRoles != null) { + if (isAdmin(user)) { + request.workflow = + request.workflow.copy( + user = User(user.name, request.rbacRoles, user.roles, user.customAttributes), + ) + } else { + // rolesToRemove: these are the backend roles to remove from the monitor + val rolesToRemove = user.backendRoles - request.rbacRoles.orEmpty() + // remove the monitor's roles with rolesToRemove and add any roles passed into the request.rbacRoles + val updatedRbac = + currentWorkflow.user?.backendRoles.orEmpty() - rolesToRemove + request.rbacRoles.orEmpty() + request.workflow = + request.workflow.copy( + user = User(user.name, updatedRbac, user.roles, user.customAttributes), + ) + } + } else { + request.workflow = + request.workflow + .copy( + user = + User( + user.name, + currentWorkflow.user!!.backendRoles, + user.roles, + user.customAttributes, + ), + ) + } + log.debug("Update workflow backend roles to: ${request.workflow.user?.backendRoles}") + } - val delegates = (updatedWorkflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } - val monitors = monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) + request.workflow = request.workflow.copy(schemaVersion = IndexUtils.scheduledJobIndexSchemaVersion) + val indexRequest = + IndexRequest(SCHEDULED_JOBS_INDEX) + .setRefreshPolicy(request.refreshPolicy) + .source( + request.workflow.toXContentWithUser( + jsonBuilder(), + ToXContent.MapParams(mapOf("with_type" to "true")), + ), + 
).id(request.workflowId) + .setIfSeqNo(request.seqNo) + .setIfPrimaryTerm(request.primaryTerm) + .timeout(indexTimeout) + + try { + val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } + val failureReasons = checkShardsFailure(indexResponse) + if (failureReasons != null) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + failureReasons.toString(), + indexResponse.status(), + ), + ), + ) + return + } - for (monitor in monitors) { - var isWorkflowRestarted = false + val updatedWorkflow = request.workflow.copy(id = indexResponse.id) + val executionId = CompositeWorkflowRunner.generateExecutionId(false, updatedWorkflow) - if (request.workflow.enabled && !currentWorkflow.enabled) { - isWorkflowRestarted = true - } + val (workflowMetadata, _) = + WorkflowMetadataService.getOrCreateWorkflowMetadata( + workflow = updatedWorkflow, + skipIndex = false, + executionId = executionId, + ) - val (monitorMetadata, created) = MonitorMetadataService.getOrCreateMetadata( - monitor = monitor, - createWithRunContext = true, - workflowMetadataId = workflowMetadata.id, - forceCreateLastRunContext = isWorkflowRestarted - ) + val delegates = (updatedWorkflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } + val monitors = monitorCtx.workflowService!!.getMonitorsById(delegates.map { it.monitorId }, delegates.size) + + for (monitor in monitors) { + var isWorkflowRestarted = false - if (!created && - Monitor.MonitorType.valueOf(monitor.monitorType.uppercase(Locale.ROOT)) == Monitor.MonitorType.DOC_LEVEL_MONITOR - ) { - var updatedMetadata = MonitorMetadataService.recreateRunContext(monitorMetadata, monitor) - val oldMonitorMetadata = MonitorMetadataService.getMetadata(monitor) - updatedMetadata = updatedMetadata.copy(sourceToQueryIndexMapping = oldMonitorMetadata!!.sourceToQueryIndexMapping) - MonitorMetadataService.upsertMetadata(updatedMetadata, updating = true) + if (request.workflow.enabled && 
!currentWorkflow.enabled) { + isWorkflowRestarted = true + } + + val (monitorMetadata, created) = + MonitorMetadataService.getOrCreateMetadata( + monitor = monitor, + createWithRunContext = true, + workflowMetadataId = workflowMetadata.id, + forceCreateLastRunContext = isWorkflowRestarted, + ) + + if (!created && + Monitor.MonitorType.valueOf(monitor.monitorType.uppercase(Locale.ROOT)) == Monitor.MonitorType.DOC_LEVEL_MONITOR + ) { + var updatedMetadata = MonitorMetadataService.recreateRunContext(monitorMetadata, monitor) + val oldMonitorMetadata = MonitorMetadataService.getMetadata(monitor) + updatedMetadata = + updatedMetadata.copy(sourceToQueryIndexMapping = oldMonitorMetadata!!.sourceToQueryIndexMapping) + MonitorMetadataService.upsertMetadata(updatedMetadata, updating = true) + } } - } - actionListener.onResponse( - IndexWorkflowResponse( - indexResponse.id, indexResponse.version, indexResponse.seqNo, - indexResponse.primaryTerm, request.workflow.copy(id = currentWorkflow.id) + actionListener.onResponse( + IndexWorkflowResponse( + indexResponse.id, + indexResponse.version, + indexResponse.seqNo, + indexResponse.primaryTerm, + request.workflow.copy(id = currentWorkflow.id), + ), ) - ) - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } } - } - private fun checkShardsFailure(response: IndexResponse): String? { - val failureReasons = StringBuilder() - if (response.shardInfo.failed > 0) { - response.shardInfo.failures.forEach { entry -> - failureReasons.append(entry.reason()) + private fun checkShardsFailure(response: IndexResponse): String? 
{ + val failureReasons = StringBuilder() + if (response.shardInfo.failed > 0) { + response.shardInfo.failures.forEach { entry -> + failureReasons.append(entry.reason()) + } + return failureReasons.toString() } - return failureReasons.toString() + return null } - return null } - } - - private fun validateChainedMonitorFindingsMonitors(delegates: List, monitorDelegates: List) { - infix fun List.equalsIgnoreOrder(other: List) = - this.size == other.size && this.toSet() == other.toSet() - val monitorsById = monitorDelegates.associateBy { it.id } - delegates.forEach { - - val delegateMonitor = monitorsById[it.monitorId] ?: throw AlertingException.wrap( - IllegalArgumentException("Delegate monitor ${it.monitorId} doesn't exist") - ) - if (it.chainedMonitorFindings != null) { - val chainedMonitorIds: MutableList = mutableListOf() - if (it.chainedMonitorFindings!!.monitorId.isNullOrBlank()) { - chainedMonitorIds.addAll(it.chainedMonitorFindings!!.monitorIds) - } else { - chainedMonitorIds.add(it.chainedMonitorFindings!!.monitorId!!) 
- } - chainedMonitorIds.forEach { chainedMonitorId -> - val chainedFindingMonitor = - monitorsById[chainedMonitorId] ?: throw AlertingException.wrap( - IllegalArgumentException("Chained finding monitor $chainedMonitorId doesn't exist") - ) + private fun validateChainedMonitorFindingsMonitors( + delegates: List, + monitorDelegates: List, + ) { + infix fun List.equalsIgnoreOrder(other: List) = this.size == other.size && this.toSet() == other.toSet() - if (chainedFindingMonitor.isQueryLevelMonitor()) { - throw AlertingException.wrap(IllegalArgumentException("Query level monitor can't be part of chained findings")) + val monitorsById = monitorDelegates.associateBy { it.id } + delegates.forEach { + val delegateMonitor = + monitorsById[it.monitorId] ?: throw AlertingException.wrap( + IllegalArgumentException("Delegate monitor ${it.monitorId} doesn't exist"), + ) + if (it.chainedMonitorFindings != null) { + val chainedMonitorIds: MutableList = mutableListOf() + if (it.chainedMonitorFindings!!.monitorId.isNullOrBlank()) { + chainedMonitorIds.addAll(it.chainedMonitorFindings!!.monitorIds) + } else { + chainedMonitorIds.add(it.chainedMonitorFindings!!.monitorId!!) 
} + chainedMonitorIds.forEach { chainedMonitorId -> + val chainedFindingMonitor = + monitorsById[chainedMonitorId] ?: throw AlertingException.wrap( + IllegalArgumentException("Chained finding monitor $chainedMonitorId doesn't exist"), + ) + + if (chainedFindingMonitor.isQueryLevelMonitor()) { + throw AlertingException.wrap(IllegalArgumentException("Query level monitor can't be part of chained findings")) + } - val delegateMonitorIndices = getMonitorIndices(delegateMonitor) + val delegateMonitorIndices = getMonitorIndices(delegateMonitor) - val chainedMonitorIndices = getMonitorIndices(chainedFindingMonitor) + val chainedMonitorIndices = getMonitorIndices(chainedFindingMonitor) - if (!delegateMonitorIndices.containsAll(chainedMonitorIndices)) { - throw AlertingException.wrap( - IllegalArgumentException( - "Delegate monitor indices ${delegateMonitorIndices.joinToString()} " + - "doesn't query all of chained findings monitor's indices ${chainedMonitorIndices.joinToString()}}" + if (!delegateMonitorIndices.containsAll(chainedMonitorIndices)) { + throw AlertingException.wrap( + IllegalArgumentException( + "Delegate monitor indices ${delegateMonitorIndices.joinToString()} " + + "doesn't query all of chained findings monitor's indices ${chainedMonitorIndices.joinToString()}}", + ), ) - ) + } } } } } - } - /** - * Returns list of indices for the given monitor depending on it's type - */ - private fun getMonitorIndices(monitor: Monitor): List { - if (monitor.isMonitorOfStandardType()) { - return when (Monitor.MonitorType.valueOf(monitor.monitorType.uppercase(Locale.ROOT))) { - Monitor.MonitorType.DOC_LEVEL_MONITOR -> (monitor.inputs[0] as DocLevelMonitorInput).indices - Monitor.MonitorType.BUCKET_LEVEL_MONITOR -> monitor.inputs.flatMap { s -> (s as SearchInput).indices } - Monitor.MonitorType.QUERY_LEVEL_MONITOR -> { - if (isADMonitor(monitor)) monitor.inputs.flatMap { s -> (s as SearchInput).indices } - else { - val indices = mutableListOf() - for (input in monitor.inputs) 
{ - when (input) { - is SearchInput -> indices.addAll(input.indices) - else -> indices + /** + * Returns list of indices for the given monitor depending on it's type + */ + private fun getMonitorIndices(monitor: Monitor): List { + if (monitor.isMonitorOfStandardType()) { + return when (Monitor.MonitorType.valueOf(monitor.monitorType.uppercase(Locale.ROOT))) { + Monitor.MonitorType.DOC_LEVEL_MONITOR -> { + (monitor.inputs[0] as DocLevelMonitorInput).indices + } + + Monitor.MonitorType.BUCKET_LEVEL_MONITOR -> { + monitor.inputs.flatMap { s -> (s as SearchInput).indices } + } + + Monitor.MonitorType.QUERY_LEVEL_MONITOR -> { + if (isADMonitor(monitor)) { + monitor.inputs.flatMap { s -> (s as SearchInput).indices } + } else { + val indices = mutableListOf() + for (input in monitor.inputs) { + when (input) { + is SearchInput -> indices.addAll(input.indices) + else -> indices + } } + indices } - indices } - } - else -> emptyList() + else -> { + emptyList() + } + } + } else { + return emptyList() } - } else { - return emptyList() } - } - private fun validateDelegateMonitorsExist( - monitorIds: List, - delegateMonitors: List, - ) { - val reqMonitorIds: MutableList = monitorIds as MutableList - delegateMonitors.forEach { - reqMonitorIds.remove(it.id) - } - if (reqMonitorIds.isNotEmpty()) { - throw AlertingException.wrap(IllegalArgumentException(("${reqMonitorIds.joinToString()} are not valid monitor ids"))) + private fun validateDelegateMonitorsExist( + monitorIds: List, + delegateMonitors: List, + ) { + val reqMonitorIds: MutableList = monitorIds as MutableList + delegateMonitors.forEach { + reqMonitorIds.remove(it.id) + } + if (reqMonitorIds.isNotEmpty()) { + throw AlertingException.wrap(IllegalArgumentException(("${reqMonitorIds.joinToString()} are not valid monitor ids"))) + } } - } - /** - * Validates monitor and indices access - * 1. Validates the monitor access (if the filterByEnabled is set to true - adds backend role filter) as admin - * 2. 
Unstashes the context and checks if the user can access the monitor indices - */ - private suspend fun validateMonitorAccess( - request: IndexWorkflowRequest, - user: User?, - client: Client, - actionListener: ActionListener, - ) { - val compositeInput = request.workflow.inputs[0] as CompositeInput - val monitorIds = compositeInput.sequence.delegates.stream().map { it.monitorId }.collect(Collectors.toList()) - val query = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", monitorIds)) - val searchSource = SearchSourceBuilder().query(query) - val searchRequest = SearchRequest(SCHEDULED_JOBS_INDEX).source(searchSource) - - if (user != null && !isAdmin(user) && filterByEnabled) { - addFilter(user, searchRequest.source(), "monitor.user.backend_roles.keyword") - } + /** + * Validates monitor and indices access + * 1. Validates the monitor access (if the filterByEnabled is set to true - adds backend role filter) as admin + * 2. Unstashes the context and checks if the user can access the monitor indices + */ + private suspend fun validateMonitorAccess( + request: IndexWorkflowRequest, + user: User?, + client: Client, + actionListener: ActionListener, + ) { + val compositeInput = request.workflow.inputs[0] as CompositeInput + val monitorIds = + compositeInput.sequence.delegates + .stream() + .map { it.monitorId } + .collect(Collectors.toList()) + val query = QueryBuilders.boolQuery().filter(QueryBuilders.termsQuery("_id", monitorIds)) + val searchSource = SearchSourceBuilder().query(query) + val searchRequest = SearchRequest(SCHEDULED_JOBS_INDEX).source(searchSource) + + if (user != null && !isAdmin(user) && filterByEnabled) { + addFilter(user, searchRequest.source(), "monitor.user.backend_roles.keyword") + } - val searchMonitorResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } + val searchMonitorResponse: SearchResponse = client.suspendUntil { client.search(searchRequest, it) } - if (searchMonitorResponse.isTimedOut) { - 
throw OpenSearchException("Cannot determine that the $SCHEDULED_JOBS_INDEX index is healthy") - } - val monitors = mutableListOf() - for (hit in searchMonitorResponse.hits) { - XContentType.JSON.xContent().createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, hit.sourceAsString - ).use { hitsParser -> - val scheduledJob = ScheduledJob.parse(hitsParser, hit.id, hit.version) - validateMonitorV1(scheduledJob)?.let { - throw OpenSearchException(it) - } - val monitor = scheduledJob as Monitor - monitors.add(monitor) + if (searchMonitorResponse.isTimedOut) { + throw OpenSearchException("Cannot determine that the $SCHEDULED_JOBS_INDEX index is healthy") } - } - if (monitors.isEmpty()) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "User doesn't have read permissions for one or more configured monitors ${monitorIds.joinToString()}", - RestStatus.FORBIDDEN - ) - ) - ) - return - } - // Validate delegates and it's chained findings - try { - validateDelegateMonitorsExist(monitorIds, monitors) - validateChainedMonitorFindingsMonitors(compositeInput.sequence.delegates, monitors) - } catch (e: Exception) { - actionListener.onFailure(e) - return - } - val indices = getMonitorIndices(monitors) - - val indicesSearchRequest = SearchRequest().indices(*indices.toTypedArray()) - .source(SearchSourceBuilder.searchSource().size(1).query(QueryBuilders.matchAllQuery())) - - if (user != null && filterByEnabled) { - // Unstash the context and check if user with specified roles has indices access - withClosableContext( - InjectorContextElement( - user.name.plus(UUID.randomUUID().toString()), - settings, - client.threadPool().threadContext, - user.roles, - user + val monitors = mutableListOf() + for (hit in searchMonitorResponse.hits) { + XContentType.JSON + .xContent() + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceAsString, + ).use { hitsParser -> + val scheduledJob = 
ScheduledJob.parse(hitsParser, hit.id, hit.version) + validateMonitorV1(scheduledJob)?.let { + throw OpenSearchException(it) + } + val monitor = scheduledJob as Monitor + monitors.add(monitor) + } + } + if (monitors.isEmpty()) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "User doesn't have read permissions for one or more configured monitors ${monitorIds.joinToString()}", + RestStatus.FORBIDDEN, + ), + ), ) - ) { + return + } + // Validate delegates and it's chained findings + try { + validateDelegateMonitorsExist(monitorIds, monitors) + validateChainedMonitorFindingsMonitors(compositeInput.sequence.delegates, monitors) + } catch (e: Exception) { + actionListener.onFailure(e) + return + } + val indices = getMonitorIndices(monitors) + + val indicesSearchRequest = + SearchRequest() + .indices(*indices.toTypedArray()) + .source(SearchSourceBuilder.searchSource().size(1).query(QueryBuilders.matchAllQuery())) + + if (user != null && filterByEnabled) { + // Unstash the context and check if user with specified roles has indices access + withClosableContext( + InjectorContextElement( + user.name.plus(UUID.randomUUID().toString()), + settings, + client.threadPool().threadContext, + user.roles, + user, + ), + ) { + checkIndicesAccess(client, indicesSearchRequest, indices, actionListener) + } + } else { checkIndicesAccess(client, indicesSearchRequest, indices, actionListener) } - } else { - checkIndicesAccess(client, indicesSearchRequest, indices, actionListener) } - } - /** - * Checks if the client can access the given indices - */ - private fun checkIndicesAccess( - client: Client, - indicesSearchRequest: SearchRequest?, - indices: MutableList, - actionListener: ActionListener, - ) { - client.search( - indicesSearchRequest, - object : ActionListener { - override fun onResponse(response: SearchResponse?) 
{ - actionListener.onResponse(AcknowledgedResponse(true)) - } + /** + * Checks if the client can access the given indices + */ + private fun checkIndicesAccess( + client: Client, + indicesSearchRequest: SearchRequest?, + indices: MutableList, + actionListener: ActionListener, + ) { + client.search( + indicesSearchRequest, + object : ActionListener { + override fun onResponse(response: SearchResponse?) { + actionListener.onResponse(AcknowledgedResponse(true)) + } - override fun onFailure(e: Exception) { - log.error("Error accessing the monitor indices", e) - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "User doesn't have read permissions for one or more configured index ${indices.joinToString()}", - RestStatus.FORBIDDEN - ) + override fun onFailure(e: Exception) { + log.error("Error accessing the monitor indices", e) + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "User doesn't have read permissions for one or more configured index ${indices.joinToString()}", + RestStatus.FORBIDDEN, + ), + ), ) - ) - } - } - ) - } + } + }, + ) + } - /** - * Extract indices from monitors - */ - private fun getMonitorIndices(monitors: List): MutableList { - val indices = mutableListOf() + /** + * Extract indices from monitors + */ + private fun getMonitorIndices(monitors: List): MutableList { + val indices = mutableListOf() - val searchInputs = - monitors.flatMap { monitor -> - monitor.inputs.filter { - it.name() == SearchInput.SEARCH_FIELD || it.name() == DocLevelMonitorInput.DOC_LEVEL_INPUT_FIELD + val searchInputs = + monitors.flatMap { monitor -> + monitor.inputs.filter { + it.name() == SearchInput.SEARCH_FIELD || it.name() == DocLevelMonitorInput.DOC_LEVEL_INPUT_FIELD + } } + searchInputs.forEach { + val inputIndices = + if (it.name() == SearchInput.SEARCH_FIELD) { + (it as SearchInput).indices + } else { + (it as DocLevelMonitorInput).indices + } + indices.addAll(inputIndices) } - searchInputs.forEach { - 
val inputIndices = if (it.name() == SearchInput.SEARCH_FIELD) (it as SearchInput).indices - else (it as DocLevelMonitorInput).indices - indices.addAll(inputIndices) + return indices } - return indices } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchAlertingCommentAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchAlertingCommentAction.kt index 9e8c3d153..c76c72f14 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchAlertingCommentAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchAlertingCommentAction.kt @@ -49,135 +49,164 @@ import java.io.IOException private val log = LogManager.getLogger(TransportSearchAlertingCommentAction::class.java) private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) -class TransportSearchAlertingCommentAction @Inject constructor( - transportService: TransportService, - val settings: Settings, - val client: Client, - clusterService: ClusterService, - actionFilters: ActionFilters, - val namedWriteableRegistry: NamedWriteableRegistry -) : HandledTransportAction( - AlertingActions.SEARCH_COMMENTS_ACTION_NAME, transportService, actionFilters, ::SearchRequest -), - SecureTransportAction { - - @Volatile private var alertingCommentsEnabled = AlertingSettings.ALERTING_COMMENTS_ENABLED.get(settings) - @Volatile override var filterByEnabled: Boolean = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.ALERTING_COMMENTS_ENABLED) { - alertingCommentsEnabled = it +class TransportSearchAlertingCommentAction + @Inject + constructor( + transportService: TransportService, + val settings: Settings, + val client: Client, + clusterService: ClusterService, + actionFilters: ActionFilters, + val namedWriteableRegistry: NamedWriteableRegistry, + ) : HandledTransportAction( + 
AlertingActions.SEARCH_COMMENTS_ACTION_NAME, + transportService, + actionFilters, + ::SearchRequest, + ), + SecureTransportAction { + @Volatile private var alertingCommentsEnabled = AlertingSettings.ALERTING_COMMENTS_ENABLED.get(settings) + + @Volatile override var filterByEnabled: Boolean = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(AlertingSettings.ALERTING_COMMENTS_ENABLED) { + alertingCommentsEnabled = it + } + listenFilterBySettingChange(clusterService) } - listenFilterBySettingChange(clusterService) - } - override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { - // validate feature flag enabled - if (!alertingCommentsEnabled) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Comments for Alerting is currently disabled", RestStatus.FORBIDDEN), + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, + ) { + // validate feature flag enabled + if (!alertingCommentsEnabled) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Comments for Alerting is currently disabled", RestStatus.FORBIDDEN), + ), ) - ) - return - } - - val transformedRequest = request as? SearchCommentRequest - ?: recreateObject(request, namedWriteableRegistry) { - SearchCommentRequest(it) + return } - val searchSourceBuilder = transformedRequest.searchRequest.source() - .seqNoAndPrimaryTerm(true) - .version(true) - val queryBuilder = if (searchSourceBuilder.query() == null) BoolQueryBuilder() - else QueryBuilders.boolQuery().must(searchSourceBuilder.query()) + val transformedRequest = + request as? 
SearchCommentRequest + ?: recreateObject(request, namedWriteableRegistry) { + SearchCommentRequest(it) + } + + val searchSourceBuilder = + transformedRequest.searchRequest + .source() + .seqNoAndPrimaryTerm(true) + .version(true) + val queryBuilder = + if (searchSourceBuilder.query() == null) { + BoolQueryBuilder() + } else { + QueryBuilders.boolQuery().must(searchSourceBuilder.query()) + } - searchSourceBuilder.query(queryBuilder) - .seqNoAndPrimaryTerm(true) - .version(true) + searchSourceBuilder + .query(queryBuilder) + .seqNoAndPrimaryTerm(true) + .version(true) - val user = readUserFromThreadContext(client) - client.threadPool().threadContext.stashContext().use { - scope.launch { - resolve(transformedRequest, actionListener, user) + val user = readUserFromThreadContext(client) + client.threadPool().threadContext.stashContext().use { + scope.launch { + resolve(transformedRequest, actionListener, user) + } } } - } - suspend fun resolve(searchCommentRequest: SearchCommentRequest, actionListener: ActionListener, user: User?) { - if (user == null) { - // user is null when: 1/ security is disabled. 2/when user is super-admin. - search(searchCommentRequest.searchRequest, actionListener) - } else if (!doFilterForUser(user)) { - // security is enabled and filterby is disabled. - search(searchCommentRequest.searchRequest, actionListener) - } else { - // security is enabled and filterby is enabled. 
- try { - log.debug("Filtering result by: {}", user.backendRoles) - - // first retrieve all Alert IDs current User can see after filtering by backend roles - val alertIDs = getFilteredAlertIDs(user) - - // then filter the returned Comments based on the Alert IDs they're allowed to see - val queryBuilder = searchCommentRequest.searchRequest.source().query() as BoolQueryBuilder - searchCommentRequest.searchRequest.source().query( - queryBuilder.filter( - QueryBuilders.termsQuery(Comment.ENTITY_ID_FIELD, alertIDs) + suspend fun resolve( + searchCommentRequest: SearchCommentRequest, + actionListener: ActionListener, + user: User?, + ) { + if (user == null) { + // user is null when: 1/ security is disabled. 2/when user is super-admin. + search(searchCommentRequest.searchRequest, actionListener) + } else if (!doFilterForUser(user)) { + // security is enabled and filterby is disabled. + search(searchCommentRequest.searchRequest, actionListener) + } else { + // security is enabled and filterby is enabled. 
+ try { + log.debug("Filtering result by: {}", user.backendRoles) + + // first retrieve all Alert IDs current User can see after filtering by backend roles + val alertIDs = getFilteredAlertIDs(user) + + // then filter the returned Comments based on the Alert IDs they're allowed to see + val queryBuilder = searchCommentRequest.searchRequest.source().query() as BoolQueryBuilder + searchCommentRequest.searchRequest.source().query( + queryBuilder.filter( + QueryBuilders.termsQuery(Comment.ENTITY_ID_FIELD, alertIDs), + ), ) - ) - search(searchCommentRequest.searchRequest, actionListener) - } catch (ex: IOException) { - actionListener.onFailure(AlertingException.wrap(ex)) + search(searchCommentRequest.searchRequest, actionListener) + } catch (ex: IOException) { + actionListener.onFailure(AlertingException.wrap(ex)) + } } } - } - fun search(searchRequest: SearchRequest, actionListener: ActionListener) { - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(response: SearchResponse) { - actionListener.onResponse(response) - } + fun search( + searchRequest: SearchRequest, + actionListener: ActionListener, + ) { + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(response: SearchResponse) { + actionListener.onResponse(response) + } + + override fun onFailure(t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } + }, + ) + } - override fun onFailure(t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + // retrieve the IDs of all Alerts after filtering by current User's + // backend roles + private suspend fun getFilteredAlertIDs(user: User): List { + val queryBuilder = + QueryBuilders + .boolQuery() + .filter(QueryBuilders.termsQuery("monitor_user.backend_roles.keyword", user.backendRoles)) + val searchSourceBuilder = + SearchSourceBuilder() + .version(true) + .seqNoAndPrimaryTerm(true) + .query(queryBuilder) + val searchRequest = + SearchRequest() + 
.source(searchSourceBuilder) + .indices(ALL_ALERT_INDEX_PATTERN) + // .preference(Preference.PRIMARY_FIRST.type()) // expensive, be careful + + val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } + val alertIDs = + searchResponse.hits.map { hit -> + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val alert = Alert.parse(xcp, hit.id, hit.version) + alert.id } - } - ) - } - // retrieve the IDs of all Alerts after filtering by current User's - // backend roles - private suspend fun getFilteredAlertIDs(user: User): List { - val queryBuilder = QueryBuilders - .boolQuery() - .filter(QueryBuilders.termsQuery("monitor_user.backend_roles.keyword", user.backendRoles)) - val searchSourceBuilder = - SearchSourceBuilder() - .version(true) - .seqNoAndPrimaryTerm(true) - .query(queryBuilder) - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(ALL_ALERT_INDEX_PATTERN) - // .preference(Preference.PRIMARY_FIRST.type()) // expensive, be careful - - val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } - val alertIDs = searchResponse.hits.map { hit -> - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val alert = Alert.parse(xcp, hit.id, hit.version) - alert.id + return alertIDs } - - return alertIDs } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchEmailAccountAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchEmailAccountAction.kt index b63d9a488..ac384c851 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchEmailAccountAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchEmailAccountAction.kt @@ -24,49 +24,56 @@ import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import org.opensearch.transport.client.Client -class TransportSearchEmailAccountAction @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val clusterService: ClusterService, - settings: Settings -) : HandledTransportAction( - SearchEmailAccountAction.NAME, transportService, actionFilters, ::SearchRequest -) { +class TransportSearchEmailAccountAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val clusterService: ClusterService, + settings: Settings, + ) : HandledTransportAction( + SearchEmailAccountAction.NAME, + transportService, + actionFilters, + ::SearchRequest, + ) { + @Volatile private var allowList = ALLOW_LIST.get(settings) - @Volatile private var allowList = ALLOW_LIST.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } - } - - override fun doExecute(task: Task, searchRequest: SearchRequest, actionListener: ActionListener) { + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } + } - if (!allowList.contains(DestinationType.EMAIL.value)) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "This API is blocked since Destination type [${DestinationType.EMAIL}] is not allowed", - RestStatus.FORBIDDEN - ) + override fun doExecute( + task: Task, + searchRequest: SearchRequest, + actionListener: ActionListener, + ) { + if (!allowList.contains(DestinationType.EMAIL.value)) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "This API is blocked since 
Destination type [${DestinationType.EMAIL}] is not allowed", + RestStatus.FORBIDDEN, + ), + ), ) - ) - return - } + return + } - client.threadPool().threadContext.stashContext().use { - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(response: SearchResponse) { - actionListener.onResponse(response) - } + client.threadPool().threadContext.stashContext().use { + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(response: SearchResponse) { + actionListener.onResponse(response) + } - override fun onFailure(e: Exception) { - actionListener.onFailure(e) - } - } - ) + override fun onFailure(e: Exception) { + actionListener.onFailure(e) + } + }, + ) + } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchEmailGroupAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchEmailGroupAction.kt index f6b3b92d1..c38bd18c8 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchEmailGroupAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchEmailGroupAction.kt @@ -24,49 +24,56 @@ import org.opensearch.tasks.Task import org.opensearch.transport.TransportService import org.opensearch.transport.client.Client -class TransportSearchEmailGroupAction @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val clusterService: ClusterService, - settings: Settings -) : HandledTransportAction( - SearchEmailGroupAction.NAME, transportService, actionFilters, ::SearchRequest -) { +class TransportSearchEmailGroupAction + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val clusterService: ClusterService, + settings: Settings, + ) : HandledTransportAction( + SearchEmailGroupAction.NAME, + transportService, + actionFilters, + ::SearchRequest, + ) { + @Volatile 
private var allowList = ALLOW_LIST.get(settings) - @Volatile private var allowList = ALLOW_LIST.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } - } - - override fun doExecute(task: Task, searchRequest: SearchRequest, actionListener: ActionListener) { + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALLOW_LIST) { allowList = it } + } - if (!allowList.contains(DestinationType.EMAIL.value)) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "This API is blocked since Destination type [${DestinationType.EMAIL}] is not allowed", - RestStatus.FORBIDDEN - ) + override fun doExecute( + task: Task, + searchRequest: SearchRequest, + actionListener: ActionListener, + ) { + if (!allowList.contains(DestinationType.EMAIL.value)) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "This API is blocked since Destination type [${DestinationType.EMAIL}] is not allowed", + RestStatus.FORBIDDEN, + ), + ), ) - ) - return - } + return + } - client.threadPool().threadContext.stashContext().use { - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(response: SearchResponse) { - actionListener.onResponse(response) - } + client.threadPool().threadContext.stashContext().use { + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(response: SearchResponse) { + actionListener.onResponse(response) + } - override fun onFailure(e: Exception) { - actionListener.onFailure(e) - } - } - ) + override fun onFailure(e: Exception) { + actionListener.onFailure(e) + } + }, + ) + } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt index 099fd9a2b..742f63930 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transport/TransportSearchMonitorAction.kt @@ -39,100 +39,137 @@ import org.opensearch.transport.client.Client private val log = LogManager.getLogger(TransportSearchMonitorAction::class.java) -class TransportSearchMonitorAction @Inject constructor( - transportService: TransportService, - val settings: Settings, - val client: Client, - clusterService: ClusterService, - actionFilters: ActionFilters, - val namedWriteableRegistry: NamedWriteableRegistry -) : HandledTransportAction( - AlertingActions.SEARCH_MONITORS_ACTION_NAME, transportService, actionFilters, ::SearchMonitorRequest -), - SecureTransportAction { - @Volatile - override var filterByEnabled: Boolean = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - init { - listenFilterBySettingChange(clusterService) - } +class TransportSearchMonitorAction + @Inject + constructor( + transportService: TransportService, + val settings: Settings, + val client: Client, + clusterService: ClusterService, + actionFilters: ActionFilters, + val namedWriteableRegistry: NamedWriteableRegistry, + ) : HandledTransportAction( + AlertingActions.SEARCH_MONITORS_ACTION_NAME, + transportService, + actionFilters, + ::SearchMonitorRequest, + ), + SecureTransportAction { + @Volatile + override var filterByEnabled: Boolean = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - override fun doExecute(task: Task, request: ActionRequest, actionListener: ActionListener) { - val transformedRequest = request as? 
SearchMonitorRequest - ?: recreateObject(request, namedWriteableRegistry) { - SearchMonitorRequest(it) - } + init { + listenFilterBySettingChange(clusterService) + } - val searchSourceBuilder = transformedRequest.searchRequest.source() - .seqNoAndPrimaryTerm(true) - .version(true) - val queryBuilder = if (searchSourceBuilder.query() == null) BoolQueryBuilder() - else QueryBuilders.boolQuery().must(searchSourceBuilder.query()) + override fun doExecute( + task: Task, + request: ActionRequest, + actionListener: ActionListener, + ) { + val transformedRequest = + request as? SearchMonitorRequest + ?: recreateObject(request, namedWriteableRegistry) { + SearchMonitorRequest(it) + } - // The SearchMonitor API supports one 'index' parameter of either the SCHEDULED_JOBS_INDEX or ALL_ALERT_INDEX_PATTERN. - // When querying the ALL_ALERT_INDEX_PATTERN, we don't want to check whether the MONITOR_TYPE field exists - // because we're querying alert indexes. - if (transformedRequest.searchRequest.indices().contains(ScheduledJob.SCHEDULED_JOBS_INDEX)) { - val monitorWorkflowType = QueryBuilders.boolQuery().should(QueryBuilders.existsQuery(Monitor.MONITOR_TYPE)) - .should(QueryBuilders.existsQuery(Workflow.WORKFLOW_TYPE)) - queryBuilder.must(monitorWorkflowType) - } + val searchSourceBuilder = + transformedRequest.searchRequest + .source() + .seqNoAndPrimaryTerm(true) + .version(true) + val queryBuilder = + if (searchSourceBuilder.query() == null) { + BoolQueryBuilder() + } else { + QueryBuilders.boolQuery().must(searchSourceBuilder.query()) + } + + // The SearchMonitor API supports one 'index' parameter of either the SCHEDULED_JOBS_INDEX or ALL_ALERT_INDEX_PATTERN. + // When querying the ALL_ALERT_INDEX_PATTERN, we don't want to check whether the MONITOR_TYPE field exists + // because we're querying alert indexes. 
+ if (transformedRequest.searchRequest.indices().contains(ScheduledJob.SCHEDULED_JOBS_INDEX)) { + val monitorWorkflowType = + QueryBuilders + .boolQuery() + .should(QueryBuilders.existsQuery(Monitor.MONITOR_TYPE)) + .should(QueryBuilders.existsQuery(Workflow.WORKFLOW_TYPE)) + queryBuilder.must(monitorWorkflowType) + } - searchSourceBuilder.query(queryBuilder) - .seqNoAndPrimaryTerm(true) - .version(true) - addOwnerFieldIfNotExists(transformedRequest.searchRequest) - val user = readUserFromThreadContext(client) - client.threadPool().threadContext.stashContext().use { - resolve(transformedRequest, actionListener, user) + searchSourceBuilder + .query(queryBuilder) + .seqNoAndPrimaryTerm(true) + .version(true) + addOwnerFieldIfNotExists(transformedRequest.searchRequest) + val user = readUserFromThreadContext(client) + client.threadPool().threadContext.stashContext().use { + resolve(transformedRequest, actionListener, user) + } } - } - fun resolve(searchMonitorRequest: SearchMonitorRequest, actionListener: ActionListener, user: User?) { - if (user == null) { - // user header is null when: 1/ security is disabled. 2/when user is super-admin. - search(searchMonitorRequest.searchRequest, actionListener) - } else if (!doFilterForUser(user)) { - // security is enabled and filterby is disabled. - search(searchMonitorRequest.searchRequest, actionListener) - } else { - // security is enabled and filterby is enabled. - log.info("Filtering result by: ${user.backendRoles}") - addFilter(user, searchMonitorRequest.searchRequest.source(), "monitor.user.backend_roles.keyword") - search(searchMonitorRequest.searchRequest, actionListener) + fun resolve( + searchMonitorRequest: SearchMonitorRequest, + actionListener: ActionListener, + user: User?, + ) { + if (user == null) { + // user header is null when: 1/ security is disabled. 2/when user is super-admin. 
+ search(searchMonitorRequest.searchRequest, actionListener) + } else if (!doFilterForUser(user)) { + // security is enabled and filterby is disabled. + search(searchMonitorRequest.searchRequest, actionListener) + } else { + // security is enabled and filterby is enabled. + log.info("Filtering result by: ${user.backendRoles}") + addFilter(user, searchMonitorRequest.searchRequest.source(), "monitor.user.backend_roles.keyword") + search(searchMonitorRequest.searchRequest, actionListener) + } } - } - fun search(searchRequest: SearchRequest, actionListener: ActionListener) { - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(response: SearchResponse) { - actionListener.onResponse(response) - } + fun search( + searchRequest: SearchRequest, + actionListener: ActionListener, + ) { + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(response: SearchResponse) { + actionListener.onResponse(response) + } + + override fun onFailure(ex: Exception) { + if (isIndexNotFoundException(ex)) { + log.error("Index not found while searching monitor", ex) + val emptyResponse = getEmptySearchResponse() + actionListener.onResponse(emptyResponse) + } else { + log.error("Unexpected error while searching monitor", ex) + actionListener.onFailure(AlertingException.wrap(ex)) + } + } + }, + ) + } - override fun onFailure(ex: Exception) { - if (isIndexNotFoundException(ex)) { - log.error("Index not found while searching monitor", ex) - val emptyResponse = getEmptySearchResponse() - actionListener.onResponse(emptyResponse) + private fun addOwnerFieldIfNotExists(searchRequest: SearchRequest) { + if (searchRequest.source().query() == null || searchRequest + .source() + .query() + .toString() + .contains("monitor.owner") == false + ) { + var boolQueryBuilder: BoolQueryBuilder = + if (searchRequest.source().query() == null) { + BoolQueryBuilder() } else { - log.error("Unexpected error while searching monitor", ex) - 
actionListener.onFailure(AlertingException.wrap(ex)) + QueryBuilders.boolQuery().must(searchRequest.source().query()) } - } + val bqb = BoolQueryBuilder() + bqb.should().add(BoolQueryBuilder().mustNot(ExistsQueryBuilder("monitor.owner"))) + bqb.should().add(BoolQueryBuilder().must(MatchQueryBuilder("monitor.owner", "alerting"))) + boolQueryBuilder.filter(bqb) + searchRequest.source().query(boolQueryBuilder) } - ) - } - - private fun addOwnerFieldIfNotExists(searchRequest: SearchRequest) { - if (searchRequest.source().query() == null || searchRequest.source().query().toString().contains("monitor.owner") == false) { - var boolQueryBuilder: BoolQueryBuilder = if (searchRequest.source().query() == null) BoolQueryBuilder() - else QueryBuilders.boolQuery().must(searchRequest.source().query()) - val bqb = BoolQueryBuilder() - bqb.should().add(BoolQueryBuilder().mustNot(ExistsQueryBuilder("monitor.owner"))) - bqb.should().add(BoolQueryBuilder().must(MatchQueryBuilder("monitor.owner", "alerting"))) - boolQueryBuilder.filter(bqb) - searchRequest.source().query(boolQueryBuilder) } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportDeleteMonitorV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportDeleteMonitorV2Action.kt index b35381474..7da368fe1 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportDeleteMonitorV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportDeleteMonitorV2Action.kt @@ -47,102 +47,117 @@ private val log = LogManager.getLogger(TransportDeleteMonitorV2Action::class.jav * * @opensearch.experimental */ -class TransportDeleteMonitorV2Action @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val clusterService: ClusterService, - settings: Settings, - val xContentRegistry: NamedXContentRegistry -) : HandledTransportAction( - DeleteMonitorV2Action.NAME, 
transportService, actionFilters, ::DeleteMonitorV2Request -), - SecureTransportAction { - - @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) - - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } - listenFilterBySettingChange(clusterService) - } +class TransportDeleteMonitorV2Action + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val clusterService: ClusterService, + settings: Settings, + val xContentRegistry: NamedXContentRegistry, + ) : HandledTransportAction( + DeleteMonitorV2Action.NAME, + transportService, + actionFilters, + ::DeleteMonitorV2Request, + ), + SecureTransportAction { + @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) + + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } + listenFilterBySettingChange(clusterService) + } - override fun doExecute(task: Task, request: DeleteMonitorV2Request, actionListener: ActionListener) { - if (!alertingV2Enabled) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Alerting V2 is currently disabled, please enable it with the " + - "cluster setting: ${ALERTING_V2_ENABLED.key}.", - RestStatus.FORBIDDEN + override fun doExecute( + task: Task, + request: DeleteMonitorV2Request, + actionListener: ActionListener, + ) { + if (!alertingV2Enabled) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Alerting V2 is currently disabled, please enable it with the " + + "cluster setting: ${ALERTING_V2_ENABLED.key}.", + RestStatus.FORBIDDEN, + ), ), ) - ) - return - } + return + } - val user = 
readUserFromThreadContext(client) + val user = readUserFromThreadContext(client) - if (!validateUserBackendRoles(user, actionListener)) { - return - } + if (!validateUserBackendRoles(user, actionListener)) { + return + } - scope.launch { - try { - val monitorV2 = getMonitorV2(request.monitorV2Id, actionListener) ?: return@launch - - val canDelete = user == null || !doFilterForUser(user) || - checkUserPermissionsWithResource(user, monitorV2!!.user, actionListener, "monitor_v2", request.monitorV2Id) - - if (canDelete) { - val deleteResponse = - DeleteMonitorService.deleteMonitorV2(request.monitorV2Id, request.refreshPolicy) - actionListener.onResponse(deleteResponse) - } else { - actionListener.onFailure( - AlertingException( - "Not allowed to delete this Monitor V2", - RestStatus.FORBIDDEN, - IllegalStateException() + scope.launch { + try { + val monitorV2 = getMonitorV2(request.monitorV2Id, actionListener) ?: return@launch + + val canDelete = + user == null || !doFilterForUser(user) || + checkUserPermissionsWithResource(user, monitorV2!!.user, actionListener, "monitor_v2", request.monitorV2Id) + + if (canDelete) { + val deleteResponse = + DeleteMonitorService.deleteMonitorV2(request.monitorV2Id, request.refreshPolicy) + actionListener.onResponse(deleteResponse) + } else { + actionListener.onFailure( + AlertingException( + "Not allowed to delete this Monitor V2", + RestStatus.FORBIDDEN, + IllegalStateException(), + ), ) - ) + } + } catch (e: Exception) { + actionListener.onFailure(e) } - } catch (e: Exception) { - actionListener.onFailure(e) - } - // scheduled AlertV2Mover will sweep the alerts and find that this monitor no longer exists, - // and expire this monitor's alerts accordingly + // scheduled AlertV2Mover will sweep the alerts and find that this monitor no longer exists, + // and expire this monitor's alerts accordingly + } } - } - - private suspend fun getMonitorV2(monitorV2Id: String, actionListener: ActionListener): MonitorV2? 
{ - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorV2Id) - val getResponse: GetResponse = client.suspendUntil { get(getRequest, it) } - if (!getResponse.isExists) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Monitor V2 with $monitorV2Id is not found", RestStatus.NOT_FOUND) + private suspend fun getMonitorV2( + monitorV2Id: String, + actionListener: ActionListener, + ): MonitorV2? { + val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, monitorV2Id) + + val getResponse: GetResponse = client.suspendUntil { get(getRequest, it) } + if (!getResponse.isExists) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("Monitor V2 with $monitorV2Id is not found", RestStatus.NOT_FOUND), + ), ) - ) - return null - } + return null + } - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, XContentType.JSON - ) - val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, + XContentType.JSON, + ) + val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) - AlertingV2Utils.validateMonitorV2(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return null - } + AlertingV2Utils.validateMonitorV2(scheduledJob)?.let { + actionListener.onFailure(AlertingException.wrap(it)) + return null + } - val monitorV2 = scheduledJob as MonitorV2 + val monitorV2 = scheduledJob as MonitorV2 - return monitorV2 + return monitorV2 + } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportExecuteMonitorV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportExecuteMonitorV2Action.kt index 05055afef..14ead5841 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportExecuteMonitorV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportExecuteMonitorV2Action.kt @@ -50,180 +50,190 @@ private val log = LogManager.getLogger(TransportExecuteMonitorV2Action::class.ja * * @opensearch.experimental */ -class TransportExecuteMonitorV2Action @Inject constructor( - private val transportService: TransportService, - private val client: Client, - private val clusterService: ClusterService, - private val runner: MonitorRunnerService, - actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry, - private val settings: Settings -) : HandledTransportAction( - ExecuteMonitorV2Action.NAME, transportService, actionFilters, ::ExecuteMonitorV2Request -), - SecureTransportAction { - - @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) - - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } - listenFilterBySettingChange(clusterService) - } +class TransportExecuteMonitorV2Action + @Inject + constructor( + private val transportService: TransportService, + private val client: Client, + private val clusterService: ClusterService, + private val runner: MonitorRunnerService, + actionFilters: ActionFilters, + val xContentRegistry: NamedXContentRegistry, + private val settings: Settings, + ) : HandledTransportAction( + ExecuteMonitorV2Action.NAME, + transportService, + actionFilters, + ::ExecuteMonitorV2Request, + ), + SecureTransportAction { + @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) + + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } + 
listenFilterBySettingChange(clusterService) + } - override fun doExecute( - task: Task, - execMonitorV2Request: ExecuteMonitorV2Request, - actionListener: ActionListener - ) { - if (!alertingV2Enabled) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Alerting V2 is currently disabled, please enable it with the " + - "cluster setting: ${ALERTING_V2_ENABLED.key}", - RestStatus.FORBIDDEN + override fun doExecute( + task: Task, + execMonitorV2Request: ExecuteMonitorV2Request, + actionListener: ActionListener, + ) { + if (!alertingV2Enabled) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Alerting V2 is currently disabled, please enable it with the " + + "cluster setting: ${ALERTING_V2_ENABLED.key}", + RestStatus.FORBIDDEN, + ), ), ) - ) - return - } - - val userStr = client.threadPool().threadContext.getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT) - log.debug("User and roles string from thread context: $userStr") - val user: User? 
= User.parse(userStr) - - client.threadPool().threadContext.stashContext().use { - /* first define a function that will be used later to run MonitorV2s */ - val executeMonitorV2 = fun (monitorV2: MonitorV2) { - runner.launch { - // get execution end, this will be used to compute the execution interval - // via look back window (if one is supplied) - val periodEnd = Instant.ofEpochMilli(execMonitorV2Request.requestEnd.millis) + return + } - // call the MonitorRunnerService to execute the MonitorV2 - try { - val monitorV2Type = when (monitorV2) { - is PPLSQLMonitor -> PPL_SQL_MONITOR_TYPE - else -> throw IllegalStateException("Unexpected MonitorV2 type: ${monitorV2.javaClass.name}") - } - log.info( - "Executing MonitorV2 from API - id: ${monitorV2.id}, type: $monitorV2Type, " + - "periodEnd: $periodEnd, manual: ${execMonitorV2Request.manual}" - ) - val monitorV2RunResult = runner.runJobV2( - monitorV2, - periodEnd, - execMonitorV2Request.dryrun, - execMonitorV2Request.manual, - transportService - ) - withContext(Dispatchers.IO) { - actionListener.onResponse(ExecuteMonitorV2Response(monitorV2RunResult)) - } - } catch (e: Exception) { - log.error("Unexpected error running monitor", e) - withContext(Dispatchers.IO) { - actionListener.onFailure(AlertingException.wrap(e)) + val userStr = + client.threadPool().threadContext.getTransient( + ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + ) + log.debug("User and roles string from thread context: $userStr") + val user: User? 
= User.parse(userStr) + + client.threadPool().threadContext.stashContext().use { + // first define a function that will be used later to run MonitorV2s + val executeMonitorV2 = fun (monitorV2: MonitorV2) { + runner.launch { + // get execution end, this will be used to compute the execution interval + // via look back window (if one is supplied) + val periodEnd = Instant.ofEpochMilli(execMonitorV2Request.requestEnd.millis) + + // call the MonitorRunnerService to execute the MonitorV2 + try { + val monitorV2Type = + when (monitorV2) { + is PPLSQLMonitor -> PPL_SQL_MONITOR_TYPE + else -> throw IllegalStateException("Unexpected MonitorV2 type: ${monitorV2.javaClass.name}") + } + log.info( + "Executing MonitorV2 from API - id: ${monitorV2.id}, type: $monitorV2Type, " + + "periodEnd: $periodEnd, manual: ${execMonitorV2Request.manual}", + ) + val monitorV2RunResult = + runner.runJobV2( + monitorV2, + periodEnd, + execMonitorV2Request.dryrun, + execMonitorV2Request.manual, + transportService, + ) + withContext(Dispatchers.IO) { + actionListener.onResponse(ExecuteMonitorV2Response(monitorV2RunResult)) + } + } catch (e: Exception) { + log.error("Unexpected error running monitor", e) + withContext(Dispatchers.IO) { + actionListener.onFailure(AlertingException.wrap(e)) + } } } } - } - /* now execute the MonitorV2 */ + // now execute the MonitorV2 - // if both monitor_v2 id and object were passed in, ignore object and proceed with id - if (execMonitorV2Request.monitorV2Id != null && execMonitorV2Request.monitorV2 != null) { - log.info( - "Both a monitor_v2 id and monitor_v2 object were passed in to ExecuteMonitorV2" + - "request. Proceeding to execute by monitor_v2 ID and ignoring monitor_v2 object." 
- ) - } + // if both monitor_v2 id and object were passed in, ignore object and proceed with id + if (execMonitorV2Request.monitorV2Id != null && execMonitorV2Request.monitorV2 != null) { + log.info( + "Both a monitor_v2 id and monitor_v2 object were passed in to ExecuteMonitorV2" + + "request. Proceeding to execute by monitor_v2 ID and ignoring monitor_v2 object.", + ) + } - if (execMonitorV2Request.monitorV2Id != null) { // execute with monitor ID case - // search the alerting-config index for the MonitorV2 with this ID - val getMonitorV2Request = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX).id(execMonitorV2Request.monitorV2Id) - client.get( - getMonitorV2Request, - object : ActionListener { - override fun onResponse(getMonitorV2Response: GetResponse) { - if (!getMonitorV2Response.isExists) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Can't find monitorV2 with id: ${getMonitorV2Response.id} to execute", - RestStatus.NOT_FOUND - ) + if (execMonitorV2Request.monitorV2Id != null) { // execute with monitor ID case + // search the alerting-config index for the MonitorV2 with this ID + val getMonitorV2Request = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX).id(execMonitorV2Request.monitorV2Id) + client.get( + getMonitorV2Request, + object : ActionListener { + override fun onResponse(getMonitorV2Response: GetResponse) { + if (!getMonitorV2Response.isExists) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Can't find monitorV2 with id: ${getMonitorV2Response.id} to execute", + RestStatus.NOT_FOUND, + ), + ), ) - ) - return - } - - if (getMonitorV2Response.isSourceEmpty) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Found monitorV2 with id: ${getMonitorV2Response.id} but it was empty", - RestStatus.NO_CONTENT - ) + return + } + + if (getMonitorV2Response.isSourceEmpty) { + actionListener.onFailure( + AlertingException.wrap( + 
OpenSearchStatusException( + "Found monitorV2 with id: ${getMonitorV2Response.id} but it was empty", + RestStatus.NO_CONTENT, + ), + ), + ) + return + } + + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + getMonitorV2Response.sourceAsBytesRef, + XContentType.JSON, ) - ) - return - } - - val xcp = XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - getMonitorV2Response.sourceAsBytesRef, - XContentType.JSON - ) - val scheduledJob = ScheduledJob.parse(xcp, getMonitorV2Response.id, getMonitorV2Response.version) + val scheduledJob = ScheduledJob.parse(xcp, getMonitorV2Response.id, getMonitorV2Response.version) - validateMonitorV2(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return - } + validateMonitorV2(scheduledJob)?.let { + actionListener.onFailure(AlertingException.wrap(it)) + return + } - val monitorV2 = scheduledJob as MonitorV2 + val monitorV2 = scheduledJob as MonitorV2 - // security is enabled and filterby is enabled - // only run this check on manual executions, - // automatic scheduled job executions should - // bypass this check and proceed to execution - if (execMonitorV2Request.manual && - !checkUserPermissionsWithResource( + // security is enabled and filterby is enabled + // only run this check on manual executions, + // automatic scheduled job executions should + // bypass this check and proceed to execution + if (execMonitorV2Request.manual && + !checkUserPermissionsWithResource( user, monitorV2.user, actionListener, "monitor", - execMonitorV2Request.monitorV2Id + execMonitorV2Request.monitorV2Id, ) - ) { - return + ) { + return + } + + try { + executeMonitorV2(monitorV2) + } catch (e: Exception) { + actionListener.onFailure(AlertingException.wrap(e)) + } } - try { - executeMonitorV2(monitorV2) - } catch (e: Exception) { - actionListener.onFailure(AlertingException.wrap(e)) + override fun onFailure(t: Exception) { + 
actionListener.onFailure(AlertingException.wrap(t)) } - } - - override fun onFailure(t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) - } + }, + ) + } else { // execute with monitor object case + try { + val monitorV2 = execMonitorV2Request.monitorV2!!.makeCopy(user = user) + executeMonitorV2(monitorV2) + } catch (e: Exception) { + actionListener.onFailure(AlertingException.wrap(e)) } - ) - } else { // execute with monitor object case - try { - val monitorV2 = execMonitorV2Request.monitorV2!!.makeCopy(user = user) - executeMonitorV2(monitorV2) - } catch (e: Exception) { - actionListener.onFailure(AlertingException.wrap(e)) } } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportGetAlertsV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportGetAlertsV2Action.kt index 4a0cec89d..4689b4550 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportGetAlertsV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportGetAlertsV2Action.kt @@ -60,152 +60,165 @@ private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) * * @opensearch.experimental */ -class TransportGetAlertsV2Action @Inject constructor( - transportService: TransportService, - val client: Client, - clusterService: ClusterService, - actionFilters: ActionFilters, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, - val namedWriteableRegistry: NamedWriteableRegistry -) : HandledTransportAction( - GetAlertsV2Action.NAME, - transportService, - actionFilters, - ::GetAlertsV2Request -), - SecureTransportAction { - - @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) - - @Volatile - override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } - 
listenFilterBySettingChange(clusterService) - } +class TransportGetAlertsV2Action + @Inject + constructor( + transportService: TransportService, + val client: Client, + clusterService: ClusterService, + actionFilters: ActionFilters, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + val namedWriteableRegistry: NamedWriteableRegistry, + ) : HandledTransportAction( + GetAlertsV2Action.NAME, + transportService, + actionFilters, + ::GetAlertsV2Request, + ), + SecureTransportAction { + @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) + + @Volatile + override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } + listenFilterBySettingChange(clusterService) + } - override fun doExecute( - task: Task, - getAlertsV2Request: GetAlertsV2Request, - actionListener: ActionListener, - ) { - if (!alertingV2Enabled) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Alerting V2 is currently disabled, please enable it with the " + - "cluster setting: ${ALERTING_V2_ENABLED.key}", - RestStatus.FORBIDDEN + override fun doExecute( + task: Task, + getAlertsV2Request: GetAlertsV2Request, + actionListener: ActionListener, + ) { + if (!alertingV2Enabled) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Alerting V2 is currently disabled, please enable it with the " + + "cluster setting: ${ALERTING_V2_ENABLED.key}", + RestStatus.FORBIDDEN, + ), ), ) - ) - return - } + return + } - val user = readUserFromThreadContext(client) + val user = readUserFromThreadContext(client) - val tableProp = getAlertsV2Request.table - val sortBuilder = SortBuilders - .fieldSort(tableProp.sortString) - .order(SortOrder.fromString(tableProp.sortOrder)) - if (!tableProp.missing.isNullOrBlank()) { - sortBuilder.missing(tableProp.missing) - } + val 
tableProp = getAlertsV2Request.table + val sortBuilder = + SortBuilders + .fieldSort(tableProp.sortString) + .order(SortOrder.fromString(tableProp.sortOrder)) + if (!tableProp.missing.isNullOrBlank()) { + sortBuilder.missing(tableProp.missing) + } - val queryBuilder = QueryBuilders.boolQuery() + val queryBuilder = QueryBuilders.boolQuery() - if (getAlertsV2Request.severityLevel != "ALL") { - queryBuilder.filter(QueryBuilders.termQuery(SEVERITY_FIELD, getAlertsV2Request.severityLevel)) - } + if (getAlertsV2Request.severityLevel != "ALL") { + queryBuilder.filter(QueryBuilders.termQuery(SEVERITY_FIELD, getAlertsV2Request.severityLevel)) + } - if (!getAlertsV2Request.monitorV2Ids.isNullOrEmpty()) { - queryBuilder.filter(QueryBuilders.termsQuery(MONITOR_V2_ID_FIELD, getAlertsV2Request.monitorV2Ids)) - } + if (!getAlertsV2Request.monitorV2Ids.isNullOrEmpty()) { + queryBuilder.filter(QueryBuilders.termsQuery(MONITOR_V2_ID_FIELD, getAlertsV2Request.monitorV2Ids)) + } - if (!tableProp.searchString.isNullOrBlank()) { - queryBuilder - .must( - QueryBuilders - .queryStringQuery(tableProp.searchString) - .defaultOperator(Operator.AND) - .field(MONITOR_V2_NAME_FIELD) - .field(TRIGGER_V2_NAME_FIELD) - ) - } - val searchSourceBuilder = SearchSourceBuilder() - .version(true) - .seqNoAndPrimaryTerm(true) - .query(queryBuilder) - .sort(sortBuilder) - .size(tableProp.size) - .from(tableProp.startIndex) - - client.threadPool().threadContext.stashContext().use { - scope.launch { - try { - getAlerts(AlertV2Indices.ALERT_V2_INDEX, searchSourceBuilder, actionListener, user) - } catch (t: Exception) { - log.error("Failed to get alerts", t) - if (t is AlertingException) { - actionListener.onFailure(t) - } else { - actionListener.onFailure(AlertingException.wrap(t)) + if (!tableProp.searchString.isNullOrBlank()) { + queryBuilder + .must( + QueryBuilders + .queryStringQuery(tableProp.searchString) + .defaultOperator(Operator.AND) + .field(MONITOR_V2_NAME_FIELD) + .field(TRIGGER_V2_NAME_FIELD), 
+ ) + } + val searchSourceBuilder = + SearchSourceBuilder() + .version(true) + .seqNoAndPrimaryTerm(true) + .query(queryBuilder) + .sort(sortBuilder) + .size(tableProp.size) + .from(tableProp.startIndex) + + client.threadPool().threadContext.stashContext().use { + scope.launch { + try { + getAlerts(AlertV2Indices.ALERT_V2_INDEX, searchSourceBuilder, actionListener, user) + } catch (t: Exception) { + log.error("Failed to get alerts", t) + if (t is AlertingException) { + actionListener.onFailure(t) + } else { + actionListener.onFailure(AlertingException.wrap(t)) + } } } } } - } - fun getAlerts( - alertIndex: String, - searchSourceBuilder: SearchSourceBuilder, - actionListener: ActionListener, - user: User? - ) { - try { - // if user is null, security plugin is disabled or user is super-admin - // if doFilterForUser() is false, security is enabled but filterby is disabled - if (user != null && doFilterForUser(user)) { - // if security is enabled and filterby is enabled, add search filter - log.info("Filtering result by: ${user.backendRoles}") - addFilter(user, searchSourceBuilder, "$MONITOR_V2_USER_FIELD.$BACKEND_ROLES_FIELD.keyword") - } + fun getAlerts( + alertIndex: String, + searchSourceBuilder: SearchSourceBuilder, + actionListener: ActionListener, + user: User?, + ) { + try { + // if user is null, security plugin is disabled or user is super-admin + // if doFilterForUser() is false, security is enabled but filterby is disabled + if (user != null && doFilterForUser(user)) { + // if security is enabled and filterby is enabled, add search filter + log.info("Filtering result by: ${user.backendRoles}") + addFilter(user, searchSourceBuilder, "$MONITOR_V2_USER_FIELD.$BACKEND_ROLES_FIELD.keyword") + } - search(alertIndex, searchSourceBuilder, actionListener) - } catch (ex: IOException) { - actionListener.onFailure(AlertingException.wrap(ex)) + search(alertIndex, searchSourceBuilder, actionListener) + } catch (ex: IOException) { + 
actionListener.onFailure(AlertingException.wrap(ex)) + } } - } - fun search(alertIndex: String, searchSourceBuilder: SearchSourceBuilder, actionListener: ActionListener) { - val searchRequest = SearchRequest() - .indices(alertIndex) - .source(searchSourceBuilder) - - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(response: SearchResponse) { - val totalAlertCount = response.hits.totalHits?.value?.toInt() - val alerts = response.hits.map { hit -> - val xcp = XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON - ) - val alertV2 = AlertV2.parse(xcp, hit.id, hit.version) - alertV2 + fun search( + alertIndex: String, + searchSourceBuilder: SearchSourceBuilder, + actionListener: ActionListener, + ) { + val searchRequest = + SearchRequest() + .indices(alertIndex) + .source(searchSourceBuilder) + + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(response: SearchResponse) { + val totalAlertCount = + response.hits.totalHits + ?.value + ?.toInt() + val alerts = + response.hits.map { hit -> + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + val alertV2 = AlertV2.parse(xcp, hit.id, hit.version) + alertV2 + } + actionListener.onResponse(GetAlertsV2Response(alerts, totalAlertCount)) } - actionListener.onResponse(GetAlertsV2Response(alerts, totalAlertCount)) - } - override fun onFailure(t: Exception) { - actionListener.onFailure(t) - } - } - ) + override fun onFailure(t: Exception) { + actionListener.onFailure(t) + } + }, + ) + } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportGetMonitorV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportGetMonitorV2Action.kt index bc10421c7..ec981bfdf 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportGetMonitorV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportGetMonitorV2Action.kt @@ -46,127 +46,140 @@ private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) * * @opensearch.experimental */ -class TransportGetMonitorV2Action @Inject constructor( - transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val xContentRegistry: NamedXContentRegistry, - val clusterService: ClusterService, - settings: Settings, -) : HandledTransportAction( - GetMonitorV2Action.NAME, - transportService, - actionFilters, - ::GetMonitorV2Request -), - SecureTransportAction { - - @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) - - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } - listenFilterBySettingChange(clusterService) - } +class TransportGetMonitorV2Action + @Inject + constructor( + transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val xContentRegistry: NamedXContentRegistry, + val clusterService: ClusterService, + settings: Settings, + ) : HandledTransportAction( + GetMonitorV2Action.NAME, + transportService, + actionFilters, + ::GetMonitorV2Request, + ), + SecureTransportAction { + @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) + + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } + listenFilterBySettingChange(clusterService) + } - override fun doExecute(task: Task, request: GetMonitorV2Request, actionListener: ActionListener) { - if (!alertingV2Enabled) { - actionListener.onFailure( - 
AlertingException.wrap( - OpenSearchStatusException( - "Alerting V2 is currently disabled, please enable it with the " + - "cluster setting: ${ALERTING_V2_ENABLED.key}", - RestStatus.FORBIDDEN + override fun doExecute( + task: Task, + request: GetMonitorV2Request, + actionListener: ActionListener, + ) { + if (!alertingV2Enabled) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Alerting V2 is currently disabled, please enable it with the " + + "cluster setting: ${ALERTING_V2_ENABLED.key}", + RestStatus.FORBIDDEN, + ), ), ) - ) - return - } - - val getRequest = GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, request.monitorV2Id) - .version(request.version) - .fetchSourceContext(request.srcContext) - - val user = readUserFromThreadContext(client) - - if (!validateUserBackendRoles(user, actionListener)) { - return - } - - client.threadPool().threadContext.stashContext().use { - client.get( - getRequest, - object : ActionListener { - override fun onResponse(response: GetResponse) { - if (!response.isExists) { - actionListener.onFailure( - AlertingException.wrap(OpenSearchStatusException("MonitorV2 not found.", RestStatus.NOT_FOUND)) - ) - return - } - - if (response.isSourceEmpty) { - actionListener.onFailure( - AlertingException.wrap(OpenSearchStatusException("MonitorV2 found but was empty.", RestStatus.NO_CONTENT)) - ) - return - } + return + } + + val getRequest = + GetRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, request.monitorV2Id) + .version(request.version) + .fetchSourceContext(request.srcContext) + + val user = readUserFromThreadContext(client) + + if (!validateUserBackendRoles(user, actionListener)) { + return + } + + client.threadPool().threadContext.stashContext().use { + client.get( + getRequest, + object : ActionListener { + override fun onResponse(response: GetResponse) { + if (!response.isExists) { + actionListener.onFailure( + AlertingException.wrap(OpenSearchStatusException("MonitorV2 not found.", 
RestStatus.NOT_FOUND)), + ) + return + } + + if (response.isSourceEmpty) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException("MonitorV2 found but was empty.", RestStatus.NO_CONTENT), + ), + ) + return + } + + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, + XContentType.JSON, + ) - val xcp = XContentHelper.createParser( - xContentRegistry, - LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, - XContentType.JSON - ) + val scheduledJob = ScheduledJob.parse(xcp, response.id, response.version) - val scheduledJob = ScheduledJob.parse(xcp, response.id, response.version) + validateMonitorV2(scheduledJob)?.let { + actionListener.onFailure(AlertingException.wrap(it)) + return + } - validateMonitorV2(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return - } + val monitorV2 = scheduledJob as MonitorV2 - val monitorV2 = scheduledJob as MonitorV2 - - // security is enabled and filterby is enabled - if (!checkUserPermissionsWithResource( - user, - monitorV2.user, - actionListener, - "monitor", - request.monitorV2Id + // security is enabled and filterby is enabled + if (!checkUserPermissionsWithResource( + user, + monitorV2.user, + actionListener, + "monitor", + request.monitorV2Id, + ) + ) { + return + } + + actionListener.onResponse( + GetMonitorV2Response( + response.id, + response.version, + response.seqNo, + response.primaryTerm, + monitorV2, + ), ) - ) { - return } - actionListener.onResponse( - GetMonitorV2Response( - response.id, - response.version, - response.seqNo, - response.primaryTerm, - monitorV2 - ) - ) - } - - override fun onFailure(e: Exception) { - if (isIndexNotFoundException(e)) { - log.error("Index not found while getting monitor V2", e) - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("Monitor V2 not found. 
Backing index is missing.", RestStatus.NOT_FOUND, e) + override fun onFailure(e: Exception) { + if (isIndexNotFoundException(e)) { + log.error("Index not found while getting monitor V2", e) + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Monitor V2 not found. Backing index is missing.", + RestStatus.NOT_FOUND, + e, + ), + ), ) - ) - } else { - log.error("Unexpected error while getting monitor", e) - actionListener.onFailure(AlertingException.wrap(e)) + } else { + log.error("Unexpected error while getting monitor", e) + actionListener.onFailure(AlertingException.wrap(e)) + } } - } - } - ) + }, + ) + } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportIndexMonitorV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportIndexMonitorV2Action.kt index e12637fc6..80a5ed356 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportIndexMonitorV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportIndexMonitorV2Action.kt @@ -91,778 +91,838 @@ private val scope: CoroutineScope = CoroutineScope(Dispatchers.IO) * * @opensearch.experimental */ -class TransportIndexMonitorV2Action @Inject constructor( - val transportService: TransportService, - val client: Client, - actionFilters: ActionFilters, - val scheduledJobIndices: ScheduledJobIndices, - val clusterService: ClusterService, - val settings: Settings, - val xContentRegistry: NamedXContentRegistry, - val namedWriteableRegistry: NamedWriteableRegistry, -) : HandledTransportAction( - IndexMonitorV2Action.NAME, transportService, actionFilters, ::IndexMonitorV2Request -), - SecureTransportAction { - - // adjustable limits (via settings) - @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) - @Volatile private var maxMonitors = ALERTING_V2_MAX_MONITORS.get(settings) - @Volatile private var maxThrottleDuration = 
ALERTING_V2_MAX_THROTTLE_DURATION.get(settings) - @Volatile private var maxExpireDuration = ALERTING_V2_MAX_EXPIRE_DURATION.get(settings) - @Volatile private var maxLookBackWindow = ALERTING_V2_MAX_LOOK_BACK_WINDOW.get(settings) - @Volatile private var maxQueryLength = ALERTING_V2_MAX_QUERY_LENGTH.get(settings) - @Volatile private var maxQueryResults = ALERTING_V2_QUERY_RESULTS_MAX_DATAROWS.get(settings) - @Volatile private var notificationSubjectMaxLength = NOTIFICATION_SUBJECT_SOURCE_MAX_LENGTH.get(settings) - @Volatile private var notificationMessageMaxLength = NOTIFICATION_MESSAGE_SOURCE_MAX_LENGTH.get(settings) - @Volatile private var requestTimeout = REQUEST_TIMEOUT.get(settings) - @Volatile private var indexTimeout = INDEX_TIMEOUT.get(settings) - @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_MAX_MONITORS) { maxMonitors = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_MAX_THROTTLE_DURATION) { maxThrottleDuration = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_MAX_EXPIRE_DURATION) { maxExpireDuration = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_MAX_LOOK_BACK_WINDOW) { maxLookBackWindow = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_MAX_QUERY_LENGTH) { maxQueryLength = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_QUERY_RESULTS_MAX_DATAROWS) { maxQueryResults = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(NOTIFICATION_SUBJECT_SOURCE_MAX_LENGTH) { - notificationSubjectMaxLength = it - } - clusterService.clusterSettings.addSettingsUpdateConsumer(NOTIFICATION_MESSAGE_SOURCE_MAX_LENGTH) { - notificationMessageMaxLength = it +class TransportIndexMonitorV2Action + 
@Inject + constructor( + val transportService: TransportService, + val client: Client, + actionFilters: ActionFilters, + val scheduledJobIndices: ScheduledJobIndices, + val clusterService: ClusterService, + val settings: Settings, + val xContentRegistry: NamedXContentRegistry, + val namedWriteableRegistry: NamedWriteableRegistry, + ) : HandledTransportAction( + IndexMonitorV2Action.NAME, + transportService, + actionFilters, + ::IndexMonitorV2Request, + ), + SecureTransportAction { + // adjustable limits (via settings) + @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) + + @Volatile private var maxMonitors = ALERTING_V2_MAX_MONITORS.get(settings) + + @Volatile private var maxThrottleDuration = ALERTING_V2_MAX_THROTTLE_DURATION.get(settings) + + @Volatile private var maxExpireDuration = ALERTING_V2_MAX_EXPIRE_DURATION.get(settings) + + @Volatile private var maxLookBackWindow = ALERTING_V2_MAX_LOOK_BACK_WINDOW.get(settings) + + @Volatile private var maxQueryLength = ALERTING_V2_MAX_QUERY_LENGTH.get(settings) + + @Volatile private var maxQueryResults = ALERTING_V2_QUERY_RESULTS_MAX_DATAROWS.get(settings) + + @Volatile private var notificationSubjectMaxLength = NOTIFICATION_SUBJECT_SOURCE_MAX_LENGTH.get(settings) + + @Volatile private var notificationMessageMaxLength = NOTIFICATION_MESSAGE_SOURCE_MAX_LENGTH.get(settings) + + @Volatile private var requestTimeout = REQUEST_TIMEOUT.get(settings) + + @Volatile private var indexTimeout = INDEX_TIMEOUT.get(settings) + + @Volatile override var filterByEnabled = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) + + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_MAX_MONITORS) { maxMonitors = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_MAX_THROTTLE_DURATION) { maxThrottleDuration = it } + 
clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_MAX_EXPIRE_DURATION) { maxExpireDuration = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_MAX_LOOK_BACK_WINDOW) { maxLookBackWindow = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_MAX_QUERY_LENGTH) { maxQueryLength = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_QUERY_RESULTS_MAX_DATAROWS) { maxQueryResults = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(NOTIFICATION_SUBJECT_SOURCE_MAX_LENGTH) { + notificationSubjectMaxLength = it + } + clusterService.clusterSettings.addSettingsUpdateConsumer(NOTIFICATION_MESSAGE_SOURCE_MAX_LENGTH) { + notificationMessageMaxLength = it + } + clusterService.clusterSettings.addSettingsUpdateConsumer(REQUEST_TIMEOUT) { requestTimeout = it } + clusterService.clusterSettings.addSettingsUpdateConsumer(INDEX_TIMEOUT) { indexTimeout = it } + listenFilterBySettingChange(clusterService) } - clusterService.clusterSettings.addSettingsUpdateConsumer(REQUEST_TIMEOUT) { requestTimeout = it } - clusterService.clusterSettings.addSettingsUpdateConsumer(INDEX_TIMEOUT) { indexTimeout = it } - listenFilterBySettingChange(clusterService) - } - override fun doExecute( - task: Task, - indexMonitorV2Request: IndexMonitorV2Request, - actionListener: ActionListener - ) { - if (!alertingV2Enabled) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Alerting V2 is currently disabled, please enable it with the " + - "cluster setting: ${ALERTING_V2_ENABLED.key}", - RestStatus.FORBIDDEN + override fun doExecute( + task: Task, + indexMonitorV2Request: IndexMonitorV2Request, + actionListener: ActionListener, + ) { + if (!alertingV2Enabled) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Alerting V2 is currently disabled, please enable it with the " + + "cluster setting: ${ALERTING_V2_ENABLED.key}", + 
RestStatus.FORBIDDEN, + ), ), ) - ) - return - } + return + } - // read the user from thread context immediately, before - // downstream flows spin up new threads with fresh context - val user = readUserFromThreadContext(client) - - // validate the MonitorV2 based on its type - when (indexMonitorV2Request.monitorV2) { - is PPLSQLMonitor -> validatePplSqlMonitorUserPermissionsAndQuery( - indexMonitorV2Request, - user, - object : ActionListener { // validationListener - override fun onResponse(response: Unit) { - // user permissions to indices have already been checked - // proceed without the context of the user, otherwise, - // we would get permissions errors trying to search the alerting-config - // index as the user. pass the user object itself so backend - // roles can be matched and checked downstream - client.threadPool().threadContext.stashContext().use { - val pplSqlMonitor = indexMonitorV2Request.monitorV2 as PPLSQLMonitor - if (user == null) { - indexMonitorV2Request.monitorV2 = pplSqlMonitor - .copy(user = User("", listOf(), listOf(), mapOf())) - } else { - indexMonitorV2Request.monitorV2 = pplSqlMonitor - .copy(user = User(user.name, user.backendRoles, user.roles, user.customAttributes)) + // read the user from thread context immediately, before + // downstream flows spin up new threads with fresh context + val user = readUserFromThreadContext(client) + + // validate the MonitorV2 based on its type + when (indexMonitorV2Request.monitorV2) { + is PPLSQLMonitor -> { + validatePplSqlMonitorUserPermissionsAndQuery( + indexMonitorV2Request, + user, + object : ActionListener { // validationListener + override fun onResponse(response: Unit) { + // user permissions to indices have already been checked + // proceed without the context of the user, otherwise, + // we would get permissions errors trying to search the alerting-config + // index as the user. 
pass the user object itself so backend + // roles can be matched and checked downstream + client.threadPool().threadContext.stashContext().use { + val pplSqlMonitor = indexMonitorV2Request.monitorV2 as PPLSQLMonitor + if (user == null) { + indexMonitorV2Request.monitorV2 = + pplSqlMonitor + .copy(user = User("", listOf(), listOf(), mapOf())) + } else { + indexMonitorV2Request.monitorV2 = + pplSqlMonitor + .copy(user = User(user.name, user.backendRoles, user.roles, user.customAttributes)) + } + checkScheduledJobIndex(indexMonitorV2Request, actionListener, user) + } } - checkScheduledJobIndex(indexMonitorV2Request, actionListener, user) - } - } - override fun onFailure(e: Exception) { - actionListener.onFailure(e) - } + override fun onFailure(e: Exception) { + actionListener.onFailure(e) + } + }, + ) } - ) - else -> actionListener.onFailure( - AlertingException.wrap( - IllegalStateException( - "unexpected MonitorV2 type: ${indexMonitorV2Request.monitorV2.javaClass.name}" + + else -> { + actionListener.onFailure( + AlertingException.wrap( + IllegalStateException( + "unexpected MonitorV2 type: ${indexMonitorV2Request.monitorV2.javaClass.name}", + ), + ), ) - ) - ) + } + } } - } - // validates the PPL Monitor, its query, and user's permissions to the indices it queries by submitting it to SQL/PPL plugin - private fun validatePplSqlMonitorUserPermissionsAndQuery( - indexMonitorV2Request: IndexMonitorV2Request, - user: User?, - validationListener: ActionListener - ) { - client.threadPool().threadContext.stashContext().use { - scope.launch { - val singleThreadContext = newSingleThreadContext("IndexMonitorV2ActionThread") - withContext(singleThreadContext) { - it.restore() - - val pplSqlMonitor = indexMonitorV2Request.monitorV2 as PPLSQLMonitor - - val pplQueryValid = validatePplSqlQuery(pplSqlMonitor, validationListener) - if (!pplQueryValid) { - return@withContext - } + // validates the PPL Monitor, its query, and user's permissions to the indices it queries by submitting 
it to SQL/PPL plugin + private fun validatePplSqlMonitorUserPermissionsAndQuery( + indexMonitorV2Request: IndexMonitorV2Request, + user: User?, + validationListener: ActionListener, + ) { + client.threadPool().threadContext.stashContext().use { + scope.launch { + val singleThreadContext = newSingleThreadContext("IndexMonitorV2ActionThread") + withContext(singleThreadContext) { + it.restore() - // run basic validations against the PPL/SQL Monitor - val pplSqlMonitorValid = validatePplSqlMonitor(pplSqlMonitor, validationListener) - if (!pplSqlMonitorValid) { - return@withContext - } + val pplSqlMonitor = indexMonitorV2Request.monitorV2 as PPLSQLMonitor - // check the user for basic permissions - val userHasPermissions = checkUser(user, indexMonitorV2Request, validationListener) - if (!userHasPermissions) { - return@withContext - } + val pplQueryValid = validatePplSqlQuery(pplSqlMonitor, validationListener) + if (!pplQueryValid) { + return@withContext + } - // check that given timestamp field is valid - val timestampFieldValid = checkPplQueryIndicesForTimestampField(pplSqlMonitor, validationListener) - if (!timestampFieldValid) { - return@withContext - } + // run basic validations against the PPL/SQL Monitor + val pplSqlMonitorValid = validatePplSqlMonitor(pplSqlMonitor, validationListener) + if (!pplSqlMonitorValid) { + return@withContext + } + + // check the user for basic permissions + val userHasPermissions = checkUser(user, indexMonitorV2Request, validationListener) + if (!userHasPermissions) { + return@withContext + } - validationListener.onResponse(Unit) + // check that given timestamp field is valid + val timestampFieldValid = checkPplQueryIndicesForTimestampField(pplSqlMonitor, validationListener) + if (!timestampFieldValid) { + return@withContext + } + + validationListener.onResponse(Unit) + } } } } - } - private suspend fun validatePplSqlQuery( - pplSqlMonitor: PPLSQLMonitor, - validationListener: ActionListener - ): Boolean { - // first attempt to run the 
monitor query and all possible - // extensions of it (from custom conditions) - try { - // first run the base query as is. - // if there are any PPL syntax or index not found or other errors, - // this will throw an exception - executePplQuery(pplSqlMonitor.query, clusterService.state().nodes.localNode, transportService) - - // now scan all the triggers with custom conditions, and ensure each query constructed - // from the base query + custom condition is valid - for (pplTrigger in pplSqlMonitor.triggers) { - if (pplTrigger.conditionType != ConditionType.CUSTOM) { - continue - } + private suspend fun validatePplSqlQuery( + pplSqlMonitor: PPLSQLMonitor, + validationListener: ActionListener, + ): Boolean { + // first attempt to run the monitor query and all possible + // extensions of it (from custom conditions) + try { + // first run the base query as is. + // if there are any PPL syntax or index not found or other errors, + // this will throw an exception + executePplQuery(pplSqlMonitor.query, clusterService.state().nodes.localNode, transportService) + + // now scan all the triggers with custom conditions, and ensure each query constructed + // from the base query + custom condition is valid + for (pplTrigger in pplSqlMonitor.triggers) { + if (pplTrigger.conditionType != ConditionType.CUSTOM) { + continue + } + + val evalResultVar = findEvalResultVar(pplTrigger.customCondition!!) - val evalResultVar = findEvalResultVar(pplTrigger.customCondition!!) + val queryWithCustomCondition = appendCustomCondition(pplSqlMonitor.query, pplTrigger.customCondition!!) - val queryWithCustomCondition = appendCustomCondition(pplSqlMonitor.query, pplTrigger.customCondition!!) 
+ val executePplQueryResponse = + executePplQuery( + queryWithCustomCondition, + clusterService.state().nodes.localNode, + transportService, + ) - val executePplQueryResponse = executePplQuery( - queryWithCustomCondition, - clusterService.state().nodes.localNode, - transportService + val evalResultVarIdx = findEvalResultVarIdxInSchema(executePplQueryResponse, evalResultVar) + + val resultVarType = + executePplQueryResponse + .getJSONArray("schema") + .getJSONObject(evalResultVarIdx) + .getString("type") + + // custom conditions must evaluate to a boolean result, otherwise it's invalid + if (resultVarType != "boolean") { + validationListener.onFailure( + AlertingException.wrap( + IllegalArgumentException( + "Custom condition in trigger ${pplTrigger.name} is invalid because it does not " + + "evaluate to a boolean, but instead to type: $resultVarType", + ), + ), + ) + return false + } + } + } catch (e: Exception) { + validationListener.onFailure( + AlertingException.wrap( + IllegalArgumentException("Validation error for PPL Query in PPL Monitor: ${e.userErrorMessage()}"), + ), ) + return false + } - val evalResultVarIdx = findEvalResultVarIdxInSchema(executePplQueryResponse, evalResultVar) + return true + } - val resultVarType = executePplQueryResponse - .getJSONArray("schema") - .getJSONObject(evalResultVarIdx) - .getString("type") + private fun validatePplSqlMonitor( + pplSqlMonitor: PPLSQLMonitor, + validationListener: ActionListener, + ): Boolean { + // ensure the trigger throttle and expire durations are valid + pplSqlMonitor.triggers.forEach { trigger -> + trigger.throttleDuration?.let { throttleDuration -> + if (throttleDuration > maxThrottleDuration) { + validationListener.onFailure( + AlertingException.wrap( + IllegalArgumentException( + "Throttle duration must be at most $maxThrottleDuration but was $throttleDuration", + ), + ), + ) + return false + } + } - // custom conditions must evaluate to a boolean result, otherwise it's invalid - if (resultVarType != 
"boolean") { + if (trigger.expireDuration > maxExpireDuration) { validationListener.onFailure( AlertingException.wrap( IllegalArgumentException( - "Custom condition in trigger ${pplTrigger.name} is invalid because it does not " + - "evaluate to a boolean, but instead to type: $resultVarType" - ) - ) + "Expire duration must be at most $maxExpireDuration but was ${trigger.expireDuration}", + ), + ), ) return false } - } - } catch (e: Exception) { - validationListener.onFailure( - AlertingException.wrap( - IllegalArgumentException("Validation error for PPL Query in PPL Monitor: ${e.userErrorMessage()}") - ) - ) - return false - } - - return true - } - private fun validatePplSqlMonitor(pplSqlMonitor: PPLSQLMonitor, validationListener: ActionListener): Boolean { - // ensure the trigger throttle and expire durations are valid - pplSqlMonitor.triggers.forEach { trigger -> - trigger.throttleDuration?.let { throttleDuration -> - if (throttleDuration > maxThrottleDuration) { + if (trigger.conditionType == ConditionType.NUMBER_OF_RESULTS && + trigger.numResultsValue!! > maxQueryResults + ) { validationListener.onFailure( AlertingException.wrap( IllegalArgumentException( - "Throttle duration must be at most $maxThrottleDuration but was $throttleDuration" - ) - ) + "Trigger ${trigger.id} checks for number of results threshold of ${trigger.numResultsValue}, " + + "but Alerting V2 is configured only to retrieve $maxQueryResults query results maximum. " + + "Please lower the number of results value to one below this maximum value, or adjust the cluster " + + "setting: ${ALERTING_V2_QUERY_RESULTS_MAX_DATAROWS.key}", + ), + ), ) return false } - } - if (trigger.expireDuration > maxExpireDuration) { - validationListener.onFailure( - AlertingException.wrap( - IllegalArgumentException( - "Expire duration must be at most $maxExpireDuration but was ${trigger.expireDuration}" + trigger.actions.forEach { action -> + if (action.subjectTemplate?.idOrCode?.length!! 
> notificationSubjectMaxLength) { + validationListener.onFailure( + AlertingException.wrap( + IllegalArgumentException( + "Notification subject source cannot exceed length: $notificationSubjectMaxLength", + ), + ), ) - ) - ) - return false + return false + } + + if (action.messageTemplate.idOrCode.length > notificationMessageMaxLength) { + validationListener.onFailure( + AlertingException.wrap( + IllegalArgumentException( + "Notification message source cannot exceed length: $notificationMessageMaxLength", + ), + ), + ) + return false + } + } } - if (trigger.conditionType == ConditionType.NUMBER_OF_RESULTS && - trigger.numResultsValue!! > maxQueryResults - ) { + // ensure the query length doesn't exceed the limit + if (pplSqlMonitor.query.length > maxQueryLength) { validationListener.onFailure( AlertingException.wrap( IllegalArgumentException( - "Trigger ${trigger.id} checks for number of results threshold of ${trigger.numResultsValue}, " + - "but Alerting V2 is configured only to retrieve $maxQueryResults query results maximum. " + - "Please lower the number of results value to one below this maximum value, or adjust the cluster " + - "setting: $ALERTING_V2_QUERY_RESULTS_MAX_DATAROWS.key}" - ) - ) + "PPL Query length must be at most $maxQueryLength but was ${pplSqlMonitor.query.length}", + ), + ), ) return false } - trigger.actions.forEach { action -> - if (action.subjectTemplate?.idOrCode?.length!! 
> notificationSubjectMaxLength) { + // ensure the look back window doesn't exceed the limit + pplSqlMonitor.lookBackWindow?.let { + if (pplSqlMonitor.lookBackWindow > maxLookBackWindow) { validationListener.onFailure( AlertingException.wrap( IllegalArgumentException( - "Notification subject source cannot exceed length: $notificationSubjectMaxLength" - ) - ) + "Look back window must be at most $maxLookBackWindow minutes but was ${pplSqlMonitor.lookBackWindow}", + ), + ), ) return false } + } - if (action.messageTemplate.idOrCode.length > notificationMessageMaxLength) { + return true + } + + private fun checkUser( + user: User?, + indexMonitorV2Request: IndexMonitorV2Request, + validationListener: ActionListener, + ): Boolean { + // check initial user permissions + if (!validateUserBackendRoles(user, validationListener)) { + return false + } + + if ( + user != null && + !isAdmin(user) && + indexMonitorV2Request.rbacRoles != null + ) { + if (indexMonitorV2Request.rbacRoles.stream().anyMatch { !user.backendRoles.contains(it) }) { + log.debug( + "User specified backend roles, ${indexMonitorV2Request.rbacRoles}, " + + "that they don't have access to. User backend roles: ${user.backendRoles}", + ) validationListener.onFailure( AlertingException.wrap( - IllegalArgumentException( - "Notification message source cannot exceed length: $notificationMessageMaxLength" - ) - ) + OpenSearchStatusException( + "User specified backend roles that they don't have access to. Contact administrator", + RestStatus.FORBIDDEN, + ), + ), + ) + return false + } else if (indexMonitorV2Request.rbacRoles.isEmpty()) { + log.debug( + "Non-admin user are not allowed to specify an empty set of backend roles. 
" + + "Please don't pass in the parameter or pass in at least one backend role.", + ) + validationListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Non-admin user are not allowed to specify an empty set of backend roles.", + RestStatus.FORBIDDEN, + ), + ), ) return false } } - } - // ensure the query length doesn't exceed the limit - if (pplSqlMonitor.query.length > maxQueryLength) { - validationListener.onFailure( - AlertingException.wrap( - IllegalArgumentException( - "PPL Query length must be at most $maxQueryLength but was ${pplSqlMonitor.query.length}" - ) - ) - ) - return false + return true } - // ensure the look back window doesn't exceed the limit - pplSqlMonitor.lookBackWindow?.let { - if (pplSqlMonitor.lookBackWindow > maxLookBackWindow) { + // if look back window is specified, all the indices that the PPL query searches + // must contain the timestamp field specified in the PPL Monitor, and they must + // all be of OpenSearch data type "date" + private suspend fun checkPplQueryIndicesForTimestampField( + pplSqlMonitor: PPLSQLMonitor, + validationListener: ActionListener, + ): Boolean { + if (pplSqlMonitor.lookBackWindow == null) { + // if no look back window was specified, no need + // to check for timestamp field in PPL query indices + return true + } + + val pplQuery = pplSqlMonitor.query + val timestampField = pplSqlMonitor.timestampField + + try { + val indices = getIndicesFromPplQuery(pplQuery) + val getMappingsRequest = GetMappingsRequest().indices(*indices.toTypedArray()) + val getMappingsResponse = client.suspendUntil { admin().indices().getMappings(getMappingsRequest, it) } + + val metadataMap = getMappingsResponse.mappings + + for (index in metadataMap.keys) { + val metadata = metadataMap[index]!!.sourceAsMap["properties"] as Map + if (!metadata.keys.contains(timestampField)) { + validationListener.onFailure( + AlertingException.wrap( + IllegalArgumentException("Query index $index don't contain given timestamp 
field: $timestampField"), + ), + ) + return false + } + val typeInfo = metadata[timestampField] as Map + val type = typeInfo["type"] + val dateType = "date" + val dateNanosType = "date_nanos" + if (type != dateType && type != dateNanosType) { + validationListener.onFailure( + AlertingException.wrap( + IllegalArgumentException( + "Timestamp field: $timestampField is present in index $index " + + "but is type $type instead of $dateType or $dateNanosType", + ), + ), + ) + return false + } + } + } catch (e: Exception) { + log.error("failed to read query indices' fields when checking for timestamp field: $timestampField") validationListener.onFailure( AlertingException.wrap( IllegalArgumentException( - "Look back window must be at most $maxLookBackWindow minutes but was ${pplSqlMonitor.lookBackWindow}" - ) - ) + "failed to read query indices' fields when checking for timestamp field: $timestampField", + e, + ), + ), ) return false } + + return true } - return true - } + private fun checkScheduledJobIndex( + indexMonitorRequest: IndexMonitorV2Request, + actionListener: ActionListener, + user: User?, + ) { + // user permissions to indices have already been checked + // proceed without the context of the user, otherwise, + // we would get permissions errors trying to search the alerting-config + // index as the user + client.threadPool().threadContext.stashContext().use { + // check to see if alerting-config index (scheduled job index) is created and updated before indexing MonitorV2 into it + if (!scheduledJobIndices.scheduledJobIndexExists()) { // if alerting-config index doesn't exist, send request to create it + scheduledJobIndices.initScheduledJobIndex( + object : ActionListener { + override fun onResponse(response: CreateIndexResponse) { + onCreateMappingsResponse(response.isAcknowledged, indexMonitorRequest, actionListener, user) + } - private fun checkUser( - user: User?, - indexMonitorV2Request: IndexMonitorV2Request, - validationListener: ActionListener - ): 
Boolean { - /* check initial user permissions */ - if (!validateUserBackendRoles(user, validationListener)) { - return false + override fun onFailure(e: Exception) { + if (ExceptionsHelper.unwrapCause(e) is ResourceAlreadyExistsException) { + scope.launch { + // Wait for the yellow status + val clusterHealthRequest = + ClusterHealthRequest() + .indices(SCHEDULED_JOBS_INDEX) + .waitForYellowStatus() + val response: ClusterHealthResponse = + client.suspendUntil { + execute(ClusterHealthAction.INSTANCE, clusterHealthRequest, it) + } + if (response.isTimedOut) { + actionListener.onFailure( + OpenSearchException("Cannot determine that the $SCHEDULED_JOBS_INDEX index is healthy"), + ) + } + // Retry mapping of monitor + onCreateMappingsResponse(true, indexMonitorRequest, actionListener, user) + } + } else { + actionListener.onFailure(AlertingException.wrap(e)) + } + } + }, + ) + } else if (!IndexUtils.scheduledJobIndexUpdated) { + IndexUtils.updateIndexMapping( + SCHEDULED_JOBS_INDEX, + ScheduledJobIndices.scheduledJobMappings(), + clusterService.state(), + client.admin().indices(), + object : ActionListener { + override fun onResponse(response: AcknowledgedResponse) { + onUpdateMappingsResponse(response, indexMonitorRequest, actionListener, user) + } + + override fun onFailure(t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } + }, + ) + } else { + prepareMonitorIndexing(indexMonitorRequest, actionListener, user) + } + } } - if ( - user != null && - !isAdmin(user) && - indexMonitorV2Request.rbacRoles != null + private fun onCreateMappingsResponse( + isAcknowledged: Boolean, + request: IndexMonitorV2Request, + actionListener: ActionListener, + user: User?, ) { - if (indexMonitorV2Request.rbacRoles.stream().anyMatch { !user.backendRoles.contains(it) }) { - log.debug( - "User specified backend roles, ${indexMonitorV2Request.rbacRoles}, " + - "that they don't have access to. 
User backend roles: ${user.backendRoles}" - ) - validationListener.onFailure( + if (isAcknowledged) { + log.info("Created $SCHEDULED_JOBS_INDEX with mappings.") + prepareMonitorIndexing(request, actionListener, user) + IndexUtils.scheduledJobIndexUpdated() + } else { + log.info("Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") + actionListener.onFailure( AlertingException.wrap( OpenSearchStatusException( - "User specified backend roles that they don't have access to. Contact administrator", RestStatus.FORBIDDEN - ) - ) - ) - return false - } else if (indexMonitorV2Request.rbacRoles.isEmpty()) { - log.debug( - "Non-admin user are not allowed to specify an empty set of backend roles. " + - "Please don't pass in the parameter or pass in at least one backend role." + "Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged", + RestStatus.INTERNAL_SERVER_ERROR, + ), + ), ) - validationListener.onFailure( + } + } + + private fun onUpdateMappingsResponse( + response: AcknowledgedResponse, + indexMonitorRequest: IndexMonitorV2Request, + actionListener: ActionListener, + user: User?, + ) { + if (response.isAcknowledged) { + log.info("Updated $SCHEDULED_JOBS_INDEX with mappings.") + IndexUtils.scheduledJobIndexUpdated() + prepareMonitorIndexing(indexMonitorRequest, actionListener, user) + } else { + log.info("Update $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") + actionListener.onFailure( AlertingException.wrap( OpenSearchStatusException( - "Non-admin user are not allowed to specify an empty set of backend roles.", RestStatus.FORBIDDEN - ) - ) + "Updated $SCHEDULED_JOBS_INDEX mappings call not acknowledged.", + RestStatus.INTERNAL_SERVER_ERROR, + ), + ), ) - return false } } - return true - } + private fun prepareMonitorIndexing( + indexMonitorRequest: IndexMonitorV2Request, + actionListener: ActionListener, + user: User?, + ) { + if (indexMonitorRequest.method == RestRequest.Method.PUT) { // update monitor case + scope.launch { + 
updateMonitor(indexMonitorRequest, actionListener, user) + } + } else { // create monitor case + val query = QueryBuilders.boolQuery().filter(QueryBuilders.existsQuery(MONITOR_V2_TYPE)) + val searchSource = SearchSourceBuilder().query(query).timeout(requestTimeout) + val searchRequest = SearchRequest(SCHEDULED_JOBS_INDEX).source(searchSource) + + client.search( + searchRequest, + object : ActionListener { + override fun onResponse(searchResponse: SearchResponse) { + onMonitorCountSearchResponse(searchResponse, indexMonitorRequest, actionListener, user) + } - // if look back window is specified, all the indices that the PPL query searches - // must contain the timestamp field specified in the PPL Monitor, and they must - // all be of OpenSearch data type "date" - private suspend fun checkPplQueryIndicesForTimestampField( - pplSqlMonitor: PPLSQLMonitor, - validationListener: ActionListener - ): Boolean { - if (pplSqlMonitor.lookBackWindow == null) { - // if no look back window was specified, no need - // to check for timestamp field in PPL query indices - return true + override fun onFailure(t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) + } + }, + ) + } } - val pplQuery = pplSqlMonitor.query - val timestampField = pplSqlMonitor.timestampField + // Functions for Update Monitor flow - try { - val indices = getIndicesFromPplQuery(pplQuery) - val getMappingsRequest = GetMappingsRequest().indices(*indices.toTypedArray()) - val getMappingsResponse = client.suspendUntil { admin().indices().getMappings(getMappingsRequest, it) } - - val metadataMap = getMappingsResponse.mappings - - for (index in metadataMap.keys) { - val metadata = metadataMap[index]!!.sourceAsMap["properties"] as Map - if (!metadata.keys.contains(timestampField)) { - validationListener.onFailure( + private suspend fun updateMonitor( + indexMonitorRequest: IndexMonitorV2Request, + actionListener: ActionListener, + user: User?, + ) { + val getRequest = GetRequest(SCHEDULED_JOBS_INDEX, 
indexMonitorRequest.monitorId) + try { + val getResponse: GetResponse = client.suspendUntil { client.get(getRequest, it) } + if (!getResponse.isExists) { + actionListener.onFailure( AlertingException.wrap( - IllegalArgumentException("Query index $index don't contain given timestamp field: $timestampField") - ) + OpenSearchStatusException("MonitorV2 with ${indexMonitorRequest.monitorId} is not found", RestStatus.NOT_FOUND), + ), ) - return false + return } - val typeInfo = metadata[timestampField] as Map - val type = typeInfo["type"] - val dateType = "date" - val dateNanosType = "date_nanos" - if (type != dateType && type != dateNanosType) { - validationListener.onFailure( - AlertingException.wrap( - IllegalArgumentException( - "Timestamp field: $timestampField is present in index $index " + - "but is type $type instead of $dateType or $dateNanosType" - ) - ) + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + getResponse.sourceAsBytesRef, + XContentType.JSON, ) - return false - } - } - } catch (e: Exception) { - log.error("failed to read query indices' fields when checking for timestamp field: $timestampField") - validationListener.onFailure( - AlertingException.wrap( - IllegalArgumentException("failed to read query indices' fields when checking for timestamp field: $timestampField", e) - ) - ) - return false - } + val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) - return true - } + validateMonitorV2(scheduledJob)?.let { + actionListener.onFailure(AlertingException.wrap(it)) + return + } - private fun checkScheduledJobIndex( - indexMonitorRequest: IndexMonitorV2Request, - actionListener: ActionListener, - user: User? 
- ) { - // user permissions to indices have already been checked - // proceed without the context of the user, otherwise, - // we would get permissions errors trying to search the alerting-config - // index as the user - client.threadPool().threadContext.stashContext().use { - /* check to see if alerting-config index (scheduled job index) is created and updated before indexing MonitorV2 into it */ - if (!scheduledJobIndices.scheduledJobIndexExists()) { // if alerting-config index doesn't exist, send request to create it - scheduledJobIndices.initScheduledJobIndex(object : ActionListener { - override fun onResponse(response: CreateIndexResponse) { - onCreateMappingsResponse(response.isAcknowledged, indexMonitorRequest, actionListener, user) - } + val monitorV2 = scheduledJob as MonitorV2 - override fun onFailure(e: Exception) { - if (ExceptionsHelper.unwrapCause(e) is ResourceAlreadyExistsException) { - scope.launch { - // Wait for the yellow status - val clusterHealthRequest = ClusterHealthRequest() - .indices(SCHEDULED_JOBS_INDEX) - .waitForYellowStatus() - val response: ClusterHealthResponse = client.suspendUntil { - execute(ClusterHealthAction.INSTANCE, clusterHealthRequest, it) - } - if (response.isTimedOut) { - actionListener.onFailure( - OpenSearchException("Cannot determine that the $SCHEDULED_JOBS_INDEX index is healthy") - ) - } - // Retry mapping of monitor - onCreateMappingsResponse(true, indexMonitorRequest, actionListener, user) - } - } else { - actionListener.onFailure(AlertingException.wrap(e)) - } - } - }) - } else if (!IndexUtils.scheduledJobIndexUpdated) { - IndexUtils.updateIndexMapping( - SCHEDULED_JOBS_INDEX, - ScheduledJobIndices.scheduledJobMappings(), clusterService.state(), client.admin().indices(), - object : ActionListener { - override fun onResponse(response: AcknowledgedResponse) { - onUpdateMappingsResponse(response, indexMonitorRequest, actionListener, user) - } - override fun onFailure(t: Exception) { - 
actionListener.onFailure(AlertingException.wrap(t)) - } - } - ) - } else { - prepareMonitorIndexing(indexMonitorRequest, actionListener, user) + onGetMonitorResponseForUpdate(monitorV2, indexMonitorRequest, actionListener, user) + } catch (e: Exception) { + actionListener.onFailure(AlertingException.wrap(e)) } } - } - private fun onCreateMappingsResponse( - isAcknowledged: Boolean, - request: IndexMonitorV2Request, - actionListener: ActionListener, - user: User? - ) { - if (isAcknowledged) { - log.info("Created $SCHEDULED_JOBS_INDEX with mappings.") - prepareMonitorIndexing(request, actionListener, user) - IndexUtils.scheduledJobIndexUpdated() - } else { - log.info("Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Create $SCHEDULED_JOBS_INDEX mappings call not acknowledged", RestStatus.INTERNAL_SERVER_ERROR - ) + private suspend fun onGetMonitorResponseForUpdate( + existingMonitorV2: MonitorV2, + indexMonitorRequest: IndexMonitorV2Request, + actionListener: ActionListener, + user: User?, + ) { + log.info("user: $user") + log.info("monitor user: ${existingMonitorV2.user}") + if ( + !checkUserPermissionsWithResource( + user, + existingMonitorV2.user, + actionListener, + "monitor_v2", + indexMonitorRequest.monitorId, ) - ) - } - } + ) { + return + } - private fun onUpdateMappingsResponse( - response: AcknowledgedResponse, - indexMonitorRequest: IndexMonitorV2Request, - actionListener: ActionListener, - user: User? 
- ) { - if (response.isAcknowledged) { - log.info("Updated $SCHEDULED_JOBS_INDEX with mappings.") - IndexUtils.scheduledJobIndexUpdated() - prepareMonitorIndexing(indexMonitorRequest, actionListener, user) - } else { - log.info("Update $SCHEDULED_JOBS_INDEX mappings call not acknowledged.") - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Updated $SCHEDULED_JOBS_INDEX mappings call not acknowledged.", - RestStatus.INTERNAL_SERVER_ERROR - ) - ) - ) - } - } + var newMonitorV2 = indexMonitorRequest.monitorV2 - private fun prepareMonitorIndexing( - indexMonitorRequest: IndexMonitorV2Request, - actionListener: ActionListener, - user: User? - ) { - if (indexMonitorRequest.method == RestRequest.Method.PUT) { // update monitor case - scope.launch { - updateMonitor(indexMonitorRequest, actionListener, user) + // If both are enabled, use the current existing monitor enabled time, + // otherwise the next execution will be incorrect. + if (newMonitorV2.enabled && existingMonitorV2.enabled) { + newMonitorV2 = newMonitorV2.makeCopy(enabledTime = existingMonitorV2.enabledTime) } - } else { // create monitor case - val query = QueryBuilders.boolQuery().filter(QueryBuilders.existsQuery(MONITOR_V2_TYPE)) - val searchSource = SearchSourceBuilder().query(query).timeout(requestTimeout) - val searchRequest = SearchRequest(SCHEDULED_JOBS_INDEX).source(searchSource) - - client.search( - searchRequest, - object : ActionListener { - override fun onResponse(searchResponse: SearchResponse) { - onMonitorCountSearchResponse(searchResponse, indexMonitorRequest, actionListener, user) - } - override fun onFailure(t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) + /* + * On update monitor check which backend roles to associate to the monitor. + * Below are 2 examples of how the logic works + * + * Example 1, say we have a Monitor with backend roles [a, b, c, d] associated with it. 
+ * If I'm User A (non-admin user) and I have backend roles [a, b, c] associated with me and I make a request to update + * the Monitor's backend roles to [a, b]. This would mean that the roles to remove are [c] and the roles to add are [a, b]. + * The Monitor's backend roles would then be [a, b, d]. + * + * Example 2, say we have a Monitor with backend roles [a, b, c, d] associated with it. + * If I'm User A (admin user) and I have backend roles [a, b, c] associated with me and I make a request to update + * the Monitor's backend roles to [a, b]. This would mean that the roles to remove are [c, d] and the roles to add are [a, b]. + * The Monitor's backend roles would then be [a, b]. + */ + if (user != null) { + if (indexMonitorRequest.rbacRoles != null) { + if (isAdmin(user)) { + newMonitorV2 = + newMonitorV2.makeCopy( + user = User(user.name, indexMonitorRequest.rbacRoles, user.roles, user.customAttributes), + ) + } else { + // rolesToRemove: these are the backend roles to remove from the monitor + val rolesToRemove = user.backendRoles - indexMonitorRequest.rbacRoles + // remove the monitor's roles with rolesToRemove and add any roles passed into the request.rbacRoles + val updatedRbac = existingMonitorV2.user?.backendRoles.orEmpty() - rolesToRemove + indexMonitorRequest.rbacRoles + newMonitorV2 = + newMonitorV2.makeCopy( + user = User(user.name, updatedRbac, user.roles, user.customAttributes), + ) } + } else { + newMonitorV2 = + newMonitorV2 + .makeCopy(user = User(user.name, existingMonitorV2.user!!.backendRoles, user.roles, user.customAttributes)) } + log.info("Update monitor backend roles to: ${newMonitorV2.user?.backendRoles}") + } + + newMonitorV2 = newMonitorV2.makeCopy(schemaVersion = IndexUtils.scheduledJobIndexSchemaVersion) + val indexRequest = + IndexRequest(SCHEDULED_JOBS_INDEX) + .setRefreshPolicy(indexMonitorRequest.refreshPolicy) + .source(newMonitorV2.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) + 
.id(indexMonitorRequest.monitorId) + .routing(indexMonitorRequest.monitorId) + .timeout(indexTimeout) + + log.info( + "Updating monitor, ${existingMonitorV2.id}, from: ${existingMonitorV2.toXContentWithUser( + jsonBuilder(), + ToXContent.MapParams(mapOf("with_type" to "true")), + )} \n to: ${newMonitorV2.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))}", ) - } - } - /* Functions for Update Monitor flow */ - - private suspend fun updateMonitor( - indexMonitorRequest: IndexMonitorV2Request, - actionListener: ActionListener, - user: User? - ) { - val getRequest = GetRequest(SCHEDULED_JOBS_INDEX, indexMonitorRequest.monitorId) - try { - val getResponse: GetResponse = client.suspendUntil { client.get(getRequest, it) } - if (!getResponse.isExists) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException("MonitorV2 with ${indexMonitorRequest.monitorId} is not found", RestStatus.NOT_FOUND) + try { + val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } + val failureReasons = IndexUtils.checkShardsFailure(indexResponse) + if (failureReasons != null) { + actionListener.onFailure( + AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())), ) - ) - return - } - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - getResponse.sourceAsBytesRef, XContentType.JSON - ) - val scheduledJob = ScheduledJob.parse(xcp, getResponse.id, getResponse.version) + return + } - validateMonitorV2(scheduledJob)?.let { - actionListener.onFailure(AlertingException.wrap(it)) - return + actionListener.onResponse( + IndexMonitorV2Response( + indexResponse.id, + indexResponse.version, + indexResponse.seqNo, + indexResponse.primaryTerm, + newMonitorV2, + ), + ) + } catch (e: Exception) { + actionListener.onFailure(AlertingException.wrap(e)) } - - val monitorV2 = scheduledJob as MonitorV2 - - 
onGetMonitorResponseForUpdate(monitorV2, indexMonitorRequest, actionListener, user) - } catch (e: Exception) { - actionListener.onFailure(AlertingException.wrap(e)) } - } - private suspend fun onGetMonitorResponseForUpdate( - existingMonitorV2: MonitorV2, - indexMonitorRequest: IndexMonitorV2Request, - actionListener: ActionListener, - user: User? - ) { - log.info("user: $user") - log.info("monitor user: ${existingMonitorV2.user}") - if ( - !checkUserPermissionsWithResource( - user, - existingMonitorV2.user, - actionListener, - "monitor_v2", - indexMonitorRequest.monitorId - ) - ) { - return - } - - var newMonitorV2 = indexMonitorRequest.monitorV2 - - // If both are enabled, use the current existing monitor enabled time, - // otherwise the next execution will be incorrect. - if (newMonitorV2.enabled && existingMonitorV2.enabled) { - newMonitorV2 = newMonitorV2.makeCopy(enabledTime = existingMonitorV2.enabledTime) - } + // Functions for Create Monitor flow /** - * On update monitor check which backend roles to associate to the monitor. - * Below are 2 examples of how the logic works - * - * Example 1, say we have a Monitor with backend roles [a, b, c, d] associated with it. - * If I'm User A (non-admin user) and I have backend roles [a, b, c] associated with me and I make a request to update - * the Monitor's backend roles to [a, b]. This would mean that the roles to remove are [c] and the roles to add are [a, b]. - * The Monitor's backend roles would then be [a, b, d]. - * - * Example 2, say we have a Monitor with backend roles [a, b, c, d] associated with it. - * If I'm User A (admin user) and I have backend roles [a, b, c] associated with me and I make a request to update - * the Monitor's backend roles to [a, b]. This would mean that the roles to remove are [c, d] and the roles to add are [a, b]. - * The Monitor's backend roles would then be [a, b]. + * After searching for all existing monitors we validate the system can support another monitor to be created. 
*/ - if (user != null) { - if (indexMonitorRequest.rbacRoles != null) { - if (isAdmin(user)) { - newMonitorV2 = newMonitorV2.makeCopy( - user = User(user.name, indexMonitorRequest.rbacRoles, user.roles, user.customAttributes) - ) - } else { - // rolesToRemove: these are the backend roles to remove from the monitor - val rolesToRemove = user.backendRoles - indexMonitorRequest.rbacRoles - // remove the monitor's roles with rolesToRemove and add any roles passed into the request.rbacRoles - val updatedRbac = existingMonitorV2.user?.backendRoles.orEmpty() - rolesToRemove + indexMonitorRequest.rbacRoles - newMonitorV2 = newMonitorV2.makeCopy( - user = User(user.name, updatedRbac, user.roles, user.customAttributes) - ) - } - } else { - newMonitorV2 = newMonitorV2 - .makeCopy(user = User(user.name, existingMonitorV2.user!!.backendRoles, user.roles, user.customAttributes)) - } - log.info("Update monitor backend roles to: ${newMonitorV2.user?.backendRoles}") - } - - newMonitorV2 = newMonitorV2.makeCopy(schemaVersion = IndexUtils.scheduledJobIndexSchemaVersion) - val indexRequest = IndexRequest(SCHEDULED_JOBS_INDEX) - .setRefreshPolicy(indexMonitorRequest.refreshPolicy) - .source(newMonitorV2.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) - .id(indexMonitorRequest.monitorId) - .routing(indexMonitorRequest.monitorId) - .timeout(indexTimeout) - - log.info( - "Updating monitor, ${existingMonitorV2.id}, from: ${existingMonitorV2.toXContentWithUser( - jsonBuilder(), - ToXContent.MapParams(mapOf("with_type" to "true")) - )} \n to: ${newMonitorV2.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))}" - ) - - try { - val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } - val failureReasons = IndexUtils.checkShardsFailure(indexResponse) - if (failureReasons != null) { + private fun onMonitorCountSearchResponse( + monitorCountSearchResponse: SearchResponse, + 
indexMonitorRequest: IndexMonitorV2Request, + actionListener: ActionListener, + user: User?, + ) { + val totalHits = monitorCountSearchResponse.hits.totalHits?.value + if (totalHits != null && totalHits >= maxMonitors) { + log.info("This request would create more than the allowed monitors [$maxMonitors].") actionListener.onFailure( - AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())) + AlertingException.wrap( + IllegalArgumentException( + "This request would create more than the allowed monitors [$maxMonitors].", + ), + ), ) - return + } else { + scope.launch { + indexMonitor(indexMonitorRequest, actionListener, user) + } } - - actionListener.onResponse( - IndexMonitorV2Response( - indexResponse.id, indexResponse.version, indexResponse.seqNo, - indexResponse.primaryTerm, newMonitorV2 - ) - ) - } catch (e: Exception) { - actionListener.onFailure(AlertingException.wrap(e)) } - } - /* Functions for Create Monitor flow */ - - /** - * After searching for all existing monitors we validate the system can support another monitor to be created. - */ - private fun onMonitorCountSearchResponse( - monitorCountSearchResponse: SearchResponse, - indexMonitorRequest: IndexMonitorV2Request, - actionListener: ActionListener, - user: User? - ) { - val totalHits = monitorCountSearchResponse.hits.totalHits?.value - if (totalHits != null && totalHits >= maxMonitors) { - log.info("This request would create more than the allowed monitors [$maxMonitors].") - actionListener.onFailure( - AlertingException.wrap( - IllegalArgumentException( - "This request would create more than the allowed monitors [$maxMonitors]." + private suspend fun indexMonitor( + indexMonitorRequest: IndexMonitorV2Request, + actionListener: ActionListener, + user: User?, + ) { + var monitorV2 = indexMonitorRequest.monitorV2 + + if (user != null) { + // Use the backend roles which is an intersection of the requested backend roles and the user's backend roles. 
+ // Admins can pass in any backend role. Also if no backend role is passed in, all the user's backend roles are used. + val rbacRoles = + if (indexMonitorRequest.rbacRoles == null) { + user.backendRoles.toSet() + } else if (!isAdmin(user)) { + indexMonitorRequest.rbacRoles.intersect(user.backendRoles).toSet() + } else { + indexMonitorRequest.rbacRoles + } + + monitorV2 = + monitorV2.makeCopy( + user = User(user.name, rbacRoles.toList(), user.roles, user.customAttributes), ) - ) - ) - } else { - scope.launch { - indexMonitor(indexMonitorRequest, actionListener, user) + + log.debug("Created monitor's backend roles: $rbacRoles") } - } - } - private suspend fun indexMonitor( - indexMonitorRequest: IndexMonitorV2Request, - actionListener: ActionListener, - user: User? - ) { - var monitorV2 = indexMonitorRequest.monitorV2 - - if (user != null) { - // Use the backend roles which is an intersection of the requested backend roles and the user's backend roles. - // Admins can pass in any backend role. Also if no backend role is passed in, all the user's backend roles are used. 
- val rbacRoles = if (indexMonitorRequest.rbacRoles == null) user.backendRoles.toSet() - else if (!isAdmin(user)) indexMonitorRequest.rbacRoles.intersect(user.backendRoles).toSet() - else indexMonitorRequest.rbacRoles - - monitorV2 = monitorV2.makeCopy( - user = User(user.name, rbacRoles.toList(), user.roles, user.customAttributes) + val indexRequest = + IndexRequest(SCHEDULED_JOBS_INDEX) + .setRefreshPolicy(indexMonitorRequest.refreshPolicy) + .source(monitorV2.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) + .routing(indexMonitorRequest.monitorId) + .setIfSeqNo(indexMonitorRequest.seqNo) + .setIfPrimaryTerm(indexMonitorRequest.primaryTerm) + .timeout(indexTimeout) + + log.info( + "Creating new monitorV2: ${monitorV2.toXContentWithUser( + jsonBuilder(), + ToXContent.MapParams(mapOf("with_type" to "true")), + )}", ) - log.debug("Created monitor's backend roles: $rbacRoles") - } + try { + val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } + val failureReasons = IndexUtils.checkShardsFailure(indexResponse) + if (failureReasons != null) { + log.info(failureReasons.toString()) + actionListener.onFailure( + AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())), + ) + return + } - val indexRequest = IndexRequest(SCHEDULED_JOBS_INDEX) - .setRefreshPolicy(indexMonitorRequest.refreshPolicy) - .source(monitorV2.toXContentWithUser(jsonBuilder(), ToXContent.MapParams(mapOf("with_type" to "true")))) - .routing(indexMonitorRequest.monitorId) - .setIfSeqNo(indexMonitorRequest.seqNo) - .setIfPrimaryTerm(indexMonitorRequest.primaryTerm) - .timeout(indexTimeout) - - log.info( - "Creating new monitorV2: ${monitorV2.toXContentWithUser( - jsonBuilder(), - ToXContent.MapParams(mapOf("with_type" to "true")) - )}" - ) - - try { - val indexResponse: IndexResponse = client.suspendUntil { client.index(indexRequest, it) } - val failureReasons = 
IndexUtils.checkShardsFailure(indexResponse) - if (failureReasons != null) { - log.info(failureReasons.toString()) - actionListener.onFailure( - AlertingException.wrap(OpenSearchStatusException(failureReasons.toString(), indexResponse.status())) + actionListener.onResponse( + IndexMonitorV2Response( + indexResponse.id, + indexResponse.version, + indexResponse.seqNo, + indexResponse.primaryTerm, + monitorV2, + ), ) - return + } catch (t: Exception) { + actionListener.onFailure(AlertingException.wrap(t)) } - - actionListener.onResponse( - IndexMonitorV2Response( - indexResponse.id, indexResponse.version, indexResponse.seqNo, - indexResponse.primaryTerm, monitorV2 - ) - ) - } catch (t: Exception) { - actionListener.onFailure(AlertingException.wrap(t)) } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportSearchMonitorV2Action.kt b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportSearchMonitorV2Action.kt index 6a36e2874..313ed6d29 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportSearchMonitorV2Action.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/transportv2/TransportSearchMonitorV2Action.kt @@ -40,83 +40,96 @@ private val log = LogManager.getLogger(TransportSearchMonitorV2Action::class.jav * * @opensearch.experimental */ -class TransportSearchMonitorV2Action @Inject constructor( - transportService: TransportService, - val settings: Settings, - val client: Client, - clusterService: ClusterService, - actionFilters: ActionFilters, - val namedWriteableRegistry: NamedWriteableRegistry -) : HandledTransportAction( - SearchMonitorV2Action.NAME, transportService, actionFilters, ::SearchMonitorV2Request -), - SecureTransportAction { +class TransportSearchMonitorV2Action + @Inject + constructor( + transportService: TransportService, + val settings: Settings, + val client: Client, + clusterService: ClusterService, + actionFilters: ActionFilters, + val namedWriteableRegistry: 
NamedWriteableRegistry, + ) : HandledTransportAction( + SearchMonitorV2Action.NAME, + transportService, + actionFilters, + ::SearchMonitorV2Request, + ), + SecureTransportAction { + @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) - @Volatile private var alertingV2Enabled = ALERTING_V2_ENABLED.get(settings) + @Volatile + override var filterByEnabled: Boolean = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - @Volatile - override var filterByEnabled: Boolean = AlertingSettings.FILTER_BY_BACKEND_ROLES.get(settings) - - init { - clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } - listenFilterBySettingChange(clusterService) - } + init { + clusterService.clusterSettings.addSettingsUpdateConsumer(ALERTING_V2_ENABLED) { alertingV2Enabled = it } + listenFilterBySettingChange(clusterService) + } - override fun doExecute(task: Task, request: SearchMonitorV2Request, actionListener: ActionListener) { - if (!alertingV2Enabled) { - actionListener.onFailure( - AlertingException.wrap( - OpenSearchStatusException( - "Alerting V2 is currently disabled, please enable it with the " + - "cluster setting: ${ALERTING_V2_ENABLED.key}", - RestStatus.FORBIDDEN + override fun doExecute( + task: Task, + request: SearchMonitorV2Request, + actionListener: ActionListener, + ) { + if (!alertingV2Enabled) { + actionListener.onFailure( + AlertingException.wrap( + OpenSearchStatusException( + "Alerting V2 is currently disabled, please enable it with the " + + "cluster setting: ${ALERTING_V2_ENABLED.key}", + RestStatus.FORBIDDEN, + ), ), ) - ) - return - } + return + } - val searchSourceBuilder = request.searchRequest.source() + val searchSourceBuilder = request.searchRequest.source() - val queryBuilder = if (searchSourceBuilder.query() == null) BoolQueryBuilder() - else QueryBuilders.boolQuery().must(searchSourceBuilder.query()) + val queryBuilder = + if (searchSourceBuilder.query() == null) { + 
BoolQueryBuilder() + } else { + QueryBuilders.boolQuery().must(searchSourceBuilder.query()) + } - // filter out MonitorV1s in the alerting config index - // only return MonitorV2s that match the user-given search query - queryBuilder.filter(QueryBuilders.existsQuery(MONITOR_V2_TYPE)) + // filter out MonitorV1s in the alerting config index + // only return MonitorV2s that match the user-given search query + queryBuilder.filter(QueryBuilders.existsQuery(MONITOR_V2_TYPE)) - searchSourceBuilder.query(queryBuilder) - .seqNoAndPrimaryTerm(true) - .version(true) + searchSourceBuilder + .query(queryBuilder) + .seqNoAndPrimaryTerm(true) + .version(true) - val user = readUserFromThreadContext(client) - client.threadPool().threadContext.stashContext().use { - // if user is null, security plugin is disabled or user is super-admin - // if doFilterForUser() is false, security is enabled but filterby is disabled - if (user != null && doFilterForUser(user)) { - log.info("Filtering result by: ${user.backendRoles}") - addFilter(user, request.searchRequest.source(), "$MONITOR_V2_TYPE.$PPL_SQL_MONITOR_TYPE.user.backend_roles.keyword") - } + val user = readUserFromThreadContext(client) + client.threadPool().threadContext.stashContext().use { + // if user is null, security plugin is disabled or user is super-admin + // if doFilterForUser() is false, security is enabled but filterby is disabled + if (user != null && doFilterForUser(user)) { + log.info("Filtering result by: ${user.backendRoles}") + addFilter(user, request.searchRequest.source(), "$MONITOR_V2_TYPE.$PPL_SQL_MONITOR_TYPE.user.backend_roles.keyword") + } - client.search( - request.searchRequest, - object : ActionListener { - override fun onResponse(response: SearchResponse) { - actionListener.onResponse(response) - } + client.search( + request.searchRequest, + object : ActionListener { + override fun onResponse(response: SearchResponse) { + actionListener.onResponse(response) + } - override fun onFailure(e: Exception) { - if 
(isIndexNotFoundException(e)) { - log.error("Index not found while searching monitor", e) - val emptyResponse = getEmptySearchResponse() - actionListener.onResponse(emptyResponse) - } else { - log.error("Unexpected error while searching monitor", e) - actionListener.onFailure(AlertingException.wrap(e)) + override fun onFailure(e: Exception) { + if (isIndexNotFoundException(e)) { + log.error("Index not found while searching monitor", e) + val emptyResponse = getEmptySearchResponse() + actionListener.onResponse(emptyResponse) + } else { + log.error("Unexpected error while searching monitor", e) + actionListener.onFailure(AlertingException.wrap(e)) + } } - } - } - ) + }, + ) + } } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/parsers/TriggerExpressionParser.kt b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/parsers/TriggerExpressionParser.kt index 835e9b383..38cc3dec4 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/parsers/TriggerExpressionParser.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/parsers/TriggerExpressionParser.kt @@ -14,9 +14,8 @@ import org.opensearch.alerting.triggercondition.tokens.TriggerExpressionOperator * @param triggerExpression String containing the trigger expression for the monitor */ class TriggerExpressionParser( - triggerExpression: String + triggerExpression: String, ) : TriggerExpressionRPNBaseParser(triggerExpression) { - override fun parse(): TriggerExpressionRPNResolver { val expression = expressionToParse.replace(" ", "") @@ -30,7 +29,9 @@ class TriggerExpressionParser( breaks[ind].let { if (it.length > 1) { a.addAll(breakString(breaks[ind], s)) - } else a.add(it) + } else { + a.add(it) + } } } breaks.clear() @@ -40,7 +41,10 @@ class TriggerExpressionParser( return TriggerExpressionRPNResolver(convertInfixToPostfix(breaks)) } - private fun breakString(input: String, delimeter: String): ArrayList { + private fun 
breakString( + input: String, + delimeter: String, + ): ArrayList { val tokens = input.split(delimeter) val array = ArrayList() for (t in tokens) { diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/parsers/TriggerExpressionRPNBaseParser.kt b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/parsers/TriggerExpressionRPNBaseParser.kt index 6dd6bfc36..edf6a0444 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/parsers/TriggerExpressionRPNBaseParser.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/parsers/TriggerExpressionRPNBaseParser.kt @@ -19,7 +19,7 @@ import java.util.Stack * @param expressionToParse Complete string containing the trigger expression */ abstract class TriggerExpressionRPNBaseParser( - protected val expressionToParse: String + protected val expressionToParse: String, ) : ExpressionParser { /** * To perform the Infix-to-postfix conversion of the trigger expression @@ -31,24 +31,32 @@ abstract class TriggerExpressionRPNBaseParser( for (tokenString in expTokens) { if (tokenString.isEmpty()) continue when (val expToken = assignToken(tokenString)) { - is TriggerExpressionToken -> outputExpTokens.add(expToken) + is TriggerExpressionToken -> { + outputExpTokens.add(expToken) + } + is TriggerExpressionOperator -> { when (expToken) { - TriggerExpressionOperator.PAR_LEFT -> expTokenStack.push(expToken) + TriggerExpressionOperator.PAR_LEFT -> { + expTokenStack.push(expToken) + } + TriggerExpressionOperator.PAR_RIGHT -> { var topExpToken = expTokenStack.popExpTokenOrNull() while (topExpToken != null && topExpToken != TriggerExpressionOperator.PAR_LEFT) { outputExpTokens.add(topExpToken) topExpToken = expTokenStack.popExpTokenOrNull() } - if (topExpToken != TriggerExpressionOperator.PAR_LEFT) + if (topExpToken != TriggerExpressionOperator.PAR_LEFT) { throw java.lang.IllegalArgumentException("No matching left parenthesis.") + } } + else -> { var op2 = 
expTokenStack.peekExpTokenOrNull() while (op2 != null) { val c = expToken.precedence.compareTo(op2.precedence) - if (c < 0 || !expToken.rightAssociative && c <= 0) { + if (c < 0 || (!expToken.rightAssociative && c <= 0)) { outputExpTokens.add(expTokenStack.pop()) } else { break @@ -64,8 +72,9 @@ abstract class TriggerExpressionRPNBaseParser( while (!expTokenStack.isEmpty()) { expTokenStack.peekExpTokenOrNull()?.let { - if (it == TriggerExpressionOperator.PAR_LEFT) + if (it == TriggerExpressionOperator.PAR_LEFT) { throw java.lang.IllegalArgumentException("No matching right parenthesis.") + } } val top = expTokenStack.pop() outputExpTokens.add(top) @@ -78,10 +87,10 @@ abstract class TriggerExpressionRPNBaseParser( * Looks up and maps the expression token that matches the string version of that expression unit */ private fun assignToken(tokenString: String): ExpressionToken { - // Check "query" string in trigger expression such as in 'query[name="abc"]' - if (tokenString.startsWith(TriggerExpressionConstant.ConstantType.QUERY.ident)) + if (tokenString.startsWith(TriggerExpressionConstant.ConstantType.QUERY.ident)) { return TriggerExpressionToken(tokenString) + } // Check operators in trigger expression such as in [&&, ||, !] for (op in TriggerExpressionOperator.values()) { @@ -96,19 +105,17 @@ abstract class TriggerExpressionRPNBaseParser( throw IllegalArgumentException("Error while processing the trigger expression '$tokenString'") } - private inline fun Stack.popExpTokenOrNull(): T? { - return try { + private inline fun Stack.popExpTokenOrNull(): T? = + try { pop() as T } catch (e: java.lang.Exception) { null } - } - private inline fun Stack.peekExpTokenOrNull(): T? { - return try { + private inline fun Stack.peekExpTokenOrNull(): T? 
= + try { peek() as T } catch (e: java.lang.Exception) { null } - } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpression.kt b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpression.kt index 2a3e6c1ff..a9397bb2e 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpression.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpression.kt @@ -6,27 +6,41 @@ package org.opensearch.alerting.triggercondition.resolvers sealed class TriggerExpression { + fun resolve(): Set = + when (this) { + is And -> resolveAnd(docSet1, docSet2) + is Or -> resolveOr(docSet1, docSet2) + is Not -> resolveNot(allDocs, docSet2) + } - fun resolve(): Set = when (this) { - is And -> resolveAnd(docSet1, docSet2) - is Or -> resolveOr(docSet1, docSet2) - is Not -> resolveNot(allDocs, docSet2) - } + private fun resolveAnd( + documentSet1: Set, + documentSet2: Set, + ): Set = documentSet1.intersect(documentSet2) - private fun resolveAnd(documentSet1: Set, documentSet2: Set): Set { - return documentSet1.intersect(documentSet2) - } + private fun resolveOr( + documentSet1: Set, + documentSet2: Set, + ): Set = documentSet1.union(documentSet2) - private fun resolveOr(documentSet1: Set, documentSet2: Set): Set { - return documentSet1.union(documentSet2) - } - - private fun resolveNot(allDocs: Set, documentSet2: Set): Set { - return allDocs.subtract(documentSet2) - } + private fun resolveNot( + allDocs: Set, + documentSet2: Set, + ): Set = allDocs.subtract(documentSet2) // Operators implemented as operator functions - class And(val docSet1: Set, val docSet2: Set) : TriggerExpression() - class Or(val docSet1: Set, val docSet2: Set) : TriggerExpression() - class Not(val allDocs: Set, val docSet2: Set) : TriggerExpression() + class And( + val docSet1: Set, + val docSet2: Set, + ) : TriggerExpression() + + class Or( + val docSet1: 
Set, + val docSet2: Set, + ) : TriggerExpression() + + class Not( + val allDocs: Set, + val docSet2: Set, + ) : TriggerExpression() } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionRPNResolver.kt b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionRPNResolver.kt index 45937c8ab..bb1c96668 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionRPNResolver.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/resolvers/TriggerExpressionRPNResolver.kt @@ -18,9 +18,8 @@ import java.util.Stack * @param polishNotation an array of expression tokens organized in the RPN order */ class TriggerExpressionRPNResolver( - private val polishNotation: ArrayList + private val polishNotation: ArrayList, ) : TriggerExpressionResolver { - private val eqString by lazy { val stringBuilder = StringBuilder() for (expToken in polishNotation) { @@ -52,15 +51,19 @@ class TriggerExpressionRPNResolver( for (expToken in polishNotation) { when (expToken) { - is TriggerExpressionToken -> tokenStack.push(resolveQueryExpression(expToken.value, queryToDocIds)) + is TriggerExpressionToken -> { + tokenStack.push(resolveQueryExpression(expToken.value, queryToDocIds)) + } + is TriggerExpressionOperator -> { val right = tokenStack.pop() - val expr = when (expToken) { - TriggerExpressionOperator.AND -> TriggerExpression.And(tokenStack.pop(), right) - TriggerExpressionOperator.OR -> TriggerExpression.Or(tokenStack.pop(), right) - TriggerExpressionOperator.NOT -> TriggerExpression.Not(allDocIds, right) - else -> throw IllegalArgumentException("No matching operator.") - } + val expr = + when (expToken) { + TriggerExpressionOperator.AND -> TriggerExpression.And(tokenStack.pop(), right) + TriggerExpressionOperator.OR -> TriggerExpression.Or(tokenStack.pop(), right) + TriggerExpressionOperator.NOT -> TriggerExpression.Not(allDocIds, right) 
+ else -> throw IllegalArgumentException("No matching operator.") + } tokenStack.push(expr.resolve()) } } @@ -68,10 +71,15 @@ class TriggerExpressionRPNResolver( return tokenStack.pop() } - private fun resolveQueryExpression(queryExpString: String, queryToDocIds: Map>): Set { + private fun resolveQueryExpression( + queryExpString: String, + queryToDocIds: Map>, + ): Set { if (!queryExpString.startsWith(TriggerExpressionConstant.ConstantType.QUERY.ident)) return emptySet() - val token = queryExpString.substringAfter(TriggerExpressionConstant.ConstantType.BRACKET_LEFT.ident) - .substringBefore(TriggerExpressionConstant.ConstantType.BRACKET_RIGHT.ident) + val token = + queryExpString + .substringAfter(TriggerExpressionConstant.ConstantType.BRACKET_LEFT.ident) + .substringBefore(TriggerExpressionConstant.ConstantType.BRACKET_RIGHT.ident) if (token.isEmpty()) return emptySet() val tokens = token.split(TriggerExpressionConstant.ConstantType.EQUALS.ident) @@ -82,12 +90,20 @@ class TriggerExpressionRPNResolver( val documents = mutableSetOf() when (identifier) { TriggerExpressionConstant.ConstantType.NAME.ident -> { - val key: Optional = queryToDocIds.keys.stream().filter { it.name == value }.findFirst() + val key: Optional = + queryToDocIds.keys + .stream() + .filter { it.name == value } + .findFirst() if (key.isPresent) queryToDocIds[key.get()]?.let { doc -> documents.addAll(doc) } } TriggerExpressionConstant.ConstantType.ID.ident -> { - val key: Optional = queryToDocIds.keys.stream().filter { it.id == value }.findFirst() + val key: Optional = + queryToDocIds.keys + .stream() + .filter { it.id == value } + .findFirst() if (key.isPresent) queryToDocIds[key.get()]?.let { doc -> documents.addAll(doc) } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionConstant.kt b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionConstant.kt index 80e662a21..7e5ec1d4d 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionConstant.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionConstant.kt @@ -9,9 +9,12 @@ package org.opensearch.alerting.triggercondition.tokens * To define all the tokens which could be part of expression constant such as query[id=new_id], query[name=new_name], * query[tag=new_tag] */ -class TriggerExpressionConstant(val type: ConstantType) : ExpressionToken { - - enum class ConstantType(val ident: String) { +class TriggerExpressionConstant( + val type: ConstantType, +) : ExpressionToken { + enum class ConstantType( + val ident: String, + ) { QUERY("query"), TAG("tag"), @@ -21,6 +24,6 @@ class TriggerExpressionConstant(val type: ConstantType) : ExpressionToken { BRACKET_LEFT("["), BRACKET_RIGHT("]"), - EQUALS("=") + EQUALS("="), } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionOperator.kt b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionOperator.kt index de3c4a0df..0fb09990e 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionOperator.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionOperator.kt @@ -8,13 +8,16 @@ package org.opensearch.alerting.triggercondition.tokens /** * To define all the operators used in the trigger expression */ -enum class TriggerExpressionOperator(val value: String, val precedence: Int, val rightAssociative: Boolean) : ExpressionToken { - +enum class TriggerExpressionOperator( + val value: String, + val precedence: Int, + val rightAssociative: Boolean, +) : ExpressionToken { AND("&&", 2, false), OR("||", 2, false), NOT("!", 3, true), PAR_LEFT("(", 1, false), - PAR_RIGHT(")", 1, false) + PAR_RIGHT(")", 1, false), } diff --git 
a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionToken.kt b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionToken.kt index 808f7737d..b9f4f6d1f 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionToken.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/triggercondition/tokens/TriggerExpressionToken.kt @@ -8,4 +8,6 @@ package org.opensearch.alerting.triggercondition.tokens /** * To define the tokens in Trigger expression such as query[tag=“sev1"] or query[name=“sev1"] or query[id=“sev1"] */ -internal data class TriggerExpressionToken(val value: String) : ExpressionToken +internal data class TriggerExpressionToken( + val value: String, +) : ExpressionToken diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AggregationQueryRewriter.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AggregationQueryRewriter.kt index 74cedd59a..41aaf985f 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AggregationQueryRewriter.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/AggregationQueryRewriter.kt @@ -24,15 +24,16 @@ import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.search.sort.SortOrder class AggregationQueryRewriter { - companion object { /** * Add the bucket selector conditions for each trigger in input query. It also adds afterKeys from previous result * for each trigger. */ - fun rewriteQuery(query: SearchSourceBuilder, prevResult: InputRunResults?, triggers: List): SearchSourceBuilder { - return rewriteQuery(query, prevResult, triggers, false) - } + fun rewriteQuery( + query: SearchSourceBuilder, + prevResult: InputRunResults?, + triggers: List, + ): SearchSourceBuilder = rewriteQuery(query, prevResult, triggers, false) /** * Optionally adds support for returning sample documents for each bucket of data returned for a bucket level monitor. 
@@ -41,7 +42,7 @@ class AggregationQueryRewriter { query: SearchSourceBuilder, prevResult: InputRunResults?, triggers: List, - returnSampleDocs: Boolean = false + returnSampleDocs: Boolean = false, ): SearchSourceBuilder { triggers.forEach { trigger -> if (trigger is BucketLevelTrigger) { @@ -74,13 +75,15 @@ class AggregationQueryRewriter { val docFieldTags = parseSampleDocTags(listOf(trigger)) val sampleDocsAgg = getSampleDocAggs(factory) sampleDocsAgg.forEach { agg -> - if (docFieldTags.isNotEmpty()) agg.fetchSource( - FetchSourceContext( - true, - docFieldTags.toTypedArray(), - emptyArray() + if (docFieldTags.isNotEmpty()) { + agg.fetchSource( + FetchSourceContext( + true, + docFieldTags.toTypedArray(), + emptyArray(), + ), ) - ) + } if (!factory.subAggregations.contains(agg)) factory.subAggregation(agg) } } else { @@ -105,7 +108,7 @@ class AggregationQueryRewriter { fun getAfterKeysFromSearchResponse( searchResponse: SearchResponse, triggers: List, - prevBucketLevelTriggerAfterKeys: Map? 
+ prevBucketLevelTriggerAfterKeys: Map?, ): Map { val bucketLevelTriggerAfterKeys = mutableMapOf() triggers.forEach { trigger -> @@ -130,17 +133,21 @@ class AggregationQueryRewriter { */ val afterKey = lastAgg.afterKey() val prevTriggerAfterKey = prevBucketLevelTriggerAfterKeys?.get(trigger.id) - bucketLevelTriggerAfterKeys[trigger.id] = when { - // If the previous TriggerAfterKey was null, this should be the first page - prevTriggerAfterKey == null -> TriggerAfterKey(afterKey, afterKey == null) - // If the previous TriggerAfterKey already hit the last page, pass along the after key it used to get there - prevTriggerAfterKey.lastPage -> prevTriggerAfterKey - // If the previous TriggerAfterKey had not reached the last page and the after key for the current result - // is null, then the last page has been reached so the after key that was used to get there is stored - afterKey == null -> TriggerAfterKey(prevTriggerAfterKey.afterKey, true) - // Otherwise, update the after key to the current one - else -> TriggerAfterKey(afterKey, false) - } + bucketLevelTriggerAfterKeys[trigger.id] = + when { + // If the previous TriggerAfterKey was null, this should be the first page + prevTriggerAfterKey == null -> TriggerAfterKey(afterKey, afterKey == null) + + // If the previous TriggerAfterKey already hit the last page, pass along the after key it used to get there + prevTriggerAfterKey.lastPage -> prevTriggerAfterKey + + // If the previous TriggerAfterKey had not reached the last page and the after key for the current result + // is null, then the last page has been reached so the after key that was used to get there is stored + afterKey == null -> TriggerAfterKey(prevTriggerAfterKey.afterKey, true) + + // Otherwise, update the after key to the current one + else -> TriggerAfterKey(afterKey, false) + } } } } @@ -150,12 +157,13 @@ class AggregationQueryRewriter { @Suppress("UNCHECKED_CAST") private fun getSampleDocAggs(factory: CompositeAggregationBuilder): List { var 
defaultSortFields = listOf("_score") - val aggregations = factory.subAggregations.flatMap { - (it.convertToMap()[it.name] as Map).values.flatMap { field -> - field as Map - field.values + val aggregations = + factory.subAggregations.flatMap { + (it.convertToMap()[it.name] as Map).values.flatMap { field -> + field as Map + field.values + } } - } if (aggregations.isNotEmpty()) defaultSortFields = aggregations val lowHitsAgg = AggregationBuilders.topHits("low_hits").size(5) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt index 1bb92e838..68adeaef5 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/AlertingUtils.kt @@ -40,20 +40,25 @@ val MAX_SEARCH_SIZE = 10000 * Regex was based off of this post: https://stackoverflow.com/a/201378 */ fun isValidEmail(email: String): Boolean { - val validEmailPattern = Regex( - "(?:[a-z0-9!#\$%&'*+\\/=?^_`{|}~-]+(?:\\.[a-z0-9!#\$%&'*+\\/=?^_`{|}~-]+)*" + - "|\"(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21\\x23-\\x5b\\x5d-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])*\")" + - "@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?" + - "|\\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\\.){3}" + - "(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:" + - "(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21-\\x5a\\x53-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])+)\\])", - RegexOption.IGNORE_CASE - ) + val validEmailPattern = + Regex( + "(?:[a-z0-9!#\$%&'*+\\/=?^_`{|}~-]+(?:\\.[a-z0-9!#\$%&'*+\\/=?^_`{|}~-]+)*" + + "|\"(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21\\x23-\\x5b\\x5d-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])*\")" + + "@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?" 
+ + "|\\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\\.){3}" + + "(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:" + + "(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x21-\\x5a\\x53-\\x7f]|\\\\[\\x01-\\x09\\x0b\\x0c\\x0e-\\x7f])+)\\])", + RegexOption.IGNORE_CASE, + ) return validEmailPattern.matches(email) } -fun getRoleFilterEnabled(clusterService: ClusterService, settings: Settings, settingPath: String): Boolean { +fun getRoleFilterEnabled( + clusterService: ClusterService, + settings: Settings, + settingPath: String, +): Boolean { var adBackendRoleFilterEnabled: Boolean val metaData = clusterService.state().metadata() @@ -111,7 +116,7 @@ fun Action.getActionExecutionPolicy(monitor: Monitor): ActionExecutionPolicy? { } fun BucketLevelTriggerRunResult.getCombinedTriggerRunResult( - prevTriggerRunResult: BucketLevelTriggerRunResult? + prevTriggerRunResult: BucketLevelTriggerRunResult?, ): BucketLevelTriggerRunResult { if (prevTriggerRunResult == null) return this @@ -131,13 +136,13 @@ fun defaultToPerExecutionAction( monitorId: String, triggerId: String, totalActionableAlertCount: Int, - monitorOrTriggerError: Exception? + monitorOrTriggerError: Exception?, ): Boolean { // If the monitorId or triggerResult has an error, then also default to PER_EXECUTION to communicate the error if (monitorOrTriggerError != null) { logger.debug( "Trigger [$triggerId] in monitor [$monitorId] encountered an error. Defaulting to " + - "[${ActionExecutionScope.Type.PER_EXECUTION}] for action execution to communicate error." + "[${ActionExecutionScope.Type.PER_EXECUTION}] for action execution to communicate error.", ) return true } @@ -151,7 +156,7 @@ fun defaultToPerExecutionAction( logger.debug( "The total actionable alerts for trigger [$triggerId] in monitor [$monitorId] is [$totalActionableAlertCount] " + "which exceeds the maximum of [$maxActionableAlertCount]. " + - "Defaulting to [${ActionExecutionScope.Type.PER_EXECUTION}] for action execution." 
+ "Defaulting to [${ActionExecutionScope.Type.PER_EXECUTION}] for action execution.", ) return true } @@ -197,14 +202,20 @@ fun getCancelAfterTimeInterval(): Long { * * The suppressed exception is added to the list of suppressed exceptions of [cause] exception. */ -fun ThreadContext.StoredContext.closeFinally(cause: Throwable?) = when (cause) { - null -> close() - else -> try { - close() - } catch (closeException: Throwable) { - cause.addSuppressed(closeException) +fun ThreadContext.StoredContext.closeFinally(cause: Throwable?) = + when (cause) { + null -> { + close() + } + + else -> { + try { + close() + } catch (closeException: Throwable) { + cause.addSuppressed(closeException) + } + } } -} /** * Mustache template supports iterating through a list using a `{{#listVariable}}{{/listVariable}}` block. @@ -229,10 +240,12 @@ fun parseSampleDocTags(messageTemplate: Script): Set { // Sample start/end of -1 indicates there are no more complete sample blocks while (blockStart != -1 && blockEnd != -1) { // Isolate the sample block - val sampleBlock = messageTemplate.idOrCode.substring(blockStart, blockEnd) - // Remove the iteration wrapper tags - .removePrefix(sampleBlockPrefix) - .removeSuffix(sampleBlockSuffix) + val sampleBlock = + messageTemplate.idOrCode + .substring(blockStart, blockEnd) + // Remove the iteration wrapper tags + .removePrefix(sampleBlockPrefix) + .removeSuffix(sampleBlockSuffix) // Search for each tag tagRegex.findAll(sampleBlock).forEach { match -> @@ -254,11 +267,11 @@ fun parseSampleDocTags(messageTemplate: Script): Set { return tags } -fun parseSampleDocTags(triggers: List): Set { - return triggers.flatMap { trigger -> - trigger.actions.flatMap { action -> parseSampleDocTags(action.messageTemplate) } - }.toSet() -} +fun parseSampleDocTags(triggers: List): Set = + triggers + .flatMap { trigger -> + trigger.actions.flatMap { action -> parseSampleDocTags(action.messageTemplate) } + }.toSet() /** * Checks the `message_template.source` in the [Script] 
for each [Action] in the [Trigger] for @@ -267,23 +280,27 @@ fun parseSampleDocTags(triggers: List): Set { */ fun printsSampleDocData(trigger: Trigger): Boolean { return trigger.actions.any { action -> - val alertsField = when (trigger) { - is BucketLevelTrigger -> "{{ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}}" - is DocumentLevelTrigger -> "{{ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}}" - // Only bucket, and document level monitors are supported currently. - else -> return false - } + val alertsField = + when (trigger) { + is BucketLevelTrigger -> "{{ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}}" + + is DocumentLevelTrigger -> "{{ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}}" + + // Only bucket, and document level monitors are supported currently. + else -> return false + } // TODO: Consider excluding the following tags from TRUE criteria (especially for bucket-level triggers) as // printing all of the sample documents could make the notification message too large to send. // 1. {{ctx}} - prints entire ctx object in the message string // 2. {{ctx.}} - prints entire alerts array in the message string, which includes the sample docs // 3. 
{{AlertContext.SAMPLE_DOCS_FIELD}} - prints entire sample docs array in the message string - val validTags = listOfNotNull( - "{{ctx}}", - alertsField, - AlertContext.SAMPLE_DOCS_FIELD - ) + val validTags = + listOfNotNull( + "{{ctx}}", + alertsField, + AlertContext.SAMPLE_DOCS_FIELD, + ) validTags.any { tag -> action.messageTemplate.idOrCode.contains(tag) } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtils.kt index e83f45a15..e2b0e8ee1 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtils.kt @@ -32,7 +32,10 @@ fun isADMonitor(monitor: Monitor): Boolean { return false } -fun addUserBackendRolesFilter(user: User?, searchSourceBuilder: SearchSourceBuilder): SearchSourceBuilder { +fun addUserBackendRolesFilter( + user: User?, + searchSourceBuilder: SearchSourceBuilder, +): SearchSourceBuilder { var boolQueryBuilder = BoolQueryBuilder() val userFieldName = "user" val userBackendRoleFieldName = "user.backend_roles.keyword" diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/CommentsUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/CommentsUtils.kt index d24959452..db41383eb 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/CommentsUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/CommentsUtils.kt @@ -34,7 +34,10 @@ class CommentsUtils { companion object { // Searches through all Comments history indices and returns a list of all Comments associated // with the Entities given by the list of Entity IDs - suspend fun getCommentsByAlertIDs(client: Client, alertIDs: List): List { + suspend fun getCommentsByAlertIDs( + client: Client, + alertIDs: List, + ): List { val queryBuilder = QueryBuilders.boolQuery().must(QueryBuilders.termsQuery("entity_id", alertIDs)) val 
searchSourceBuilder = SearchSourceBuilder() @@ -48,23 +51,28 @@ class CommentsUtils { .source(searchSourceBuilder) val searchResponse: SearchResponse = client.suspendUntil { search(searchRequest, it) } - val comments = searchResponse.hits.map { hit -> - val xcp = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val comment = Comment.parse(xcp, hit.id) - comment - } + val comments = + searchResponse.hits.map { hit -> + val xcp = + XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val comment = Comment.parse(xcp, hit.id) + comment + } return comments } // Identical to getCommentsByAlertIDs, just returns list of Comment IDs instead of list of Comment objects - suspend fun getCommentIDsByAlertIDs(client: Client, alertIDs: List): List { + suspend fun getCommentIDsByAlertIDs( + client: Client, + alertIDs: List, + ): List { val comments = getCommentsByAlertIDs(client, alertIDs) return comments.map { it.id } } @@ -76,7 +84,7 @@ class CommentsUtils { suspend fun getCommentsForAlertNotification( client: Client, alertIds: List, - maxComments: Int + maxComments: Int, ): Map> { val allComments = getCommentsByAlertIDs(client, alertIds) val sortedComments = allComments.sortedByDescending { it.createdTime } @@ -93,20 +101,25 @@ class CommentsUtils { } // Deletes all Comments given by the list of Comments IDs - suspend fun deleteComments(client: Client, commentIDs: List) { + suspend fun deleteComments( + client: Client, + commentIDs: List, + ) { if (commentIDs.isEmpty()) return - val deleteResponse: BulkByScrollResponse = suspendCoroutine { cont -> - DeleteByQueryRequestBuilder(client, 
DeleteByQueryAction.INSTANCE) - .source(ALL_COMMENTS_INDEX_PATTERN) - .filter(QueryBuilders.boolQuery().must(QueryBuilders.termsQuery("_id", commentIDs))) - .refresh(true) - .execute( - object : ActionListener { - override fun onResponse(response: BulkByScrollResponse) = cont.resume(response) - override fun onFailure(t: Exception) = cont.resumeWithException(t) - } - ) - } + val deleteResponse: BulkByScrollResponse = + suspendCoroutine { cont -> + DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) + .source(ALL_COMMENTS_INDEX_PATTERN) + .filter(QueryBuilders.boolQuery().must(QueryBuilders.termsQuery("_id", commentIDs))) + .refresh(true) + .execute( + object : ActionListener { + override fun onResponse(response: BulkByScrollResponse) = cont.resume(response) + + override fun onFailure(t: Exception) = cont.resumeWithException(t) + }, + ) + } deleteResponse.bulkFailures.forEach { log.error("Failed to delete Comment. Comment ID: [${it.id}] cause: [${it.cause}] ") } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/CrossClusterMonitorUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/CrossClusterMonitorUtils.kt index ade8494f0..b41a86067 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/CrossClusterMonitorUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/CrossClusterMonitorUtils.kt @@ -16,7 +16,6 @@ import org.opensearch.transport.client.node.NodeClient class CrossClusterMonitorUtils { companion object { - /** * Uses the monitor inputs to determine whether the monitor makes calls to remote clusters. * @param monitor The monitor to evaluate. @@ -24,7 +23,10 @@ class CrossClusterMonitorUtils { * @return TRUE if the monitor makes calls to remote clusters; otherwise returns FALSE. 
*/ @JvmStatic - fun isRemoteMonitor(monitor: Monitor, localClusterName: String): Boolean { + fun isRemoteMonitor( + monitor: Monitor, + localClusterName: String, + ): Boolean { var isRemoteMonitor = false monitor.inputs.forEach inputCheck@{ when (it) { @@ -36,6 +38,7 @@ class CrossClusterMonitorUtils { } } } + is SearchInput -> { // Remote indexes follow the pattern ":". // Index entries without a CLUSTER_NAME indicate they're store on the local cluster. @@ -47,10 +50,12 @@ class CrossClusterMonitorUtils { } } } + is DocLevelMonitorInput -> { // TODO: When document level monitors are supported, this check will be similar to SearchInput. throw IllegalArgumentException("Per document monitors do not currently support cross-cluster search.") } + else -> { throw IllegalArgumentException("Unsupported input type: ${it.name()}.") } @@ -66,9 +71,10 @@ class CrossClusterMonitorUtils { * @return TRUE if the monitor makes calls to remote clusters; otherwise returns FALSE. */ @JvmStatic - fun isRemoteMonitor(monitor: Monitor, clusterService: ClusterService): Boolean { - return isRemoteMonitor(monitor = monitor, localClusterName = clusterService.clusterName.value()) - } + fun isRemoteMonitor( + monitor: Monitor, + clusterService: ClusterService, + ): Boolean = isRemoteMonitor(monitor = monitor, localClusterName = clusterService.clusterName.value()) /** * Parses the list of indexes into a map of CLUSTER_NAME to List. 
@@ -77,7 +83,10 @@ class CrossClusterMonitorUtils { * @return A map of CLUSTER_NAME to List */ @JvmStatic - fun separateClusterIndexes(indexes: List, localClusterName: String): HashMap> { + fun separateClusterIndexes( + indexes: List, + localClusterName: String, + ): HashMap> { val output = hashMapOf>() indexes.forEach { index -> var clusterName = parseClusterName(index) @@ -99,9 +108,11 @@ class CrossClusterMonitorUtils { * @return A map of CLUSTER_NAME to List */ @JvmStatic - fun separateClusterIndexes(indexes: List, clusterService: ClusterService): HashMap> { - return separateClusterIndexes(indexes = indexes, localClusterName = clusterService.clusterName.value()) - } + fun separateClusterIndexes( + indexes: List, + clusterService: ClusterService, + ): HashMap> = + separateClusterIndexes(indexes = indexes, localClusterName = clusterService.clusterName.value()) /** * The [NodeClient] used by the plugin cannot execute searches against local indexes @@ -113,8 +124,11 @@ class CrossClusterMonitorUtils { * and any local indexes in "" format. */ @JvmStatic - fun parseIndexesForRemoteSearch(indexes: List, localClusterName: String): List { - return indexes.map { + fun parseIndexesForRemoteSearch( + indexes: List, + localClusterName: String, + ): List = + indexes.map { var index = it val clusterName = parseClusterName(it) if (clusterName.isNotEmpty() && clusterName == localClusterName) { @@ -122,7 +136,6 @@ class CrossClusterMonitorUtils { } index } - } /** * The [NodeClient] used by the plugin cannot execute searches against local indexes @@ -134,9 +147,10 @@ class CrossClusterMonitorUtils { * and any local indexes in "" format. 
*/ @JvmStatic - fun parseIndexesForRemoteSearch(indexes: List, clusterService: ClusterService): List { - return parseIndexesForRemoteSearch(indexes = indexes, localClusterName = clusterService.clusterName.value()) - } + fun parseIndexesForRemoteSearch( + indexes: List, + clusterService: ClusterService, + ): List = parseIndexesForRemoteSearch(indexes = indexes, localClusterName = clusterService.clusterName.value()) /** * Uses the clusterName to determine whether the target client is the local or a remote client, @@ -147,9 +161,11 @@ class CrossClusterMonitorUtils { * @return The local [NodeClient] for the local cluster, or a remote client for a remote cluster. */ @JvmStatic - fun getClientForCluster(clusterName: String, client: Client, localClusterName: String): Client { - return if (clusterName == localClusterName) client else client.getRemoteClusterClient(clusterName) - } + fun getClientForCluster( + clusterName: String, + client: Client, + localClusterName: String, + ): Client = if (clusterName == localClusterName) client else client.getRemoteClusterClient(clusterName) /** * Uses the clusterName to determine whether the target client is the local or a remote client, @@ -160,9 +176,11 @@ class CrossClusterMonitorUtils { * @return The local [NodeClient] for the local cluster, or a remote client for a remote cluster. 
*/ @JvmStatic - fun getClientForCluster(clusterName: String, client: Client, clusterService: ClusterService): Client { - return getClientForCluster(clusterName = clusterName, client = client, localClusterName = clusterService.clusterName.value()) - } + fun getClientForCluster( + clusterName: String, + client: Client, + clusterService: ClusterService, + ): Client = getClientForCluster(clusterName = clusterName, client = client, localClusterName = clusterService.clusterName.value()) /** * Uses the index name to determine whether the target client is the local or a remote client, @@ -174,10 +192,17 @@ class CrossClusterMonitorUtils { * @return The local [NodeClient] for the local cluster, or a remote client for a remote cluster. */ @JvmStatic - fun getClientForIndex(index: String, client: Client, localClusterName: String): Client { + fun getClientForIndex( + index: String, + client: Client, + localClusterName: String, + ): Client { val clusterName = parseClusterName(index) - return if (clusterName.isNotEmpty() && clusterName != localClusterName) - client.getRemoteClusterClient(clusterName) else client + return if (clusterName.isNotEmpty() && clusterName != localClusterName) { + client.getRemoteClusterClient(clusterName) + } else { + client + } } /** @@ -190,9 +215,11 @@ class CrossClusterMonitorUtils { * @return The local [NodeClient] for the local cluster, or a remote client for a remote cluster. */ @JvmStatic - fun getClientForIndex(index: String, client: Client, clusterService: ClusterService): Client { - return getClientForIndex(index = index, client = client, localClusterName = clusterService.clusterName.value()) - } + fun getClientForIndex( + index: String, + client: Client, + clusterService: ClusterService, + ): Client = getClientForIndex(index = index, client = client, localClusterName = clusterService.clusterName.value()) /** * @param index The name of the index to evaluate. 
@@ -200,10 +227,12 @@ class CrossClusterMonitorUtils { * @return The cluster name if present; else an empty string. */ @JvmStatic - fun parseClusterName(index: String): String { - return if (index.contains(":")) index.split(":").getOrElse(0) { "" } - else "" - } + fun parseClusterName(index: String): String = + if (index.contains(":")) { + index.split(":").getOrElse(0) { "" } + } else { + "" + } /** * @param index The name of the index to evaluate. @@ -211,10 +240,12 @@ class CrossClusterMonitorUtils { * @return The index name. */ @JvmStatic - fun parseIndexName(index: String): String { - return if (index.contains(":")) index.split(":").getOrElse(1) { index } - else index - } + fun parseIndexName(index: String): String = + if (index.contains(":")) { + index.split(":").getOrElse(1) { index } + } else { + index + } /** * If clusterName is provided, combines the inputs into ":" format. @@ -223,12 +254,20 @@ class CrossClusterMonitorUtils { * @return The formatted string. */ @JvmStatic - fun formatClusterAndIndexName(clusterName: String, indexName: String): String { - return if (clusterName.isNotEmpty()) "$clusterName:$indexName" - else indexName - } + fun formatClusterAndIndexName( + clusterName: String, + indexName: String, + ): String = + if (clusterName.isNotEmpty()) { + "$clusterName:$indexName" + } else { + indexName + } - fun isRemoteClusterIndex(index: String, clusterService: ClusterService): Boolean { + fun isRemoteClusterIndex( + index: String, + clusterService: ClusterService, + ): Boolean { val clusterName = parseClusterName(index) return clusterName.isNotEmpty() && clusterService.clusterName.value() != clusterName } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/DestinationType.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/DestinationType.kt index d714288f8..634ed4565 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/DestinationType.kt +++ 
b/alerting/src/main/kotlin/org/opensearch/alerting/util/DestinationType.kt @@ -5,14 +5,15 @@ package org.opensearch.alerting.util -enum class DestinationType(val value: String) { +enum class DestinationType( + val value: String, +) { CHIME("chime"), SLACK("slack"), CUSTOM_WEBHOOK("custom_webhook"), EMAIL("email"), - TEST_ACTION("test_action"); + TEST_ACTION("test_action"), + ; - override fun toString(): String { - return value - } + override fun toString(): String = value } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/DocLevelMonitorQueries.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/DocLevelMonitorQueries.kt index 5173ff897..cbf06654b 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/DocLevelMonitorQueries.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/DocLevelMonitorQueries.kt @@ -56,24 +56,32 @@ import kotlin.coroutines.suspendCoroutine private val log = LogManager.getLogger(DocLevelMonitorQueries::class.java) -class DocLevelMonitorQueries(private val client: Client, private val clusterService: ClusterService) { +class DocLevelMonitorQueries( + private val client: Client, + private val clusterService: ClusterService, +) { companion object { - const val PROPERTIES = "properties" const val NESTED = "nested" const val TYPE = "type" const val INDEX_PATTERN_SUFFIX = "-000001" const val QUERY_INDEX_BASE_FIELDS_COUNT = 8 // 3 fields we defined and 5 builtin additional metadata fields + @JvmStatic - fun docLevelQueriesMappings(): String { - return DocLevelMonitorQueries::class.java.classLoader.getResource("mappings/doc-level-queries.json").readText() - } - fun docLevelQueriesSettings(): Settings { - return Settings.builder().loadFromSource( - DocLevelMonitorQueries::class.java.classLoader.getResource("settings/doc-level-queries.json").readText(), - XContentType.JSON - ).build() - } + fun docLevelQueriesMappings(): String = + DocLevelMonitorQueries::class.java.classLoader + 
.getResource("mappings/doc-level-queries.json") + .readText() + + fun docLevelQueriesSettings(): Settings = + Settings + .builder() + .loadFromSource( + DocLevelMonitorQueries::class.java.classLoader + .getResource("settings/doc-level-queries.json") + .readText(), + XContentType.JSON, + ).build() } suspend fun initDocLevelQueryIndex(): Boolean { @@ -81,9 +89,10 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ // Since we changed queryIndex to be alias now, for backwards compatibility, we have to delete index with same name // as our alias, to avoid name clash. if (clusterService.state().metadata.hasIndex(ScheduledJob.DOC_LEVEL_QUERIES_INDEX)) { - val acknowledgedResponse: AcknowledgedResponse = client.suspendUntil { - admin().indices().delete(DeleteIndexRequest(ScheduledJob.DOC_LEVEL_QUERIES_INDEX), it) - } + val acknowledgedResponse: AcknowledgedResponse = + client.suspendUntil { + admin().indices().delete(DeleteIndexRequest(ScheduledJob.DOC_LEVEL_QUERIES_INDEX), it) + } if (!acknowledgedResponse.isAcknowledged) { val errorMessage = "Deletion of old queryIndex [${ScheduledJob.DOC_LEVEL_QUERIES_INDEX}] index is not acknowledged!" 
log.error(errorMessage) @@ -92,10 +101,11 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ } val alias = ScheduledJob.DOC_LEVEL_QUERIES_INDEX val indexPattern = ScheduledJob.DOC_LEVEL_QUERIES_INDEX + INDEX_PATTERN_SUFFIX - val indexRequest = CreateIndexRequest(indexPattern) - .mapping(docLevelQueriesMappings()) - .alias(Alias(alias)) - .settings(docLevelQueriesSettings()) + val indexRequest = + CreateIndexRequest(indexPattern) + .mapping(docLevelQueriesMappings()) + .alias(Alias(alias)) + .settings(docLevelQueriesSettings()) return try { val createIndexResponse: CreateIndexResponse = client.suspendUntil { client.admin().indices().create(indexRequest, it) } createIndexResponse.isAcknowledged @@ -109,6 +119,7 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ } return true } + suspend fun initDocLevelQueryIndex(dataSources: DataSources): Boolean { if (dataSources.queryIndex == ScheduledJob.DOC_LEVEL_QUERIES_INDEX) { return initDocLevelQueryIndex() @@ -116,9 +127,10 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ // Since we changed queryIndex to be alias now, for backwards compatibility, we have to delete index with same name // as our alias, to avoid name clash. 
if (clusterService.state().metadata.hasIndex(dataSources.queryIndex)) { - val acknowledgedResponse: AcknowledgedResponse = client.suspendUntil { - admin().indices().delete(DeleteIndexRequest(dataSources.queryIndex), it) - } + val acknowledgedResponse: AcknowledgedResponse = + client.suspendUntil { + admin().indices().delete(DeleteIndexRequest(dataSources.queryIndex), it) + } if (!acknowledgedResponse.isAcknowledged) { log.warn("Deletion of old queryIndex [${dataSources.queryIndex}] index is not acknowledged!") } @@ -126,15 +138,18 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ val alias = dataSources.queryIndex val indexPattern = dataSources.queryIndex + INDEX_PATTERN_SUFFIX if (!clusterService.state().metadata.hasAlias(alias)) { - val indexRequest = CreateIndexRequest(indexPattern) - .mapping(docLevelQueriesMappings()) - .alias(Alias(alias)) - .settings( - Settings.builder().put("index.hidden", true) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") - .build() - ) + val indexRequest = + CreateIndexRequest(indexPattern) + .mapping(docLevelQueriesMappings()) + .alias(Alias(alias)) + .settings( + Settings + .builder() + .put("index.hidden", true) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") + .build(), + ) return try { val createIndexResponse: CreateIndexResponse = client.suspendUntil { client.admin().indices().create(indexRequest, it) } createIndexResponse.isAcknowledged @@ -160,22 +175,26 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ return } - val queryBuilder = QueryBuilders.boolQuery() - .must(QueryBuilders.existsQuery("monitor_id")) - .mustNot(QueryBuilders.wildcardQuery("monitor_id", "*")) - - val response: BulkByScrollResponse = suspendCoroutine { cont -> - DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) - .source(queryIndex) - .filter(queryBuilder) 
- .refresh(true) - .execute( - object : ActionListener { - override fun onResponse(response: BulkByScrollResponse) = cont.resume(response) - override fun onFailure(t: Exception) = cont.resumeWithException(t) - } - ) - } + val queryBuilder = + QueryBuilders + .boolQuery() + .must(QueryBuilders.existsQuery("monitor_id")) + .mustNot(QueryBuilders.wildcardQuery("monitor_id", "*")) + + val response: BulkByScrollResponse = + suspendCoroutine { cont -> + DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) + .source(queryIndex) + .filter(queryBuilder) + .refresh(true) + .execute( + object : ActionListener { + override fun onResponse(response: BulkByScrollResponse) = cont.resume(response) + + override fun onFailure(t: Exception) = cont.resumeWithException(t) + }, + ) + } response.bulkFailures.forEach { log.error("Failed deleting queries while removing dry run queries: [${it.id}] cause: [${it.cause}] ") } @@ -186,12 +205,13 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ } suspend fun deleteDocLevelQueryIndex(dataSources: DataSources): Boolean { - val ack: AcknowledgedResponse = client.suspendUntil { - client.admin().indices().delete( - DeleteIndexRequest(dataSources.queryIndex).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN), - it - ) - } + val ack: AcknowledgedResponse = + client.suspendUntil { + client.admin().indices().delete( + DeleteIndexRequest(dataSources.queryIndex).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN), + it, + ) + } return ack.isAcknowledged } @@ -218,7 +238,7 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ node: MutableMap, currentPath: String, processLeafFn: (String, String, MutableMap) -> Triple>, - flattenPaths: MutableMap> + flattenPaths: MutableMap>, ) { // If node contains "properties" property then it is internal(non-leaf) node log.debug("Node in traverse: $node") @@ -226,8 +246,12 @@ class DocLevelMonitorQueries(private val client: Client, 
private val clusterServ var newNodes = ArrayList>(node.size) node.entries.forEach { // Compute full path relative to root - val fullPath = if (currentPath.isEmpty()) it.key - else "$currentPath.${it.key}" + val fullPath = + if (currentPath.isEmpty()) { + it.key + } else { + "$currentPath.${it.key}" + } val nodeProps = it.value as MutableMap // If it has type property and type is not "nested" then this is a leaf if (nodeProps.containsKey(TYPE) && nodeProps[TYPE] != NESTED) { @@ -258,7 +282,7 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ monitorId: String, monitorMetadata: MonitorMetadata, refreshPolicy: RefreshPolicy = RefreshPolicy.IMMEDIATE, - indexTimeout: TimeValue + indexTimeout: TimeValue, ) { val docLevelMonitorInput = monitor.inputs[0] as DocLevelMonitorInput val queries: List = docLevelMonitorInput.queries @@ -268,11 +292,12 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ // Run through each backing index and apply appropriate mappings to query index indices.forEach { indexName -> - var concreteIndices = IndexUtils.resolveAllIndices( - listOf(indexName), - monitorCtx.clusterService!!, - monitorCtx.indexNameExpressionResolver!! 
- ) + var concreteIndices = + IndexUtils.resolveAllIndices( + listOf(indexName), + monitorCtx.clusterService!!, + monitorCtx.indexNameExpressionResolver!!, + ) if (IndexUtils.isAlias(indexName, monitorCtx.clusterService!!.state()) || IndexUtils.isDataStream(indexName, monitorCtx.clusterService!!.state()) ) { @@ -280,11 +305,12 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ if (lastWriteIndex != null) { val lastWriteIndexCreationDate = IndexUtils.getCreationDateForIndex(lastWriteIndex, monitorCtx.clusterService!!.state()) - concreteIndices = IndexUtils.getNewestIndicesByCreationDate( - concreteIndices, - monitorCtx.clusterService!!.state(), - lastWriteIndexCreationDate - ) + concreteIndices = + IndexUtils.getNewestIndicesByCreationDate( + concreteIndices, + monitorCtx.clusterService!!.state(), + lastWriteIndexCreationDate, + ) } } val updatedIndexName = indexName.replace("*", "_") @@ -300,12 +326,15 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ val properties = ( (indexMetadata.mapping()?.sourceAsMap?.get("properties")) as MutableMap - ) + ) // Node processor function is used to process leaves of index mappings tree // val leafNodeProcessor = - fun(fieldName: String, fullPath: String, props: MutableMap): - Triple> { + fun( + fieldName: String, + fullPath: String, + props: MutableMap, + ): Triple> { val newProps = props.toMutableMap() if (monitor.dataSources.queryIndexMappingsByType.isNotEmpty()) { val mappingsByType = monitor.dataSources.queryIndexMappingsByType @@ -341,10 +370,11 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ ) { } else { if (updatedProperties.containsKey(it.key) && updatedProperties[it.key] != it.value) { - val mergedField = mergeConflictingFields( - updatedProperties[it.key] as Map, - it.value as Map - ) + val mergedField = + mergeConflictingFields( + updatedProperties[it.key] as Map, + it.value as Map, + ) updatedProperties[it.key] = 
mergedField } else { updatedProperties[it.key] = it.value @@ -356,14 +386,15 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ } } // Updates mappings of concrete queryIndex. This can rollover queryIndex if field mapping limit is reached. - val (updateMappingResponse, concreteQueryIndex) = updateQueryIndexMappings( - monitor, - monitorMetadata, - updatedIndexName, - sourceIndexFieldLimit, - updatedProperties, - indexTimeout - ) + val (updateMappingResponse, concreteQueryIndex) = + updateQueryIndexMappings( + monitor, + monitorMetadata, + updatedIndexName, + sourceIndexFieldLimit, + updatedProperties, + indexTimeout, + ) if (updateMappingResponse.isAcknowledged) { doIndexAllQueries( @@ -374,7 +405,7 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ allFlattenPaths, conflictingFields, refreshPolicy, - indexTimeout + indexTimeout, ) } } @@ -388,7 +419,7 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ flattenPaths: MutableSet>, conflictingPaths: Set, refreshPolicy: RefreshPolicy, - indexTimeout: TimeValue + indexTimeout: TimeValue, ) { val indexRequests = mutableListOf() val conflictingPathToConcreteIndices = mutableMapOf>() @@ -420,10 +451,11 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ if (filteredConcreteIndices.isNotEmpty()) { filteredConcreteIndices.forEach { filteredConcreteIndex -> - val newQuery = it.copy( - id = "${it.id}_$filteredConcreteIndex", - query = query.replace("", filteredConcreteIndex) - ) + val newQuery = + it.copy( + id = "${it.id}_$filteredConcreteIndex", + query = query.replace("", filteredConcreteIndex), + ) newQueries.add(newQuery) } } else { @@ -439,25 +471,28 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ query = query.replace("${fieldPath.first}:", "${fieldPath.first}_${sourceIndex}_$monitorId:") } } - val indexRequest = IndexRequest(concreteQueryIndex) - 
.id(it.id + "_$monitorId") - .source( - mapOf( - "query" to mapOf("query_string" to mapOf("query" to query, "fields" to it.fields)), - "monitor_id" to monitorId, - "index" to sourceIndex + val indexRequest = + IndexRequest(concreteQueryIndex) + .id(it.id + "_$monitorId") + .source( + mapOf( + "query" to mapOf("query_string" to mapOf("query" to query, "fields" to it.fields)), + "monitor_id" to monitorId, + "index" to sourceIndex, + ), ) - ) indexRequests.add(indexRequest) log.debug("query $query added for execution of monitor $monitorId on index $sourceIndex") } log.debug("bulk inserting percolate [${queries.size}] queries") if (indexRequests.isNotEmpty()) { - val bulkResponse: BulkResponse = client.suspendUntil { - client.bulk( - BulkRequest().setRefreshPolicy(refreshPolicy).timeout(indexTimeout).add(indexRequests), it - ) - } + val bulkResponse: BulkResponse = + client.suspendUntil { + client.bulk( + BulkRequest().setRefreshPolicy(refreshPolicy).timeout(indexTimeout).add(indexRequests), + it, + ) + } bulkResponse.forEach { bulkItemResponse -> if (bulkItemResponse.isFailed) { log.error(bulkItemResponse.failureMessage) @@ -469,22 +504,30 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ /** * Transforms the query if it includes an _exists_ clause to append the index name and the monitor id to the field value */ - private fun transformExistsQuery(query: String, conflictingPath: String, indexName: String, monitorId: String): String { - return query + private fun transformExistsQuery( + query: String, + conflictingPath: String, + indexName: String, + monitorId: String, + ): String = + query .replace("_exists_: ", "_exists_:") // remove space to read exists query as one string .split("\\s+".toRegex()) .joinToString(separator = " ") { segment -> if (segment.contains("_exists_:")) { val trimSegement = segment.trim { it == '(' || it == ')' } // remove any delimiters from ends val (_, value) = trimSegement.split(":", limit = 2) // split 
into key and value - val newString = if (value == conflictingPath) - segment.replace(conflictingPath, "${conflictingPath}_${indexName}_$monitorId") else segment + val newString = + if (value == conflictingPath) { + segment.replace(conflictingPath, "${conflictingPath}_${indexName}_$monitorId") + } else { + segment + } newString } else { segment } } - } private suspend fun updateQueryIndexMappings( monitor: Monitor, @@ -492,14 +535,14 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ sourceIndex: String, sourceIndexFieldLimit: Long, updatedProperties: MutableMap, - indexTimeout: TimeValue + indexTimeout: TimeValue, ): Pair { var targetQueryIndex = monitorMetadata.sourceToQueryIndexMapping[sourceIndex + monitor.id] if ( targetQueryIndex == null || ( targetQueryIndex != monitor.dataSources.queryIndex && monitor.deleteQueryIndexInEveryRun == true - ) + ) ) { // queryIndex is alias which will always have only 1 backing index which is writeIndex // This is due to a fact that that _rollover API would maintain only single index under alias @@ -509,7 +552,7 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ val message = "Failed to get write index for queryIndex alias:${monitor.dataSources.queryIndex}" log.error(message) throw AlertingException.wrap( - OpenSearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR) + OpenSearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR), ) } monitorMetadata.sourceToQueryIndexMapping[sourceIndex + monitor.id] = targetQueryIndex @@ -520,9 +563,10 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ try { // Adjust max field limit in mappings for query index, if needed. 
adjustMaxFieldLimitForQueryIndex(sourceIndexFieldLimit, targetQueryIndex) - updateMappingResponse = client.suspendUntil { - client.admin().indices().putMapping(updateMappingRequest, it) - } + updateMappingResponse = + client.suspendUntil { + client.admin().indices().putMapping(updateMappingRequest, it) + } return Pair(updateMappingResponse, targetQueryIndex) } catch (e: Exception) { val unwrappedException = ExceptionsHelper.unwrapCause(e) as Exception @@ -537,9 +581,10 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ // PUT mappings to newly created index val updateMappingRequest = PutMappingRequest(targetQueryIndex) updateMappingRequest.source(mapOf("properties" to updatedProperties)) - updateMappingResponse = client.suspendUntil { - client.admin().indices().putMapping(updateMappingRequest, it) - } + updateMappingResponse = + client.suspendUntil { + client.admin().indices().putMapping(updateMappingRequest, it) + } } catch (e: Exception) { // If we reached limit for total number of fields in mappings after rollover // it means that source index has more then (FIELD_LIMIT - 3) fields (every query index has 3 fields defined) @@ -562,14 +607,14 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ log.error( "unknown exception during PUT mapping on queryIndex: $targetQueryIndex, " + "retrying with deletion of query index", - e + e, ) if (docLevelQueryIndexExists(monitor.dataSources)) { val ack = monitorCtx.docLevelMonitorQueries!!.deleteDocLevelQueryIndex(monitor.dataSources) if (!ack) { log.error( "Deletion of concrete queryIndex:${monitor.dataSources.queryIndex} is not ack'd! 
" + - "for monitor ${monitor.id}" + "for monitor ${monitor.id}", ) } } @@ -578,13 +623,13 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ monitor = monitor, monitorId = monitor.id, monitorMetadata, - indexTimeout = indexTimeout + indexTimeout = indexTimeout, ) } catch (e: Exception) { log.error( "Doc level monitor ${monitor.id}: unknown exception during " + "PUT mapping on queryIndex: $targetQueryIndex", - e + e, ) val unwrappedException = ExceptionsHelper.unwrapCause(e) as Exception throw AlertingException.wrap(unwrappedException) @@ -593,7 +638,7 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ log.error( "Doc level monitor ${monitor.id}: unknown exception during " + "PUT mapping on queryIndex: $targetQueryIndex", - e + e, ) val unwrappedException = ExceptionsHelper.unwrapCause(e) as Exception throw AlertingException.wrap(unwrappedException) @@ -615,7 +660,10 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ /** * merge conflicting leaf fields in the mapping tree */ - private fun mergeConflictingFields(oldField: Map, newField: Map): Map { + private fun mergeConflictingFields( + oldField: Map, + newField: Map, + ): Map { val mergedField = mutableMapOf() oldField.entries.forEach { if (newField.containsKey(it.key)) { @@ -641,7 +689,10 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ /** * get all fields which have same name but different mappings belonging to an index pattern */ - fun getAllConflictingFields(clusterState: ClusterState, concreteIndices: List): Set { + fun getAllConflictingFields( + clusterState: ClusterState, + concreteIndices: List, + ): Set { val conflictingFields = mutableSetOf() val allFlattenPaths = mutableMapOf>() concreteIndices.forEach { concreteIndexName -> @@ -651,13 +702,15 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ val properties = ( 
(indexMetadata.mapping()?.sourceAsMap?.get("properties")) as MutableMap - ) + ) // Node processor function is used to process leaves of index mappings tree // val leafNodeProcessor = - fun(fieldName: String, _: String, props: MutableMap): Triple> { - return Triple(fieldName, fieldName, props) - } + fun( + fieldName: String, + _: String, + props: MutableMap, + ): Triple> = Triple(fieldName, fieldName, props) // Traverse and update index mappings here while extracting flatten field paths val flattenPaths = mutableMapOf>() traverseMappingsAndUpdate(properties, "", leafNodeProcessor, flattenPaths) @@ -678,9 +731,10 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ * checks the max field limit for a concrete index */ private suspend fun checkMaxFieldLimit(sourceIndex: String): Long { - val getSettingsResponse: GetSettingsResponse = client.suspendUntil { - admin().indices().getSettings(GetSettingsRequest().indices(sourceIndex), it) - } + val getSettingsResponse: GetSettingsResponse = + client.suspendUntil { + admin().indices().getSettings(GetSettingsRequest().indices(sourceIndex), it) + } return getSettingsResponse.getSetting(sourceIndex, INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.key)?.toLong() ?: 1000L } @@ -688,25 +742,30 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ * Adjusts max field limit index setting for query index if source index has higher limit. 
* This will prevent max field limit exception, when source index has more fields then query index limit */ - private suspend fun adjustMaxFieldLimitForQueryIndex(sourceIndexFieldLimit: Long, concreteQueryIndex: String) { - val getSettingsResponse: GetSettingsResponse = client.suspendUntil { - admin().indices().getSettings(GetSettingsRequest().indices(concreteQueryIndex), it) - } + private suspend fun adjustMaxFieldLimitForQueryIndex( + sourceIndexFieldLimit: Long, + concreteQueryIndex: String, + ) { + val getSettingsResponse: GetSettingsResponse = + client.suspendUntil { + admin().indices().getSettings(GetSettingsRequest().indices(concreteQueryIndex), it) + } val queryIndexLimit = getSettingsResponse.getSetting(concreteQueryIndex, INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.key)?.toLong() ?: 1000L // Our query index initially has 3 fields we defined and 5 more builtin metadata fields in mappings so we have to account for that if (sourceIndexFieldLimit > (queryIndexLimit - QUERY_INDEX_BASE_FIELDS_COUNT)) { - val updateSettingsResponse: AcknowledgedResponse = client.suspendUntil { - admin().indices().updateSettings( - UpdateSettingsRequest(concreteQueryIndex).settings( - Settings.builder().put( - INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.key, - sourceIndexFieldLimit + QUERY_INDEX_BASE_FIELDS_COUNT - ) - ), - it - ) - } + val updateSettingsResponse: AcknowledgedResponse = + client.suspendUntil { + admin().indices().updateSettings( + UpdateSettingsRequest(concreteQueryIndex).settings( + Settings.builder().put( + INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.key, + sourceIndexFieldLimit + QUERY_INDEX_BASE_FIELDS_COUNT, + ), + ), + it, + ) + } } } @@ -715,23 +774,31 @@ class DocLevelMonitorQueries(private val client: Client, private val clusterServ val queryIndexPattern = monitor.dataSources.queryIndex + INDEX_PATTERN_SUFFIX val request = RolloverRequest(queryIndex, null) - request.createIndexRequest.index(queryIndexPattern) + request.createIndexRequest + 
.index(queryIndexPattern) .mapping(docLevelQueriesMappings()) .settings(docLevelQueriesSettings()) - val response: RolloverResponse = client.suspendUntil { - client.admin().indices().rolloverIndex(request, it) - } + val response: RolloverResponse = + client.suspendUntil { + client.admin().indices().rolloverIndex(request, it) + } if (response.isRolledOver == false) { val message = "failed to rollover queryIndex:$queryIndex queryIndexPattern:$queryIndexPattern" log.error(message) throw AlertingException.wrap( - OpenSearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR) + OpenSearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR), ) } return response.newIndex } - private fun getWriteIndexNameForAlias(alias: String): String? { - return this.clusterService.state().metadata().indicesLookup?.get(alias)?.writeIndex?.index?.name - } + private fun getWriteIndexNameForAlias(alias: String): String? = + this.clusterService + .state() + .metadata() + .indicesLookup + ?.get(alias) + ?.writeIndex + ?.index + ?.name } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/IndexUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/IndexUtils.kt index b388ae757..7978efad4 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/IndexUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/IndexUtils.kt @@ -27,8 +27,8 @@ import org.opensearch.core.xcontent.XContentParser import org.opensearch.transport.client.IndicesAdminClient class IndexUtils { - companion object { + @Suppress("ktlint:standard:property-naming", "ktlint:standard:backing-property-naming") const val _META = "_meta" const val SCHEMA_VERSION = "schema_version" @@ -94,10 +94,12 @@ class IndexUtils { @JvmStatic fun getSchemaVersion(mapping: String): Int { - val xcp = XContentType.JSON.xContent().createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, mapping - ) + val xcp = + XContentType.JSON.xContent().createParser( + 
NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + mapping, + ) while (!xcp.isClosed) { val token = xcp.currentToken() @@ -113,7 +115,10 @@ class IndexUtils { require(version > -1) return version } - else -> xcp.nextToken() + + else -> { + xcp.nextToken() + } } } } @@ -124,12 +129,19 @@ class IndexUtils { } @JvmStatic - fun getIndexNameWithAlias(clusterState: ClusterState, alias: String): String { - return clusterState.metadata.indices.entries.first { it.value.aliases.containsKey(alias) }.key - } + fun getIndexNameWithAlias( + clusterState: ClusterState, + alias: String, + ): String = + clusterState.metadata.indices.entries + .first { it.value.aliases.containsKey(alias) } + .key @JvmStatic - fun shouldUpdateIndex(index: IndexMetadata, mapping: String): Boolean { + fun shouldUpdateIndex( + index: IndexMetadata, + mapping: String, + ): Boolean { var oldVersion = IndexUtils.NO_SCHEMA_VERSION val newVersion = getSchemaVersion(mapping) @@ -149,7 +161,7 @@ class IndexUtils { mapping: String, clusterState: ClusterState, client: IndicesAdminClient, - actionListener: ActionListener + actionListener: ActionListener, ) { if (clusterState.metadata.indices.containsKey(index)) { if (shouldUpdateIndex(clusterState.metadata.indices[index]!!, mapping)) { @@ -162,16 +174,21 @@ class IndexUtils { } @JvmStatic - fun resolveAllIndices(indices: List, clusterService: ClusterService, resolver: IndexNameExpressionResolver): List { + fun resolveAllIndices( + indices: List, + clusterService: ClusterService, + resolver: IndexNameExpressionResolver, + ): List { val result = mutableListOf() indices.forEach { index -> - val concreteIndices = resolver.concreteIndexNames( - clusterService.state(), - IndicesOptions.lenientExpand(), - true, - index - ) + val concreteIndices = + resolver.concreteIndexNames( + clusterService.state(), + IndicesOptions.lenientExpand(), + true, + index, + ) result.addAll(concreteIndices) } @@ -179,17 +196,22 @@ class IndexUtils { } @JvmStatic - fun 
isDataStream(name: String, clusterState: ClusterState): Boolean { - return clusterState.metadata().dataStreams().containsKey(name) - } + fun isDataStream( + name: String, + clusterState: ClusterState, + ): Boolean = clusterState.metadata().dataStreams().containsKey(name) @JvmStatic - fun isAlias(name: String, clusterState: ClusterState): Boolean { - return clusterState.metadata().hasAlias(name) - } + fun isAlias( + name: String, + clusterState: ClusterState, + ): Boolean = clusterState.metadata().hasAlias(name) @JvmStatic - fun getWriteIndex(index: String, clusterState: ClusterState): String? { + fun getWriteIndex( + index: String, + clusterState: ClusterState, + ): String? { if (isAlias(index, clusterState) || isDataStream(index, clusterState)) { val metadata = clusterState.metadata.indicesLookup[index]?.writeIndex if (metadata != null) { @@ -200,7 +222,11 @@ class IndexUtils { } @JvmStatic - fun getNewestIndicesByCreationDate(concreteIndices: List, clusterState: ClusterState, thresholdDate: Long): List { + fun getNewestIndicesByCreationDate( + concreteIndices: List, + clusterState: ClusterState, + thresholdDate: Long, + ): List { val filteredIndices = mutableListOf() val lookup = clusterState.metadata().indicesLookup concreteIndices.forEach { indexName -> @@ -216,16 +242,16 @@ class IndexUtils { } @JvmStatic - fun getCreationDateForIndex(index: String, clusterState: ClusterState): Long { - return clusterState.metadata.index(index).creationDate - } + fun getCreationDateForIndex( + index: String, + clusterState: ClusterState, + ): Long = clusterState.metadata.index(index).creationDate @JvmStatic fun checkShardsFailure(response: IndexResponse): String? 
{ val failureReasons = StringBuilder() if (response.shardInfo.failed > 0) { - response.shardInfo.failures.forEach { - entry -> + response.shardInfo.failures.forEach { entry -> failureReasons.append(entry.reason()) } return failureReasons.toString() diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/RestHandlerUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/RestHandlerUtils.kt index b5aeaa542..a4b547c1d 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/RestHandlerUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/RestHandlerUtils.kt @@ -21,7 +21,9 @@ fun context(request: RestRequest): FetchSourceContext? { val userAgent = if (request.header("User-Agent") == null) "" else request.header("User-Agent") return if (!userAgent.contains(AlertingPlugin.OPEN_SEARCH_DASHBOARDS_USER_AGENT)) { FetchSourceContext(true, Strings.EMPTY_ARRAY, AlertingPlugin.UI_METADATA_EXCLUDE) - } else null + } else { + null + } } const val IF_SEQ_NO = "if_seq_no" diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/ScheduledJobUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/ScheduledJobUtils.kt index a00f21608..04fa249d4 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/ScheduledJobUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/ScheduledJobUtils.kt @@ -24,48 +24,61 @@ class ScheduledJobUtils { companion object { const val WORKFLOW_DELEGATE_PATH = "workflow.inputs.composite_input.sequence.delegates" const val WORKFLOW_MONITOR_PATH = "workflow.inputs.composite_input.sequence.delegates.monitor_id" - fun parseWorkflowFromScheduledJobDocSource(xContentRegistry: NamedXContentRegistry, response: GetResponse): Workflow { - XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, XContentType.JSON - ).use { xcp -> - try { - val workflow = ScheduledJob.parse(xcp, response.id, response.version) - if (workflow is 
Workflow) { - return workflow - } else { - log.error("Unable to parse workflow from ${response.source}") - throw OpenSearchStatusException( - "Unable to parse workflow from ${response.source}", - RestStatus.INTERNAL_SERVER_ERROR - ) + + fun parseWorkflowFromScheduledJobDocSource( + xContentRegistry: NamedXContentRegistry, + response: GetResponse, + ): Workflow { + XContentHelper + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + response.sourceAsBytesRef, + XContentType.JSON, + ).use { xcp -> + try { + val workflow = ScheduledJob.parse(xcp, response.id, response.version) + if (workflow is Workflow) { + return workflow + } else { + log.error("Unable to parse workflow from ${response.source}") + throw OpenSearchStatusException( + "Unable to parse workflow from ${response.source}", + RestStatus.INTERNAL_SERVER_ERROR, + ) + } + } catch (e: java.lang.Exception) { + throw AlertingException("Unable to parse workflow from ${response.source}", RestStatus.INTERNAL_SERVER_ERROR, e) } - } catch (e: java.lang.Exception) { - throw AlertingException("Unable to parse workflow from ${response.source}", RestStatus.INTERNAL_SERVER_ERROR, e) } - } } - fun parseMonitorFromScheduledJobDocSource(xContentRegistry: NamedXContentRegistry, response: GetResponse): Monitor { - XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - response.sourceAsBytesRef, XContentType.JSON - ).use { xcp -> - try { - val monitor = ScheduledJob.parse(xcp, response.id, response.version) - if (monitor is Monitor) { - return monitor - } else { - log.error("Unable to parse monitor from ${response.source}") - throw OpenSearchStatusException( - "Unable to parse monitor from ${response.source}", - RestStatus.INTERNAL_SERVER_ERROR - ) + fun parseMonitorFromScheduledJobDocSource( + xContentRegistry: NamedXContentRegistry, + response: GetResponse, + ): Monitor { + XContentHelper + .createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + 
response.sourceAsBytesRef, + XContentType.JSON, + ).use { xcp -> + try { + val monitor = ScheduledJob.parse(xcp, response.id, response.version) + if (monitor is Monitor) { + return monitor + } else { + log.error("Unable to parse monitor from ${response.source}") + throw OpenSearchStatusException( + "Unable to parse monitor from ${response.source}", + RestStatus.INTERNAL_SERVER_ERROR, + ) + } + } catch (e: java.lang.Exception) { + throw AlertingException("Unable to parse monitor from ${response.source}", RestStatus.INTERNAL_SERVER_ERROR, e) } - } catch (e: java.lang.Exception) { - throw AlertingException("Unable to parse monitor from ${response.source}", RestStatus.INTERNAL_SERVER_ERROR, e) } - } } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatIndicesHelpers.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatIndicesHelpers.kt index 36c09f244..fd4e46950 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatIndicesHelpers.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatIndicesHelpers.kt @@ -34,7 +34,9 @@ import java.time.ZoneOffset import java.time.ZonedDateTime import java.util.Locale -class CatIndicesRequestWrapper(val pathParams: String = "") : ActionRequest() { +class CatIndicesRequestWrapper( + val pathParams: String = "", +) : ActionRequest() { val log = LogManager.getLogger(CatIndicesRequestWrapper::class.java) var clusterHealthRequest: ClusterHealthRequest = @@ -66,11 +68,13 @@ class CatIndicesRequestWrapper(val pathParams: String = "") : ActionRequest() { override fun validate(): ActionRequestValidationException? { var exception: ActionRequestValidationException? 
= null - if (pathParams.isNotBlank() && indicesList.any { !VALID_INDEX_NAME_REGEX.containsMatchIn(it) }) - exception = ValidateActions.addValidationError( - "The path parameters do not form a valid, comma-separated list of data streams, indices, or index aliases.", - exception - ) + if (pathParams.isNotBlank() && indicesList.any { !VALID_INDEX_NAME_REGEX.containsMatchIn(it) }) { + exception = + ValidateActions.addValidationError( + "The path parameters do not form a valid, comma-separated list of data streams, indices, or index aliases.", + exception, + ) + } return exception } } @@ -79,17 +83,19 @@ class CatIndicesResponseWrapper( clusterHealthResponse: ClusterHealthResponse, clusterStateResponse: ClusterStateResponse, indexSettingsResponse: GetSettingsResponse, - indicesStatsResponse: IndicesStatsResponse -) : ActionResponse(), ToXContentObject { + indicesStatsResponse: IndicesStatsResponse, +) : ActionResponse(), + ToXContentObject { var indexInfoList: List = listOf() init { - indexInfoList = compileIndexInfo( - clusterHealthResponse, - clusterStateResponse, - indexSettingsResponse, - indicesStatsResponse - ) + indexInfoList = + compileIndexInfo( + clusterHealthResponse, + clusterStateResponse, + indexSettingsResponse, + indicesStatsResponse, + ) } companion object { @@ -100,7 +106,10 @@ class CatIndicesResponseWrapper( out.writeList(indexInfoList) } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.startObject() builder.startArray(WRAPPER_FIELD) indexInfoList.forEach { it.toXContent(builder, params) } @@ -112,7 +121,7 @@ class CatIndicesResponseWrapper( clusterHealthResponse: ClusterHealthResponse, clusterStateResponse: ClusterStateResponse, indexSettingsResponse: GetSettingsResponse, - indicesStatsResponse: IndicesStatsResponse + indicesStatsResponse: IndicesStatsResponse, ): List { val list = 
mutableListOf() @@ -159,8 +168,10 @@ class CatIndicesResponseWrapper( docsCount = "${primaryStats?.getDocs()?.count}", docsDeleted = "${primaryStats?.getDocs()?.deleted}", creationDate = "${indexMetadata?.creationDate}", - creationDateString = DateFormatter.forPattern("strict_date_time") - .format(ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetadata!!.creationDate), ZoneOffset.UTC)), + creationDateString = + DateFormatter + .forPattern("strict_date_time") + .format(ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetadata!!.creationDate), ZoneOffset.UTC)), storeSize = "${totalStats?.store?.size}", priStoreSize = "${primaryStats?.store?.size}", completionSize = "${totalStats?.completion?.size}", @@ -288,7 +299,7 @@ class CatIndicesResponseWrapper( memoryTotal = "${totalStats?.totalMemory}", priMemoryTotal = "${primaryStats?.totalMemory}", searchThrottled = "$searchThrottled", - ) + ), ) } @@ -432,8 +443,9 @@ class CatIndicesResponseWrapper( val priSuggestTotal: String?, val memoryTotal: String?, val priMemoryTotal: String?, - val searchThrottled: String? 
- ) : ToXContentObject, Writeable { + val searchThrottled: String?, + ) : ToXContentObject, + Writeable { companion object { const val HEALTH_FIELD = "health" const val STATUS_FIELD = "status" @@ -574,8 +586,12 @@ class CatIndicesResponseWrapper( const val SEARCH_THROTTLED_FIELD = "search.throttled" } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { + builder + .startObject() .field(HEALTH_FIELD, health) .field(STATUS_FIELD, status) .field(INDEX_FIELD, index) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatShardsHelpers.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatShardsHelpers.kt index f092b7f12..7aa3025c5 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatShardsHelpers.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatShardsHelpers.kt @@ -41,7 +41,9 @@ import java.time.Instant import java.util.Locale import java.util.function.Function -class CatShardsRequestWrapper(val pathParams: String = "") : ActionRequest() { +class CatShardsRequestWrapper( + val pathParams: String = "", +) : ActionRequest() { var clusterStateRequest: ClusterStateRequest = ClusterStateRequest().clear().nodes(true).routingTable(true) var indicesStatsRequest: IndicesStatsRequest = @@ -63,19 +65,22 @@ class CatShardsRequestWrapper(val pathParams: String = "") : ActionRequest() { override fun validate(): ActionRequestValidationException? { var exception: ActionRequestValidationException? 
= null - if (pathParams.isNotBlank() && indicesList.any { !VALID_INDEX_NAME_REGEX.containsMatchIn(it) }) - exception = ValidateActions.addValidationError( - "The path parameters do not form a valid, comma-separated list of data streams, indices, or index aliases.", - exception - ) + if (pathParams.isNotBlank() && indicesList.any { !VALID_INDEX_NAME_REGEX.containsMatchIn(it) }) { + exception = + ValidateActions.addValidationError( + "The path parameters do not form a valid, comma-separated list of data streams, indices, or index aliases.", + exception, + ) + } return exception } } class CatShardsResponseWrapper( stateResp: ClusterStateResponse, - indicesResp: IndicesStatsResponse -) : ActionResponse(), ToXContentObject { + indicesResp: IndicesStatsResponse, +) : ActionResponse(), + ToXContentObject { var shardInfoList: List = listOf() init { @@ -90,7 +95,10 @@ class CatShardsResponseWrapper( out.writeList(shardInfoList) } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.startObject() builder.startArray(WRAPPER_FIELD) shardInfoList.forEach { it.toXContent(builder, params) } @@ -98,7 +106,11 @@ class CatShardsResponseWrapper( return builder.endObject() } - private fun getOrNull(stats: S?, accessor: Function, func: Function): Any? { + private fun getOrNull( + stats: S?, + accessor: Function, + func: Function, + ): Any? { if (stats != null) { val t: T? 
= accessor.apply(stats) if (t != null) { @@ -110,7 +122,7 @@ class CatShardsResponseWrapper( private fun compileShardInfo( stateResp: ClusterStateResponse, - indicesResp: IndicesStatsResponse + indicesResp: IndicesStatsResponse, ): List { val list = mutableListOf() @@ -123,107 +135,130 @@ class CatShardsResponseWrapper( commitStats = shardStats.commitStats } - var shardInfo = ShardInfo( - index = shard.indexName, - shard = "${shard.id}", - primaryOrReplica = if (shard.primary()) "p" else "r", - state = shard.state().name, - docs = getOrNull(commonStats, CommonStats::getDocs, DocsStats::getCount)?.toString(), - store = getOrNull(commonStats, CommonStats::getStore, StoreStats::getSize)?.toString(), - id = null, // Added below - node = null, // Added below - completionSize = getOrNull(commonStats, CommonStats::getCompletion, CompletionStats::getSize)?.toString(), - fieldDataMemory = getOrNull(commonStats, CommonStats::getFieldData, FieldDataStats::getMemorySize)?.toString(), - fieldDataEvictions = getOrNull(commonStats, CommonStats::getFieldData, FieldDataStats::getEvictions)?.toString(), - flushTotal = getOrNull(commonStats, CommonStats::getFlush, FlushStats::getTotal)?.toString(), - flushTotalTime = getOrNull(commonStats, CommonStats::getFlush, FlushStats::getTotalTime)?.toString(), - getCurrent = getOrNull(commonStats, CommonStats::getGet, GetStats::current)?.toString(), - getTime = getOrNull(commonStats, CommonStats::getGet, GetStats::getTime)?.toString(), - getTotal = getOrNull(commonStats, CommonStats::getGet, GetStats::getCount)?.toString(), - getExistsTime = getOrNull(commonStats, CommonStats::getGet, GetStats::getExistsTime)?.toString(), - getExistsTotal = getOrNull(commonStats, CommonStats::getGet, GetStats::getExistsCount)?.toString(), - getMissingTime = getOrNull(commonStats, CommonStats::getGet, GetStats::getMissingTime)?.toString(), - getMissingTotal = getOrNull(commonStats, CommonStats::getGet, GetStats::getMissingCount)?.toString(), - 
indexingDeleteCurrent = getOrNull(commonStats, CommonStats::getIndexing, { it.total.deleteCurrent })?.toString(), - indexingDeleteTime = getOrNull(commonStats, CommonStats::getIndexing, { it.total.deleteTime })?.toString(), - indexingDeleteTotal = getOrNull(commonStats, CommonStats::getIndexing, { it.total.deleteCount })?.toString(), - indexingIndexCurrent = getOrNull(commonStats, CommonStats::getIndexing, { it.total.indexCurrent })?.toString(), - indexingIndexTime = getOrNull(commonStats, CommonStats::getIndexing, { it.total.indexTime })?.toString(), - indexingIndexTotal = getOrNull(commonStats, CommonStats::getIndexing, { it.total.indexCount })?.toString(), - indexingIndexFailed = getOrNull(commonStats, CommonStats::getIndexing, { it.total.indexFailedCount })?.toString(), - mergesCurrent = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getCurrent)?.toString(), - mergesCurrentDocs = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getCurrentNumDocs)?.toString(), - mergesCurrentSize = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getCurrentSize)?.toString(), - mergesTotal = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getTotal)?.toString(), - mergesTotalDocs = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getTotalNumDocs)?.toString(), - mergesTotalSize = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getTotalSize)?.toString(), - mergesTotalTime = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getTotalTime)?.toString(), - queryCacheMemory = getOrNull(commonStats, CommonStats::getQueryCache, QueryCacheStats::getMemorySize)?.toString(), - queryCacheEvictions = getOrNull(commonStats, CommonStats::getQueryCache, QueryCacheStats::getEvictions)?.toString(), - recoverySourceType = null, // Added below - refreshTotal = getOrNull(commonStats, CommonStats::getRefresh, RefreshStats::getTotal)?.toString(), - refreshTime = getOrNull(commonStats, CommonStats::getRefresh, 
RefreshStats::getTotalTime)?.toString(), - searchFetchCurrent = getOrNull(commonStats, CommonStats::getSearch, { it.total.fetchCurrent })?.toString(), - searchFetchTime = getOrNull(commonStats, CommonStats::getSearch, { it.total.fetchTime })?.toString(), - searchFetchTotal = getOrNull(commonStats, CommonStats::getSearch, { it.total.fetchCount })?.toString(), - searchOpenContexts = getOrNull(commonStats, CommonStats::getSearch, SearchStats::getOpenContexts)?.toString(), - searchQueryCurrent = getOrNull(commonStats, CommonStats::getSearch, { it.total.queryCurrent })?.toString(), - searchQueryTime = getOrNull(commonStats, CommonStats::getSearch, { it.total.queryTime })?.toString(), - searchQueryTotal = getOrNull(commonStats, CommonStats::getSearch, { it.total.queryCount })?.toString(), - searchScrollCurrent = getOrNull(commonStats, CommonStats::getSearch, { it.total.scrollCurrent })?.toString(), - searchScrollTime = getOrNull(commonStats, CommonStats::getSearch, { it.total.scrollTime })?.toString(), - searchScrollTotal = getOrNull(commonStats, CommonStats::getSearch, { it.total.scrollCount })?.toString(), - segmentsCount = getOrNull(commonStats, CommonStats::getSegments, SegmentsStats::getCount)?.toString(), - segmentsMemory = getOrNull(commonStats, CommonStats::getSegments, SegmentsStats::getZeroMemory)?.toString(), - segmentsIndexWriterMemory = - getOrNull(commonStats, CommonStats::getSegments, SegmentsStats::getIndexWriterMemory)?.toString(), - segmentsVersionMapMemory = getOrNull(commonStats, CommonStats::getSegments, SegmentsStats::getVersionMapMemory)?.toString(), - fixedBitsetMemory = getOrNull(commonStats, CommonStats::getSegments, SegmentsStats::getBitsetMemory)?.toString(), - globalCheckpoint = getOrNull(shardStats, ShardStats::getSeqNoStats, SeqNoStats::getGlobalCheckpoint)?.toString(), - localCheckpoint = getOrNull(shardStats, ShardStats::getSeqNoStats, SeqNoStats::getLocalCheckpoint)?.toString(), - maxSeqNo = getOrNull(shardStats, 
ShardStats::getSeqNoStats, SeqNoStats::getMaxSeqNo)?.toString(), - syncId = commitStats?.userData?.get(Engine.SYNC_COMMIT_ID), - unassignedAt = null, // Added below - unassignedDetails = null, // Added below - unassignedFor = null, // Added below - unassignedReason = null // Added below - ) + var shardInfo = + ShardInfo( + index = shard.indexName, + shard = "${shard.id}", + primaryOrReplica = if (shard.primary()) "p" else "r", + state = shard.state().name, + docs = getOrNull(commonStats, CommonStats::getDocs, DocsStats::getCount)?.toString(), + store = getOrNull(commonStats, CommonStats::getStore, StoreStats::getSize)?.toString(), + id = null, // Added below + node = null, // Added below + completionSize = getOrNull(commonStats, CommonStats::getCompletion, CompletionStats::getSize)?.toString(), + fieldDataMemory = getOrNull(commonStats, CommonStats::getFieldData, FieldDataStats::getMemorySize)?.toString(), + fieldDataEvictions = getOrNull(commonStats, CommonStats::getFieldData, FieldDataStats::getEvictions)?.toString(), + flushTotal = getOrNull(commonStats, CommonStats::getFlush, FlushStats::getTotal)?.toString(), + flushTotalTime = getOrNull(commonStats, CommonStats::getFlush, FlushStats::getTotalTime)?.toString(), + getCurrent = getOrNull(commonStats, CommonStats::getGet, GetStats::current)?.toString(), + getTime = getOrNull(commonStats, CommonStats::getGet, GetStats::getTime)?.toString(), + getTotal = getOrNull(commonStats, CommonStats::getGet, GetStats::getCount)?.toString(), + getExistsTime = getOrNull(commonStats, CommonStats::getGet, GetStats::getExistsTime)?.toString(), + getExistsTotal = getOrNull(commonStats, CommonStats::getGet, GetStats::getExistsCount)?.toString(), + getMissingTime = getOrNull(commonStats, CommonStats::getGet, GetStats::getMissingTime)?.toString(), + getMissingTotal = getOrNull(commonStats, CommonStats::getGet, GetStats::getMissingCount)?.toString(), + indexingDeleteCurrent = getOrNull(commonStats, CommonStats::getIndexing, { 
it.total.deleteCurrent })?.toString(), + indexingDeleteTime = getOrNull(commonStats, CommonStats::getIndexing, { it.total.deleteTime })?.toString(), + indexingDeleteTotal = getOrNull(commonStats, CommonStats::getIndexing, { it.total.deleteCount })?.toString(), + indexingIndexCurrent = getOrNull(commonStats, CommonStats::getIndexing, { it.total.indexCurrent })?.toString(), + indexingIndexTime = getOrNull(commonStats, CommonStats::getIndexing, { it.total.indexTime })?.toString(), + indexingIndexTotal = getOrNull(commonStats, CommonStats::getIndexing, { it.total.indexCount })?.toString(), + indexingIndexFailed = getOrNull(commonStats, CommonStats::getIndexing, { it.total.indexFailedCount })?.toString(), + mergesCurrent = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getCurrent)?.toString(), + mergesCurrentDocs = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getCurrentNumDocs)?.toString(), + mergesCurrentSize = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getCurrentSize)?.toString(), + mergesTotal = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getTotal)?.toString(), + mergesTotalDocs = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getTotalNumDocs)?.toString(), + mergesTotalSize = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getTotalSize)?.toString(), + mergesTotalTime = getOrNull(commonStats, CommonStats::getMerge, MergeStats::getTotalTime)?.toString(), + queryCacheMemory = getOrNull(commonStats, CommonStats::getQueryCache, QueryCacheStats::getMemorySize)?.toString(), + queryCacheEvictions = getOrNull(commonStats, CommonStats::getQueryCache, QueryCacheStats::getEvictions)?.toString(), + recoverySourceType = null, // Added below + refreshTotal = getOrNull(commonStats, CommonStats::getRefresh, RefreshStats::getTotal)?.toString(), + refreshTime = getOrNull(commonStats, CommonStats::getRefresh, RefreshStats::getTotalTime)?.toString(), + searchFetchCurrent = getOrNull(commonStats, 
CommonStats::getSearch, { it.total.fetchCurrent })?.toString(), + searchFetchTime = getOrNull(commonStats, CommonStats::getSearch, { it.total.fetchTime })?.toString(), + searchFetchTotal = getOrNull(commonStats, CommonStats::getSearch, { it.total.fetchCount })?.toString(), + searchOpenContexts = getOrNull(commonStats, CommonStats::getSearch, SearchStats::getOpenContexts)?.toString(), + searchQueryCurrent = getOrNull(commonStats, CommonStats::getSearch, { it.total.queryCurrent })?.toString(), + searchQueryTime = getOrNull(commonStats, CommonStats::getSearch, { it.total.queryTime })?.toString(), + searchQueryTotal = getOrNull(commonStats, CommonStats::getSearch, { it.total.queryCount })?.toString(), + searchScrollCurrent = getOrNull(commonStats, CommonStats::getSearch, { it.total.scrollCurrent })?.toString(), + searchScrollTime = getOrNull(commonStats, CommonStats::getSearch, { it.total.scrollTime })?.toString(), + searchScrollTotal = getOrNull(commonStats, CommonStats::getSearch, { it.total.scrollCount })?.toString(), + segmentsCount = getOrNull(commonStats, CommonStats::getSegments, SegmentsStats::getCount)?.toString(), + segmentsMemory = getOrNull(commonStats, CommonStats::getSegments, SegmentsStats::getZeroMemory)?.toString(), + segmentsIndexWriterMemory = + getOrNull(commonStats, CommonStats::getSegments, SegmentsStats::getIndexWriterMemory)?.toString(), + segmentsVersionMapMemory = + getOrNull( + commonStats, + CommonStats::getSegments, + SegmentsStats::getVersionMapMemory, + )?.toString(), + fixedBitsetMemory = getOrNull(commonStats, CommonStats::getSegments, SegmentsStats::getBitsetMemory)?.toString(), + globalCheckpoint = getOrNull(shardStats, ShardStats::getSeqNoStats, SeqNoStats::getGlobalCheckpoint)?.toString(), + localCheckpoint = getOrNull(shardStats, ShardStats::getSeqNoStats, SeqNoStats::getLocalCheckpoint)?.toString(), + maxSeqNo = getOrNull(shardStats, ShardStats::getSeqNoStats, SeqNoStats::getMaxSeqNo)?.toString(), + syncId = 
commitStats?.userData?.get(Engine.SYNC_COMMIT_ID), + unassignedAt = null, // Added below + unassignedDetails = null, // Added below + unassignedFor = null, // Added below + unassignedReason = null, // Added below + ) if (shard.assignedToNode()) { val id = shard.currentNodeId() val node = StringBuilder() - node.append(stateResp.state.nodes().get(id).name) + node.append( + stateResp.state + .nodes() + .get(id) + .name, + ) if (shard.relocating()) { val reloNodeId = shard.relocatingNodeId() - val reloName = stateResp.state.nodes().get(reloNodeId).name + val reloName = + stateResp.state + .nodes() + .get(reloNodeId) + .name node.append(" -> ") node.append(reloNodeId) node.append(" ") node.append(reloName) } - shardInfo = shardInfo.copy( - id = id, - node = node.toString() - ) + shardInfo = + shardInfo.copy( + id = id, + node = node.toString(), + ) } if (shard.unassignedInfo() != null) { val unassignedTime = Instant.ofEpochMilli(shard.unassignedInfo().unassignedTimeInMillis) - shardInfo = shardInfo.copy( - unassignedReason = shard.unassignedInfo().reason.name, - unassignedAt = UnassignedInfo.DATE_TIME_FORMATTER.format(unassignedTime), - unassignedFor = - TimeValue.timeValueMillis(System.currentTimeMillis() - shard.unassignedInfo().unassignedTimeInMillis).stringRep, - unassignedDetails = shard.unassignedInfo().details - ) + shardInfo = + shardInfo.copy( + unassignedReason = shard.unassignedInfo().reason.name, + unassignedAt = UnassignedInfo.DATE_TIME_FORMATTER.format(unassignedTime), + unassignedFor = + TimeValue.timeValueMillis(System.currentTimeMillis() - shard.unassignedInfo().unassignedTimeInMillis).stringRep, + unassignedDetails = shard.unassignedInfo().details, + ) } if (shard.recoverySource() != null) { - shardInfo = shardInfo.copy( - recoverySourceType = shard.recoverySource().type.toString().lowercase(Locale.ROOT) - ) + shardInfo = + shardInfo.copy( + recoverySourceType = + shard + .recoverySource() + .type + .toString() + .lowercase(Locale.ROOT), + ) } 
list.add(shardInfo) @@ -293,8 +328,9 @@ class CatShardsResponseWrapper( val unassignedAt: String?, val unassignedDetails: String?, val unassignedFor: String?, - val unassignedReason: String? - ) : ToXContentObject, Writeable { + val unassignedReason: String?, + ) : ToXContentObject, + Writeable { companion object { const val INDEX_FIELD = "index" const val SHARD_FIELD = "shard" @@ -360,8 +396,12 @@ class CatShardsResponseWrapper( const val UNASSIGNED_REASON_FIELD = "unassigned.reason" } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - builder.startObject() + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { + builder + .startObject() .field(INDEX_FIELD, index) .field(SHARD_FIELD, shard) .field(PRIMARY_OR_REPLICA_FIELD, primaryOrReplica) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/SupportedClusterMetricsSettingsExtensions.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/SupportedClusterMetricsSettingsExtensions.kt index 18afc6cc4..7dbbbbdfc 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/SupportedClusterMetricsSettingsExtensions.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/SupportedClusterMetricsSettingsExtensions.kt @@ -41,7 +41,10 @@ import org.opensearch.transport.client.Client * @param client The [Client] used to call the respective transport action. * @throws IllegalArgumentException When the requested API is not supported by this feature. 
*/ -suspend fun executeTransportAction(clusterMetricsInput: ClusterMetricsInput, client: Client): ActionResponse { +suspend fun executeTransportAction( + clusterMetricsInput: ClusterMetricsInput, + client: Client, +): ActionResponse { val request = resolveToActionRequest(clusterMetricsInput) return when (clusterMetricsInput.clusterMetricType) { ClusterMetricsInput.ClusterMetricType.CAT_INDICES -> { @@ -55,12 +58,17 @@ suspend fun executeTransportAction(clusterMetricsInput: ClusterMetricsInput, cli client.suspendUntil { admin().cluster().state(request.clusterStateRequest, it) } return CatIndicesResponseWrapper(healthResponse, stateResponse, indexSettingsResponse, indicesResponse) } - ClusterMetricsInput.ClusterMetricType.CAT_PENDING_TASKS -> + + ClusterMetricsInput.ClusterMetricType.CAT_PENDING_TASKS -> { client.suspendUntil { admin().cluster().pendingClusterTasks(request as PendingClusterTasksRequest, it) } - ClusterMetricsInput.ClusterMetricType.CAT_RECOVERY -> + } + + ClusterMetricsInput.ClusterMetricType.CAT_RECOVERY -> { client.suspendUntil { admin().indices().recoveries(request as RecoveryRequest, it) } + } + ClusterMetricsInput.ClusterMetricType.CAT_SHARDS -> { request as CatShardsRequestWrapper val stateResponse: ClusterStateResponse = @@ -69,23 +77,37 @@ suspend fun executeTransportAction(clusterMetricsInput: ClusterMetricsInput, cli client.suspendUntil { admin().indices().stats(request.indicesStatsRequest, it) } return CatShardsResponseWrapper(stateResponse, indicesResponse) } - ClusterMetricsInput.ClusterMetricType.CAT_SNAPSHOTS -> + + ClusterMetricsInput.ClusterMetricType.CAT_SNAPSHOTS -> { client.suspendUntil { admin().cluster().getSnapshots(request as GetSnapshotsRequest, it) } - ClusterMetricsInput.ClusterMetricType.CAT_TASKS -> + } + + ClusterMetricsInput.ClusterMetricType.CAT_TASKS -> { client.suspendUntil { admin().cluster().listTasks(request as ListTasksRequest, it) } - ClusterMetricsInput.ClusterMetricType.CLUSTER_HEALTH -> + } + + 
ClusterMetricsInput.ClusterMetricType.CLUSTER_HEALTH -> { client.suspendUntil { admin().cluster().health(request as ClusterHealthRequest, it) } + } + ClusterMetricsInput.ClusterMetricType.CLUSTER_SETTINGS -> { val stateResponse: ClusterStateResponse = client.suspendUntil { admin().cluster().state(request as ClusterStateRequest, it) } val metadata: Metadata = stateResponse.state.metadata return ClusterGetSettingsResponse(metadata.persistentSettings(), metadata.transientSettings(), Settings.EMPTY) } - ClusterMetricsInput.ClusterMetricType.CLUSTER_STATS -> + + ClusterMetricsInput.ClusterMetricType.CLUSTER_STATS -> { client.suspendUntil { admin().cluster().clusterStats(request as ClusterStatsRequest, it) } - ClusterMetricsInput.ClusterMetricType.NODES_STATS -> + } + + ClusterMetricsInput.ClusterMetricType.NODES_STATS -> { client.suspendUntil { admin().cluster().nodesStats(request as NodesStatsRequest, it) } - else -> throw IllegalArgumentException("Unsupported API request type: ${request.javaClass.name}") + } + + else -> { + throw IllegalArgumentException("Unsupported API request type: ${request.javaClass.name}") + } } } @@ -94,51 +116,84 @@ suspend fun executeTransportAction(clusterMetricsInput: ClusterMetricsInput, cli * @return The [ActionResponse] values formatted in a [HashMap]. * @throws IllegalArgumentException when the [ActionResponse] is not supported by this feature. 
*/ -fun ActionResponse.toMap(): Map { - return when (this) { - is ClusterHealthResponse -> redactFieldsFromResponse( - this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CLUSTER_HEALTH.defaultPath) - ) - is ClusterStatsResponse -> redactFieldsFromResponse( - this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CLUSTER_STATS.defaultPath) - ) - is ClusterGetSettingsResponse -> redactFieldsFromResponse( - this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CLUSTER_SETTINGS.defaultPath) - ) - is CatIndicesResponseWrapper -> redactFieldsFromResponse( - this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_INDICES.defaultPath) - ) - is CatShardsResponseWrapper -> redactFieldsFromResponse( - this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_SHARDS.defaultPath) - ) - is NodesStatsResponse -> redactFieldsFromResponse( - this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.NODES_STATS.defaultPath) - ) - is PendingClusterTasksResponse -> redactFieldsFromResponse( - this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_PENDING_TASKS.defaultPath) - ) - is RecoveryResponse -> redactFieldsFromResponse( - this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_RECOVERY.defaultPath) - ) - is GetSnapshotsResponse -> redactFieldsFromResponse( - this.convertToMap(), - SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_SNAPSHOTS.defaultPath) - ) - is ListTasksResponse -> redactFieldsFromResponse( - this.convertToMap(), - 
SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_TASKS.defaultPath) - ) - else -> throw IllegalArgumentException("Unsupported ActionResponse type: ${this.javaClass.name}") +fun ActionResponse.toMap(): Map = + when (this) { + is ClusterHealthResponse -> { + redactFieldsFromResponse( + this.convertToMap(), + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CLUSTER_HEALTH.defaultPath), + ) + } + + is ClusterStatsResponse -> { + redactFieldsFromResponse( + this.convertToMap(), + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CLUSTER_STATS.defaultPath), + ) + } + + is ClusterGetSettingsResponse -> { + redactFieldsFromResponse( + this.convertToMap(), + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CLUSTER_SETTINGS.defaultPath), + ) + } + + is CatIndicesResponseWrapper -> { + redactFieldsFromResponse( + this.convertToMap(), + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_INDICES.defaultPath), + ) + } + + is CatShardsResponseWrapper -> { + redactFieldsFromResponse( + this.convertToMap(), + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_SHARDS.defaultPath), + ) + } + + is NodesStatsResponse -> { + redactFieldsFromResponse( + this.convertToMap(), + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.NODES_STATS.defaultPath), + ) + } + + is PendingClusterTasksResponse -> { + redactFieldsFromResponse( + this.convertToMap(), + SupportedClusterMetricsSettings.getSupportedJsonPayload( + ClusterMetricsInput.ClusterMetricType.CAT_PENDING_TASKS.defaultPath, + ), + ) + } + + is RecoveryResponse -> { + redactFieldsFromResponse( + this.convertToMap(), + 
SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_RECOVERY.defaultPath), + ) + } + + is GetSnapshotsResponse -> { + redactFieldsFromResponse( + this.convertToMap(), + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_SNAPSHOTS.defaultPath), + ) + } + + is ListTasksResponse -> { + redactFieldsFromResponse( + this.convertToMap(), + SupportedClusterMetricsSettings.getSupportedJsonPayload(ClusterMetricsInput.ClusterMetricType.CAT_TASKS.defaultPath), + ) + } + + else -> { + throw IllegalArgumentException("Unsupported ActionResponse type: ${this.javaClass.name}") + } } -} /** * Populates a [HashMap] with only the values that support being exposed to users. @@ -149,22 +204,31 @@ fun ActionResponse.toMap(): Map { @Suppress("UNCHECKED_CAST") fun redactFieldsFromResponse( mappedActionResponse: Map, - supportedJsonPayload: Map> -): Map { - return when { - supportedJsonPayload.isEmpty() -> mappedActionResponse + supportedJsonPayload: Map>, +): Map = + when { + supportedJsonPayload.isEmpty() -> { + mappedActionResponse + } + else -> { val output = hashMapOf() for ((key, value) in supportedJsonPayload) { when (val mappedValue = mappedActionResponse[key]) { - is Map<*, *> -> output[key] = XContentMapValues.filter( - mappedActionResponse[key] as MutableMap?, - value.toTypedArray(), arrayOf() - ) - else -> output[key] = mappedValue ?: hashMapOf() + is Map<*, *> -> { + output[key] = + XContentMapValues.filter( + mappedActionResponse[key] as MutableMap?, + value.toTypedArray(), + arrayOf(), + ) + } + + else -> { + output[key] = mappedValue ?: hashMapOf() + } } } output } } -} diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationConversionUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationConversionUtils.kt index d6e8c6ec0..9821fa194 100644 --- 
a/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationConversionUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationConversionUtils.kt @@ -27,9 +27,7 @@ import java.net.URISyntaxException import java.util.Locale class DestinationConversionUtils { - companion object { - fun convertDestinationToNotificationConfig(destination: Destination): NotificationConfig? { when (destination.type) { DestinationType.CHIME -> { @@ -40,9 +38,10 @@ class DestinationConversionUtils { destination.name, description, ConfigType.CHIME, - chime + chime, ) } + DestinationType.SLACK -> { val alertSlack = destination.slack ?: return null val slack = Slack(alertSlack.url) @@ -51,9 +50,10 @@ class DestinationConversionUtils { destination.name, description, ConfigType.SLACK, - slack + slack, ) } + // TODO: Add this back after adding SNS to Destination data models // DestinationType.SNS -> { // val alertSNS = destination.sns ?: return null @@ -68,37 +68,42 @@ class DestinationConversionUtils { // } DestinationType.CUSTOM_WEBHOOK -> { val alertWebhook = destination.customWebhook ?: return null - val uri = buildUri( - alertWebhook.url, - alertWebhook.scheme, - alertWebhook.host, - alertWebhook.port, - alertWebhook.path, - alertWebhook.queryParams - ).toString() - val methodType = when (alertWebhook.method?.uppercase(Locale.ENGLISH)) { - "POST" -> HttpMethodType.POST - "PUT" -> HttpMethodType.PUT - "PATCH" -> HttpMethodType.PATCH - else -> HttpMethodType.POST - } + val uri = + buildUri( + alertWebhook.url, + alertWebhook.scheme, + alertWebhook.host, + alertWebhook.port, + alertWebhook.path, + alertWebhook.queryParams, + ).toString() + val methodType = + when (alertWebhook.method?.uppercase(Locale.ENGLISH)) { + "POST" -> HttpMethodType.POST + "PUT" -> HttpMethodType.PUT + "PATCH" -> HttpMethodType.PATCH + else -> HttpMethodType.POST + } val webhook = Webhook(uri, alertWebhook.headerParams, methodType) val description = 
"Webhook destination created from the Alerting plugin" return NotificationConfig( destination.name, description, ConfigType.WEBHOOK, - webhook + webhook, ) } + DestinationType.EMAIL -> { val alertEmail = destination.email ?: return null val recipients = mutableListOf() val emailGroupIds = mutableListOf() alertEmail.recipients.forEach { - if (it.type == Recipient.RecipientType.EMAIL_GROUP) + if (it.type == Recipient.RecipientType.EMAIL_GROUP) { it.emailGroupID?.let { emailGroup -> emailGroupIds.add(emailGroup) } - else it.email?.let { emailRecipient -> recipients.add(EmailRecipient(emailRecipient)) } + } else { + it.email?.let { emailRecipient -> recipients.add(EmailRecipient(emailRecipient)) } + } } val email = Email(alertEmail.emailAccountID, recipients, emailGroupIds) @@ -107,10 +112,13 @@ class DestinationConversionUtils { destination.name, description, ConfigType.EMAIL, - email + email, ) } - else -> return null + + else -> { + return null + } } } @@ -122,12 +130,12 @@ class DestinationConversionUtils { emailAccount.name, description, ConfigType.SMTP_ACCOUNT, - smtpAccount + smtpAccount, ) } fun convertEmailGroupToNotificationConfig( - emailGroup: org.opensearch.alerting.model.destination.email.EmailGroup + emailGroup: org.opensearch.alerting.model.destination.email.EmailGroup, ): NotificationConfig { val recipients = mutableListOf() emailGroup.emails.forEach { @@ -140,7 +148,7 @@ class DestinationConversionUtils { emailGroup.name, description, ConfigType.EMAIL_GROUP, - notificationEmailGroup + notificationEmailGroup, ) } @@ -150,7 +158,7 @@ class DestinationConversionUtils { host: String?, port: Int, path: String?, - queryParams: Map + queryParams: Map, ): URI? 
{ return try { if (Strings.isNullOrEmpty(endpoint)) { @@ -165,7 +173,12 @@ class DestinationConversionUtils { if (queryParams.isNotEmpty()) { for ((key, value) in queryParams) uriBuilder.addParameter(key, value) } - return uriBuilder.setScheme(uriScheme).setHost(host).setPort(port).setPath(path).build() + return uriBuilder + .setScheme(uriScheme) + .setHost(host) + .setPort(port) + .setPath(path) + .build() } URIBuilder(endpoint).build() } catch (e: URISyntaxException) { @@ -173,12 +186,11 @@ class DestinationConversionUtils { } } - fun convertAlertingToNotificationMethodType(alertMethodType: EmailAccount.MethodType): MethodType { - return when (alertMethodType) { + fun convertAlertingToNotificationMethodType(alertMethodType: EmailAccount.MethodType): MethodType = + when (alertMethodType) { EmailAccount.MethodType.NONE -> MethodType.NONE EmailAccount.MethodType.SSL -> MethodType.SSL EmailAccount.MethodType.TLS -> MethodType.START_TLS } - } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationCoordinator.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationCoordinator.kt index 9cee3f893..8dc8c39a3 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationCoordinator.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationCoordinator.kt @@ -26,9 +26,10 @@ class DestinationMigrationCoordinator( private val client: Client, private val clusterService: ClusterService, private val threadPool: ThreadPool, - private val scheduledJobIndices: ScheduledJobIndices -) : ClusterStateListener, CoroutineScope, LifecycleListener() { - + private val scheduledJobIndices: ScheduledJobIndices, +) : LifecycleListener(), + ClusterStateListener, + CoroutineScope { private val logger = LogManager.getLogger(javaClass) override val coroutineContext: CoroutineContext @@ -85,21 +86,22 @@ class 
DestinationMigrationCoordinator( return } - val scheduledJob = Runnable { - launch { - try { - if (DestinationMigrationUtilService.finishFlag) { - logger.info("Cancel background destination migration process.") - scheduledMigration?.cancel() - } + val scheduledJob = + Runnable { + launch { + try { + if (DestinationMigrationUtilService.finishFlag) { + logger.info("Cancel background destination migration process.") + scheduledMigration?.cancel() + } - logger.info("Performing migration of destination data.") - DestinationMigrationUtilService.migrateDestinations(client as NodeClient) - } catch (e: Exception) { - logger.error("Failed to migrate destination data", e) + logger.info("Performing migration of destination data.") + DestinationMigrationUtilService.migrateDestinations(client as NodeClient) + } catch (e: Exception) { + logger.error("Failed to migrate destination data", e) + } } } - } scheduledMigration = threadPool.scheduleWithFixedDelay(scheduledJob, TimeValue.timeValueMinutes(1), ThreadPool.Names.MANAGEMENT) } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilService.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilService.kt index 0acfb66a2..306040402 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilService.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilService.kt @@ -40,9 +40,7 @@ import org.opensearch.transport.client.node.NodeClient import java.time.Instant class DestinationMigrationUtilService { - companion object { - private val logger = LogManager.getLogger(DestinationMigrationUtilService::class) @Volatile @@ -72,7 +70,7 @@ class DestinationMigrationUtilService { "Need to migrate ${emailAccountsToMigrate.size} email accounts, " + "${emailGroupsToMigrate.size} email groups and " + "${destinationsToMigrate.size} 
destinations " + - "(${configsToMigrate.size} configs total)" + "(${configsToMigrate.size} configs total)", ) if (configsToMigrate.isEmpty()) { finishFlag = true @@ -88,7 +86,10 @@ class DestinationMigrationUtilService { } } - private suspend fun deleteOldDestinations(client: NodeClient, destinationIds: List): List { + private suspend fun deleteOldDestinations( + client: NodeClient, + destinationIds: List, + ): List { val bulkDeleteRequest = BulkRequest().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) destinationIds.forEach { val deleteRequest = DeleteRequest(ScheduledJob.SCHEDULED_JOBS_INDEX, it) @@ -108,21 +109,24 @@ class DestinationMigrationUtilService { private suspend fun createNotificationChannelIfNotExists( client: NodeClient, - notificationConfigInfoList: List> + notificationConfigInfoList: List>, ): List { val migratedNotificationConfigs = mutableListOf() notificationConfigInfoList.forEach { val notificationConfigInfo = it.first val userStr = it.second - val createNotificationConfigRequest = CreateNotificationConfigRequest( - notificationConfigInfo.notificationConfig, - notificationConfigInfo.configId - ) + val createNotificationConfigRequest = + CreateNotificationConfigRequest( + notificationConfigInfo.notificationConfig, + notificationConfigInfo.configId, + ) try { // TODO: recreate user object to pass along the same permissions. 
Make sure this works when user based security is removed client.threadPool().threadContext.stashContext().use { if (userStr.isNotBlank()) { - client.threadPool().threadContext + client + .threadPool() + .threadContext .putTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, userStr) } val createResponse = createNotificationConfig(client, createNotificationConfigRequest) @@ -136,7 +140,7 @@ class DestinationMigrationUtilService { logger.warn( "Failed to migrate over Destination ${notificationConfigInfo.configId} because failed to " + "create channel in Notification plugin.", - e + e, ) } } @@ -144,26 +148,33 @@ class DestinationMigrationUtilService { return migratedNotificationConfigs } - private suspend fun retrieveConfigsToMigrate(client: NodeClient, configName: String): List> { + private suspend fun retrieveConfigsToMigrate( + client: NodeClient, + configName: String, + ): List> { var start = 0 val size = 100 val notificationConfigInfoList = mutableListOf>() var hasMoreResults = true while (hasMoreResults) { - val searchSourceBuilder = SearchSourceBuilder() - .size(size) - .from(start) - .fetchSource(FetchSourceContext(true, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY)) - .seqNoAndPrimaryTerm(true) - .version(true) - val queryBuilder = QueryBuilders.boolQuery() - .should(QueryBuilders.existsQuery(configName)) + val searchSourceBuilder = + SearchSourceBuilder() + .size(size) + .from(start) + .fetchSource(FetchSourceContext(true, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY)) + .seqNoAndPrimaryTerm(true) + .version(true) + val queryBuilder = + QueryBuilders + .boolQuery() + .should(QueryBuilders.existsQuery(configName)) searchSourceBuilder.query(queryBuilder) - val searchRequest = SearchRequest() - .source(searchSourceBuilder) - .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + val searchRequest = + SearchRequest() + .source(searchSourceBuilder) + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) val response: SearchResponse = client.suspendUntil { 
client.search(searchRequest, it) } if (response.status() != RestStatus.OK) { @@ -174,8 +185,10 @@ class DestinationMigrationUtilService { hasMoreResults = false } for (hit in response.hits) { - val xcp = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) + val xcp = + XContentType.JSON + .xContent() + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, hit.sourceAsString) var notificationConfig: NotificationConfig? = null var userStr = "" when (configName) { @@ -183,39 +196,46 @@ class DestinationMigrationUtilService { val emailGroup = EmailGroup.parseWithType(xcp, hit.id, hit.version) notificationConfig = convertEmailGroupToNotificationConfig(emailGroup) } + "email_account" -> { val emailAccount = EmailAccount.parseWithType(xcp, hit.id, hit.version) notificationConfig = convertEmailAccountToNotificationConfig(emailAccount) } + "destination" -> { XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, xcp.nextToken(), xcp) XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val destination = Destination.parse( - xcp, - hit.id, - hit.version, - hit.seqNo.toInt(), - hit.primaryTerm.toInt() - ) + val destination = + Destination.parse( + xcp, + hit.id, + hit.version, + hit.seqNo.toInt(), + hit.primaryTerm.toInt(), + ) userStr = destination.user.toString() notificationConfig = convertDestinationToNotificationConfig(destination) } - else -> logger.info("Unrecognized config name [$configName] to migrate") + + else -> { + logger.info("Unrecognized config name [$configName] to migrate") + } } - if (notificationConfig != null) + if (notificationConfig != null) { notificationConfigInfoList.add( Pair( NotificationConfigInfo( hit.id, Instant.now(), Instant.now(), - notificationConfig + notificationConfig, ), - 
userStr - ) + userStr, + ), ) + } } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/NotificationApiUtils.kt b/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/NotificationApiUtils.kt index 1698de794..95a4939c5 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/NotificationApiUtils.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/util/destinationmigration/NotificationApiUtils.kt @@ -33,9 +33,7 @@ import org.opensearch.transport.client.Client import org.opensearch.transport.client.node.NodeClient class NotificationApiUtils { - companion object { - private val logger = LogManager.getLogger(NotificationApiUtils::class) private val defaultRetryPolicy = @@ -44,8 +42,11 @@ class NotificationApiUtils { /** * Gets a NotificationConfigInfo object by ID if it exists. */ - suspend fun getNotificationConfigInfo(client: NodeClient, id: String): NotificationConfigInfo? { - return try { + suspend fun getNotificationConfigInfo( + client: NodeClient, + id: String, + ): NotificationConfigInfo? 
= + try { val res: GetNotificationConfigResponse = getNotificationConfig(client, GetNotificationConfigRequest(setOf(id))) res.searchResult.objectList.firstOrNull() } catch (e: OpenSearchSecurityException) { @@ -56,40 +57,44 @@ class NotificationApiUtils { } null } - } private suspend fun getNotificationConfig( client: NodeClient, - getNotificationConfigRequest: GetNotificationConfigRequest + getNotificationConfigRequest: GetNotificationConfigRequest, ): GetNotificationConfigResponse { - val getNotificationConfigResponse: GetNotificationConfigResponse = NotificationsPluginInterface.suspendUntil { - this.getNotificationConfig( - client, - getNotificationConfigRequest, - it - ) - } + val getNotificationConfigResponse: GetNotificationConfigResponse = + NotificationsPluginInterface.suspendUntil { + this.getNotificationConfig( + client, + getNotificationConfigRequest, + it, + ) + } return getNotificationConfigResponse } suspend fun createNotificationConfig( client: NodeClient, createNotificationConfigRequest: CreateNotificationConfigRequest, - retryPolicy: BackoffPolicy = defaultRetryPolicy + retryPolicy: BackoffPolicy = defaultRetryPolicy, ): CreateNotificationConfigResponse { lateinit var createNotificationConfigResponse: CreateNotificationConfigResponse - val userStr = client.threadPool().threadContext - .getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT) + val userStr = + client + .threadPool() + .threadContext + .getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT) client.threadPool().threadContext.stashContext().use { client.threadPool().threadContext.putTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, userStr) retryPolicy.retryForNotification(logger) { - createNotificationConfigResponse = NotificationsPluginInterface.suspendUntil { - this.createNotificationConfig( - client, - createNotificationConfigRequest, - it - ) - } + createNotificationConfigResponse = + 
NotificationsPluginInterface.suspendUntil { + this.createNotificationConfig( + client, + createNotificationConfigRequest, + it, + ) + } } } return createNotificationConfigResponse @@ -107,13 +112,14 @@ class NotificationApiUtils { */ suspend fun LegacyBaseMessage.publishLegacyNotification(client: Client): String { val baseMessage = this - val res: LegacyPublishNotificationResponse = NotificationsPluginInterface.suspendUntil { - this.publishLegacyNotification( - (client as NodeClient), - LegacyPublishNotificationRequest(baseMessage), - it - ) - } + val res: LegacyPublishNotificationResponse = + NotificationsPluginInterface.suspendUntil { + this.publishLegacyNotification( + (client as NodeClient), + LegacyPublishNotificationRequest(baseMessage), + it, + ) + } validateResponseStatus(RestStatus.fromCode(res.destinationResponse.statusCode), res.destinationResponse.responseContent) return res.destinationResponse.responseContent } @@ -121,17 +127,22 @@ suspend fun LegacyBaseMessage.publishLegacyNotification(client: Client): String /** * Extension function for publishing a notification to a channel in the Notification plugin. 
*/ -suspend fun NotificationConfigInfo.sendNotification(client: Client, title: String, compiledMessage: String): String { +suspend fun NotificationConfigInfo.sendNotification( + client: Client, + title: String, + compiledMessage: String, +): String { val config = this - val res: SendNotificationResponse = NotificationsPluginInterface.suspendUntil { - this.sendNotification( - (client as NodeClient), - EventSource(title, config.configId, SeverityType.INFO), - ChannelMessage(compiledMessage, null, null), - listOf(config.configId), - it - ) - } + val res: SendNotificationResponse = + NotificationsPluginInterface.suspendUntil { + this.sendNotification( + (client as NodeClient), + EventSource(title, config.configId, SeverityType.INFO), + ChannelMessage(compiledMessage, null, null), + listOf(config.configId), + it, + ) + } validateResponseStatus(res.getStatus(), res.notificationEvent.toString()) return res.notificationEvent.toString() } @@ -147,15 +158,23 @@ fun NotificationConfigInfo.getTitle(subject: String?): String { /** * All valid response statuses. 
*/ -private val VALID_RESPONSE_STATUS = setOf( - RestStatus.OK.status, RestStatus.CREATED.status, RestStatus.ACCEPTED.status, - RestStatus.NON_AUTHORITATIVE_INFORMATION.status, RestStatus.NO_CONTENT.status, - RestStatus.RESET_CONTENT.status, RestStatus.PARTIAL_CONTENT.status, - RestStatus.MULTI_STATUS.status -) +private val VALID_RESPONSE_STATUS = + setOf( + RestStatus.OK.status, + RestStatus.CREATED.status, + RestStatus.ACCEPTED.status, + RestStatus.NON_AUTHORITATIVE_INFORMATION.status, + RestStatus.NO_CONTENT.status, + RestStatus.RESET_CONTENT.status, + RestStatus.PARTIAL_CONTENT.status, + RestStatus.MULTI_STATUS.status, + ) @Throws(OpenSearchStatusException::class) -fun validateResponseStatus(restStatus: RestStatus, responseContent: String) { +fun validateResponseStatus( + restStatus: RestStatus, + responseContent: String, +) { if (!VALID_RESPONSE_STATUS.contains(restStatus.status)) { throw OpenSearchStatusException("Failed: $responseContent", restStatus) } @@ -166,4 +185,7 @@ fun validateResponseStatus(restStatus: RestStatus, responseContent: String) { * This is used since an ID being referenced in a Monitor action could be either config depending on if * it's prior to or after migration. */ -data class NotificationActionConfigs(val destination: Destination?, val channel: NotificationConfigInfo?) 
+data class NotificationActionConfigs( + val destination: Destination?, + val channel: NotificationConfigInfo?, +) diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt index 1e613bd0f..e4fe8aee5 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/CompositeWorkflowRunner.kt @@ -47,7 +47,6 @@ import java.time.ZoneOffset import java.util.UUID object CompositeWorkflowRunner : WorkflowRunner() { - private val logger = LogManager.getLogger(javaClass) override suspend fun runWorkflow( @@ -56,7 +55,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { periodStart: Instant, periodEnd: Instant, dryRun: Boolean, - transportService: TransportService + transportService: TransportService, ): WorkflowRunResult { val workflowExecutionStartTime = Instant.now() @@ -64,11 +63,12 @@ object CompositeWorkflowRunner : WorkflowRunner() { val executionId = generateExecutionId(isTempWorkflow, workflow) - val (workflowMetadata, _) = WorkflowMetadataService.getOrCreateWorkflowMetadata( - workflow = workflow, - skipIndex = isTempWorkflow, - executionId = executionId - ) + val (workflowMetadata, _) = + WorkflowMetadataService.getOrCreateWorkflowMetadata( + workflow = workflow, + skipIndex = isTempWorkflow, + executionId = executionId, + ) var dataSources: DataSources? 
= null logger.debug("Workflow ${workflow.id} in $executionId execution is running") val delegates = (workflow.inputs[0] as CompositeInput).sequence.delegates.sortedBy { it.order } @@ -85,7 +85,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { workflowExecutionStartTime, Instant.now(), executionId, - AlertingException.wrap(e) + AlertingException.wrap(e), ) } // Validate the monitors size @@ -99,7 +99,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { var delegateMonitor: Monitor delegateMonitor = monitorsById[delegate.monitorId] ?: throw AlertingException.wrap( - IllegalStateException("Delegate monitor not found ${delegate.monitorId} for the workflow $workflow.id") + IllegalStateException("Delegate monitor not found ${delegate.monitorId} for the workflow $workflow.id"), ) if (delegate.chainedMonitorFindings != null) { val chainedMonitorIds: MutableList = mutableListOf() @@ -110,10 +110,13 @@ object CompositeWorkflowRunner : WorkflowRunner() { } val chainedMonitors = mutableListOf() chainedMonitorIds.forEach { - val chainedMonitor = monitorsById[it] - ?: throw AlertingException.wrap( - IllegalStateException("Chained finding monitor not found ${delegate.monitorId} for the workflow $workflow.id") - ) + val chainedMonitor = + monitorsById[it] + ?: throw AlertingException.wrap( + IllegalStateException( + "Chained finding monitor not found ${delegate.monitorId} for the workflow $workflow.id", + ), + ) chainedMonitors.add(chainedMonitor) } @@ -122,20 +125,30 @@ object CompositeWorkflowRunner : WorkflowRunner() { } catch (e: Exception) { logger.error("Failed to execute workflow due to failure in chained findings. 
Error: ${e.message}", e) return WorkflowRunResult( - workflow.id, workflow.name, emptyList(), workflowExecutionStartTime, Instant.now(), executionId, - AlertingException.wrap(e) + workflow.id, + workflow.name, + emptyList(), + workflowExecutionStartTime, + Instant.now(), + executionId, + AlertingException.wrap(e), ) } } - val workflowRunContext = WorkflowRunContext( - workflowId = workflowMetadata.workflowId, - workflowMetadataId = workflowMetadata.id, - chainedMonitorId = delegate.chainedMonitorFindings?.monitorId, - matchingDocIdsPerIndex = indexToDocIdsWithFindings!!.first, - auditDelegateMonitorAlerts = if (workflow.auditDelegateMonitorAlerts == null) true - else workflow.auditDelegateMonitorAlerts!!, - findingIds = indexToDocIdsWithFindings.second - ) + val workflowRunContext = + WorkflowRunContext( + workflowId = workflowMetadata.workflowId, + workflowMetadataId = workflowMetadata.id, + chainedMonitorId = delegate.chainedMonitorFindings?.monitorId, + matchingDocIdsPerIndex = indexToDocIdsWithFindings!!.first, + auditDelegateMonitorAlerts = + if (workflow.auditDelegateMonitorAlerts == null) { + true + } else { + workflow.auditDelegateMonitorAlerts!! + }, + findingIds = indexToDocIdsWithFindings.second, + ) try { dataSources = delegateMonitor.dataSources val delegateRunResult = @@ -147,7 +160,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { dryRun, workflowRunContext, executionId, - transportService + transportService, ) resultList.add(delegateRunResult!!) 
} catch (ex: Exception) { @@ -161,31 +174,33 @@ object CompositeWorkflowRunner : WorkflowRunner() { if (!isTempWorkflow) { WorkflowMetadataService.upsertWorkflowMetadata( workflowMetadata.copy(latestRunTime = workflowExecutionStartTime, latestExecutionId = executionId), - true + true, ) } val triggerResults = mutableMapOf() - val workflowRunResult = WorkflowRunResult( - workflowId = workflow.id, - workflowName = workflow.name, - monitorRunResults = resultList, - executionStartTime = workflowExecutionStartTime, - executionEndTime = null, - executionId = executionId, - error = lastErrorDelegateRun, - triggerResults = triggerResults - ) - val currentAlerts = try { - monitorCtx.alertIndices!!.createOrUpdateAlertIndex(dataSources!!) - monitorCtx.alertIndices!!.createOrUpdateInitialAlertHistoryIndex(dataSources) - monitorCtx.alertService!!.loadCurrentAlertsForWorkflow(workflow, dataSources) - } catch (e: Exception) { - logger.error("Failed to fetch current alerts for workflow", e) - // We can't save ERROR alerts to the index here as we don't know if there are existing ACTIVE alerts - val id = if (workflow.id.trim().isEmpty()) "_na_" else workflow.id - logger.error("Error loading alerts for workflow: $id", e) - return workflowRunResult.copy(error = e) - } + val workflowRunResult = + WorkflowRunResult( + workflowId = workflow.id, + workflowName = workflow.name, + monitorRunResults = resultList, + executionStartTime = workflowExecutionStartTime, + executionEndTime = null, + executionId = executionId, + error = lastErrorDelegateRun, + triggerResults = triggerResults, + ) + val currentAlerts = + try { + monitorCtx.alertIndices!!.createOrUpdateAlertIndex(dataSources!!) 
+ monitorCtx.alertIndices!!.createOrUpdateInitialAlertHistoryIndex(dataSources) + monitorCtx.alertService!!.loadCurrentAlertsForWorkflow(workflow, dataSources) + } catch (e: Exception) { + logger.error("Failed to fetch current alerts for workflow", e) + // We can't save ERROR alerts to the index here as we don't know if there are existing ACTIVE alerts + val id = if (workflow.id.trim().isEmpty()) "_na_" else workflow.id + logger.error("Error loading alerts for workflow: $id", e) + return workflowRunResult.copy(error = e) + } try { monitorCtx.alertIndices!!.createOrUpdateAlertIndex(dataSources) val updatedAlerts = mutableListOf() @@ -193,16 +208,17 @@ object CompositeWorkflowRunner : WorkflowRunner() { for (trigger in workflow.triggers) { val currentAlert = currentAlerts[trigger] val caTrigger = trigger as ChainedAlertTrigger - val triggerCtx = ChainedAlertTriggerExecutionContext( - workflow = workflow, - workflowRunResult = workflowRunResult, - periodStart = workflowRunResult.executionStartTime, - periodEnd = workflowRunResult.executionEndTime, - trigger = caTrigger, - alertGeneratingMonitors = monitorIdToAlertIdsMap.keys, - monitorIdToAlertIdsMap = monitorIdToAlertIdsMap, - alert = currentAlert - ) + val triggerCtx = + ChainedAlertTriggerExecutionContext( + workflow = workflow, + workflowRunResult = workflowRunResult, + periodStart = workflowRunResult.executionStartTime, + periodEnd = workflowRunResult.executionEndTime, + trigger = caTrigger, + alertGeneratingMonitors = monitorIdToAlertIdsMap.keys, + monitorIdToAlertIdsMap = monitorIdToAlertIdsMap, + alert = currentAlert, + ) runChainedAlertTrigger( monitorCtx, workflow, @@ -211,7 +227,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { triggerCtx, dryRun, triggerResults, - updatedAlerts + updatedAlerts, ) } if (!dryRun && workflow.id != Workflow.NO_ID && updatedAlerts.isNotEmpty()) { @@ -220,7 +236,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { dataSources, updatedAlerts, it, - routingId = 
workflow.id + routingId = workflow.id, ) } } @@ -236,7 +252,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { executionEndTime = Instant.now(), executionId = executionId, error = AlertingException.wrap(e), - triggerResults = emptyMap() + triggerResults = emptyMap(), ) } workflowRunResult.executionEndTime = Instant.now() @@ -251,9 +267,8 @@ object CompositeWorkflowRunner : WorkflowRunner() { dryRun: Boolean, workflowRunContext: WorkflowRunContext, executionId: String, - transportService: TransportService + transportService: TransportService, ): MonitorRunResult<*>? { - if (delegateMonitor.isBucketLevelMonitor()) { return BucketLevelMonitorRunner.runMonitor( delegateMonitor, @@ -263,7 +278,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { dryRun, workflowRunContext, executionId, - transportService + transportService, ) } else if (delegateMonitor.isDocLevelMonitor()) { return DocumentLevelMonitorRunner().runMonitor( @@ -274,7 +289,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { dryRun, workflowRunContext, executionId, - transportService + transportService, ) } else if (delegateMonitor.isQueryLevelMonitor()) { return QueryLevelMonitorRunner.runMonitor( @@ -285,11 +300,11 @@ object CompositeWorkflowRunner : WorkflowRunner() { dryRun, workflowRunContext, executionId, - transportService + transportService, ) } else { throw AlertingException.wrap( - IllegalStateException("Unsupported monitor type ${delegateMonitor.monitorType}") + IllegalStateException("Unsupported monitor type ${delegateMonitor.monitorType}"), ) } } @@ -311,7 +326,7 @@ object CompositeWorkflowRunner : WorkflowRunner() { val diffMonitorIds = delegates.map { it.monitorId }.minus(monitors.map { it.id }.toSet()).joinToString() logger.error("Delegate monitors don't exist $diffMonitorIds for the workflow $workflow.id") throw AlertingException.wrap( - IllegalStateException("Delegate monitors don't exist $diffMonitorIds for the workflow $workflow.id") + IllegalStateException("Delegate 
monitors don't exist $diffMonitorIds for the workflow $workflow.id"), ) } } @@ -326,9 +341,13 @@ object CompositeWorkflowRunner : WorkflowRunner() { triggerResults: MutableMap, updatedAlerts: MutableList, ) { - val triggerRunResult = monitorCtx.triggerService!!.runChainedAlertTrigger( - workflow, trigger, triggerCtx.alertGeneratingMonitors, triggerCtx.monitorIdToAlertIdsMap - ) + val triggerRunResult = + monitorCtx.triggerService!!.runChainedAlertTrigger( + workflow, + trigger, + triggerCtx.alertGeneratingMonitors, + triggerCtx.monitorIdToAlertIdsMap, + ) triggerResults[trigger.id] = triggerRunResult if (monitorCtx.triggerService!!.isChainedAlertTriggerActionable(triggerCtx, triggerRunResult)) { val actionCtx = triggerCtx @@ -336,9 +355,14 @@ object CompositeWorkflowRunner : WorkflowRunner() { triggerRunResult.actionResults[action.id] = this.runAction(action, actionCtx, monitorCtx, workflow, dryRun) } } - val alert = monitorCtx.alertService!!.composeChainedAlert( - triggerCtx, executionId, workflow, triggerRunResult.associatedAlertIds.toList(), triggerRunResult - ) + val alert = + monitorCtx.alertService!!.composeChainedAlert( + triggerCtx, + executionId, + workflow, + triggerRunResult.associatedAlertIds.toList(), + triggerRunResult, + ) if (alert != null) { updatedAlerts.add(alert) } @@ -357,21 +381,26 @@ object CompositeWorkflowRunner : WorkflowRunner() { val queryBuilder = boolQuery() queryBuilder.must(QueryBuilders.termQuery("execution_id", executionId)) queryBuilder.must(QueryBuilders.termQuery("state", getDelegateMonitorAlertState(workflow).name)) - val noErrorQuery = boolQuery() - .should(boolQuery().mustNot(existsQuery(Alert.ERROR_MESSAGE_FIELD))) - .should(termsQuery(Alert.ERROR_MESSAGE_FIELD, "")) + val noErrorQuery = + boolQuery() + .should(boolQuery().mustNot(existsQuery(Alert.ERROR_MESSAGE_FIELD))) + .should(termsQuery(Alert.ERROR_MESSAGE_FIELD, "")) queryBuilder.must(noErrorQuery) searchRequest.source().query(queryBuilder).size(9999) val 
searchResponse: SearchResponse = monitorCtx.client!!.suspendUntil { monitorCtx.client!!.search(searchRequest, it) } - val alerts = searchResponse.hits.map { hit -> - val xcp = XContentHelper.createParser( - monitorCtx.xContentRegistry, LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val alert = Alert.parse(xcp, hit.id, hit.version) - alert - } + val alerts = + searchResponse.hits.map { hit -> + val xcp = + XContentHelper.createParser( + monitorCtx.xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val alert = Alert.parse(xcp, hit.id, hit.version) + alert + } val map = mutableMapOf>() for (alert in alerts) { if (map.containsKey(alert.monitorId)) { @@ -391,19 +420,21 @@ object CompositeWorkflowRunner : WorkflowRunner() { dataSources: DataSources, workflow: Workflow, isAlertHistoryEnabled: Boolean, - ): String { - return if (workflow.triggers.isNotEmpty()) { + ): String = + if (workflow.triggers.isNotEmpty()) { if (isAlertHistoryEnabled) { dataSources.alertsHistoryIndex!! 
- } else dataSources.alertsIndex - } else dataSources.alertsIndex - } + } else { + dataSources.alertsIndex + } + } else { + dataSources.alertsIndex + } - fun getDelegateMonitorAlertState( - workflow: Workflow, - ): Alert.State { - return if (workflow.triggers.isNotEmpty()) { + fun getDelegateMonitorAlertState(workflow: Workflow): Alert.State = + if (workflow.triggers.isNotEmpty()) { Alert.State.AUDIT - } else Alert.State.ACTIVE - } + } else { + Alert.State.ACTIVE + } } diff --git a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt index 56a708444..981d402cf 100644 --- a/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt +++ b/alerting/src/main/kotlin/org/opensearch/alerting/workflow/WorkflowRunner.kt @@ -29,7 +29,7 @@ abstract class WorkflowRunner { periodStart: Instant, periodEnd: Instant, dryRun: Boolean, - transportService: TransportService + transportService: TransportService, ): WorkflowRunResult suspend fun runAction( @@ -37,16 +37,19 @@ abstract class WorkflowRunner { ctx: ChainedAlertTriggerExecutionContext, monitorCtx: MonitorRunnerExecutionContext, workflow: Workflow, - dryrun: Boolean + dryrun: Boolean, ): ActionRunResult { return try { if (!MonitorRunnerService.isActionActionable(action, ctx.alert)) { return ActionRunResult(action.id, action.name, mapOf(), true, null, null) } val actionOutput = mutableMapOf() - actionOutput[Action.SUBJECT] = if (action.subjectTemplate != null) { - compileTemplate(action.subjectTemplate!!, ctx) - } else "" + actionOutput[Action.SUBJECT] = + if (action.subjectTemplate != null) { + compileTemplate(action.subjectTemplate!!, ctx) + } else { + "" + } actionOutput[Action.MESSAGE] = compileTemplate(action.messageTemplate, ctx) if (Strings.isNullOrEmpty(actionOutput[Action.MESSAGE])) { throw IllegalStateException("Message content missing in the Destination with id: ${action.destinationId}") @@ -60,15 
+63,16 @@ abstract class WorkflowRunner { monitorCtx.settings!!, monitorCtx.threadPool!!.threadContext, workflow.user?.roles, - workflow.user - ) + workflow.user, + ), ) { - actionOutput[Action.MESSAGE_ID] = getConfigAndSendNotification( - action, - monitorCtx, - actionOutput[Action.SUBJECT], - actionOutput[Action.MESSAGE]!! - ) + actionOutput[Action.MESSAGE_ID] = + getConfigAndSendNotification( + action, + monitorCtx, + actionOutput[Action.SUBJECT], + actionOutput[Action.MESSAGE]!!, + ) } } } @@ -78,9 +82,12 @@ abstract class WorkflowRunner { } } - internal fun compileTemplate(template: Script, ctx: ChainedAlertTriggerExecutionContext): String { - return MonitorRunnerService.monitorCtx.scriptService!!.compile(template, TemplateScript.CONTEXT) + internal fun compileTemplate( + template: Script, + ctx: ChainedAlertTriggerExecutionContext, + ): String = + MonitorRunnerService.monitorCtx.scriptService!! + .compile(template, TemplateScript.CONTEXT) .newInstance(template.params + mapOf("ctx" to ctx.asTemplateArg())) .execute() - } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/ADTestHelpers.kt b/alerting/src/test/kotlin/org/opensearch/alerting/ADTestHelpers.kt index 93c1675e9..8a88963d6 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/ADTestHelpers.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/ADTestHelpers.kt @@ -26,8 +26,8 @@ import java.time.temporal.ChronoUnit const val ANOMALY_DETECTOR_INDEX = ".opendistro-anomaly-detectors" const val ANOMALY_RESULT_INDEX = ".opendistro-anomaly-results*" -fun anomalyDetectorIndexMapping(): String { - return """ +fun anomalyDetectorIndexMapping(): String = + """ "properties": { "schema_version": { "type": "integer" @@ -172,10 +172,9 @@ fun anomalyDetectorIndexMapping(): String { } } """ -} -fun anomalyResultIndexMapping(): String { - return """ +fun anomalyResultIndexMapping(): String = + """ "properties": { "detector_id": { "type": "keyword" @@ -276,190 +275,211 @@ fun 
anomalyResultIndexMapping(): String { } } """ -} -fun randomAnomalyDetector(): String { - return """{ - "name" : "${OpenSearchTestCase.randomAlphaOfLength(10)}", - "description" : "${OpenSearchTestCase.randomAlphaOfLength(10)}", - "time_field" : "timestamp", - "indices" : [ - "${OpenSearchTestCase.randomAlphaOfLength(5)}" - ], - "filter_query" : { - "match_all" : { - "boost" : 1.0 - } - }, - "detection_interval" : { - "period" : { - "interval" : 1, - "unit" : "Minutes" - } - }, - "window_delay" : { - "period" : { - "interval" : 1, - "unit" : "Minutes" - } - }, - "shingle_size" : 8, - "feature_attributes" : [ - { - "feature_name" : "F1", - "feature_enabled" : true, - "aggregation_query" : { - "f_1" : { - "sum" : { - "field" : "value" +fun randomAnomalyDetector(): String = + """ + { + "name" : "${OpenSearchTestCase.randomAlphaOfLength(10)}", + "description" : "${OpenSearchTestCase.randomAlphaOfLength(10)}", + "time_field" : "timestamp", + "indices" : [ + "${OpenSearchTestCase.randomAlphaOfLength(5)}" + ], + "filter_query" : { + "match_all" : { + "boost" : 1.0 + } + }, + "detection_interval" : { + "period" : { + "interval" : 1, + "unit" : "Minutes" + } + }, + "window_delay" : { + "period" : { + "interval" : 1, + "unit" : "Minutes" + } + }, + "shingle_size" : 8, + "feature_attributes" : [ + { + "feature_name" : "F1", + "feature_enabled" : true, + "aggregation_query" : { + "f_1" : { + "sum" : { + "field" : "value" + } } } } - } - ] - } + ] + } """.trimIndent() -} -fun randomAnomalyDetectorWithUser(backendRole: String): String { - return """{ - "name" : "${OpenSearchTestCase.randomAlphaOfLength(5)}", - "description" : "${OpenSearchTestCase.randomAlphaOfLength(10)}", - "time_field" : "timestamp", - "indices" : [ - "${OpenSearchTestCase.randomAlphaOfLength(5)}" - ], - "filter_query" : { - "match_all" : { - "boost" : 1.0 - } - }, - "detection_interval" : { - "period" : { - "interval" : 1, - "unit" : "Minutes" - } - }, - "window_delay" : { - "period" : { - "interval" : 1, - 
"unit" : "Minutes" - } - }, - "shingle_size" : 8, - "feature_attributes" : [ - { - "feature_name" : "F1", - "feature_enabled" : true, - "aggregation_query" : { - "f_1" : { - "sum" : { - "field" : "value" +fun randomAnomalyDetectorWithUser(backendRole: String): String = + """ + { + "name" : "${OpenSearchTestCase.randomAlphaOfLength(5)}", + "description" : "${OpenSearchTestCase.randomAlphaOfLength(10)}", + "time_field" : "timestamp", + "indices" : [ + "${OpenSearchTestCase.randomAlphaOfLength(5)}" + ], + "filter_query" : { + "match_all" : { + "boost" : 1.0 + } + }, + "detection_interval" : { + "period" : { + "interval" : 1, + "unit" : "Minutes" + } + }, + "window_delay" : { + "period" : { + "interval" : 1, + "unit" : "Minutes" + } + }, + "shingle_size" : 8, + "feature_attributes" : [ + { + "feature_name" : "F1", + "feature_enabled" : true, + "aggregation_query" : { + "f_1" : { + "sum" : { + "field" : "value" + } } } } - } - ], - "user" : { - "name" : "${OpenSearchTestCase.randomAlphaOfLength(5)}", - "backend_roles" : [ "$backendRole" ], - "roles" : [ - "${OpenSearchTestCase.randomAlphaOfLength(5)}" ], - "custom_attribute_names" : [ ] + "user" : { + "name" : "${OpenSearchTestCase.randomAlphaOfLength(5)}", + "backend_roles" : [ "$backendRole" ], + "roles" : [ + "${OpenSearchTestCase.randomAlphaOfLength(5)}" + ], + "custom_attribute_names" : [ ] + } } - } """.trimIndent() -} fun randomAnomalyResult( detectorId: String = OpenSearchTestCase.randomAlphaOfLength(10), - dataStartTime: Long = ZonedDateTime.now().minus(2, ChronoUnit.MINUTES).toInstant().toEpochMilli(), + dataStartTime: Long = + ZonedDateTime + .now() + .minus(2, ChronoUnit.MINUTES) + .toInstant() + .toEpochMilli(), dataEndTime: Long = ZonedDateTime.now().toInstant().toEpochMilli(), featureId: String = OpenSearchTestCase.randomAlphaOfLength(5), featureName: String = OpenSearchTestCase.randomAlphaOfLength(5), featureData: Double = OpenSearchTestCase.randomDouble(), - executionStartTime: Long = 
ZonedDateTime.now().minus(10, ChronoUnit.SECONDS).toInstant().toEpochMilli(), + executionStartTime: Long = + ZonedDateTime + .now() + .minus(10, ChronoUnit.SECONDS) + .toInstant() + .toEpochMilli(), executionEndTime: Long = ZonedDateTime.now().toInstant().toEpochMilli(), anomalyScore: Double = OpenSearchTestCase.randomDouble(), anomalyGrade: Double = OpenSearchTestCase.randomDouble(), confidence: Double = OpenSearchTestCase.randomDouble(), - user: User = randomUser() -): String { - return """{ - "detector_id" : "$detectorId", - "data_start_time" : $dataStartTime, - "data_end_time" : $dataEndTime, - "feature_data" : [ - { - "feature_id" : "$featureId", - "feature_name" : "$featureName", - "data" : $featureData - } - ], - "execution_start_time" : $executionStartTime, - "execution_end_time" : $executionEndTime, - "anomaly_score" : $anomalyScore, - "anomaly_grade" : $anomalyGrade, - "confidence" : $confidence, - "user" : { - "name" : "${user.name}", - "backend_roles" : [ - ${user.backendRoles.joinToString { "\"${it}\"" }} - ], - "roles" : [ - ${user.roles.joinToString { "\"${it}\"" }} - ], - "custom_attribute_names" : [ - ${user.customAttNames.joinToString { "\"${it}\"" }} - ] - } + user: User = randomUser(), +): String = + """ + { + "detector_id" : "$detectorId", + "data_start_time" : $dataStartTime, + "data_end_time" : $dataEndTime, + "feature_data" : [ + { + "feature_id" : "$featureId", + "feature_name" : "$featureName", + "data" : $featureData } + ], + "execution_start_time" : $executionStartTime, + "execution_end_time" : $executionEndTime, + "anomaly_score" : $anomalyScore, + "anomaly_grade" : $anomalyGrade, + "confidence" : $confidence, + "user" : { + "name" : "${user.name}", + "backend_roles" : [ + ${user.backendRoles.joinToString { "\"${it}\"" }} + ], + "roles" : [ + ${user.roles.joinToString { "\"${it}\"" }} + ], + "custom_attribute_names" : [ + ${user.customAttNames.joinToString { "\"${it}\"" }} + ] + } + } """.trimIndent() -} fun 
randomAnomalyResultWithoutUser( detectorId: String = OpenSearchTestCase.randomAlphaOfLength(10), - dataStartTime: Long = ZonedDateTime.now().minus(2, ChronoUnit.MINUTES).toInstant().toEpochMilli(), + dataStartTime: Long = + ZonedDateTime + .now() + .minus(2, ChronoUnit.MINUTES) + .toInstant() + .toEpochMilli(), dataEndTime: Long = ZonedDateTime.now().toInstant().toEpochMilli(), featureId: String = OpenSearchTestCase.randomAlphaOfLength(5), featureName: String = OpenSearchTestCase.randomAlphaOfLength(5), featureData: Double = OpenSearchTestCase.randomDouble(), - executionStartTime: Long = ZonedDateTime.now().minus(10, ChronoUnit.SECONDS).toInstant().toEpochMilli(), + executionStartTime: Long = + ZonedDateTime + .now() + .minus(10, ChronoUnit.SECONDS) + .toInstant() + .toEpochMilli(), executionEndTime: Long = ZonedDateTime.now().toInstant().toEpochMilli(), anomalyScore: Double = OpenSearchTestCase.randomDouble(), anomalyGrade: Double = OpenSearchTestCase.randomDouble(), - confidence: Double = OpenSearchTestCase.randomDouble() -): String { - return """{ - "detector_id" : "$detectorId", - "data_start_time" : $dataStartTime, - "data_end_time" : $dataEndTime, - "feature_data" : [ - { - "feature_id" : "$featureId", - "feature_name" : "$featureName", - "data" : $featureData - } - ], - "execution_start_time" : $executionStartTime, - "execution_end_time" : $executionEndTime, - "anomaly_score" : $anomalyScore, - "anomaly_grade" : $anomalyGrade, - "confidence" : $confidence + confidence: Double = OpenSearchTestCase.randomDouble(), +): String = + """ + { + "detector_id" : "$detectorId", + "data_start_time" : $dataStartTime, + "data_end_time" : $dataEndTime, + "feature_data" : [ + { + "feature_id" : "$featureId", + "feature_name" : "$featureName", + "data" : $featureData } + ], + "execution_start_time" : $executionStartTime, + "execution_end_time" : $executionEndTime, + "anomaly_score" : $anomalyScore, + "anomaly_grade" : $anomalyGrade, + "confidence" : $confidence + } 
""".trimIndent() -} fun maxAnomalyGradeSearchInput( adResultIndex: String = ".opendistro-anomaly-results-history", detectorId: String = OpenSearchTestCase.randomAlphaOfLength(10), - size: Int = 1 + size: Int = 1, ): SearchInput { - val rangeQuery = QueryBuilders.rangeQuery("execution_end_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") + val rangeQuery = + QueryBuilders + .rangeQuery("execution_end_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") val termQuery = QueryBuilders.termQuery("detector_id", detectorId) var boolQueryBuilder = BoolQueryBuilder() @@ -471,16 +491,16 @@ fun maxAnomalyGradeSearchInput( } fun adMonitorTrigger(): QueryLevelTrigger { - val triggerScript = """ - return ctx.results[0].aggregations.max_anomaly_grade.value != null && - ctx.results[0].aggregations.max_anomaly_grade.value > 0.7 - """.trimIndent() + val triggerScript = + """ + return ctx.results[0].aggregations.max_anomaly_grade.value != null && + ctx.results[0].aggregations.max_anomaly_grade.value > 0.7 + """.trimIndent() return randomQueryLevelTrigger(condition = Script(triggerScript)) } -fun adSearchInput(detectorId: String): SearchInput { - return maxAnomalyGradeSearchInput(adResultIndex = ANOMALY_RESULT_INDEX, detectorId = detectorId, size = 10) -} +fun adSearchInput(detectorId: String): SearchInput = + maxAnomalyGradeSearchInput(adResultIndex = ANOMALY_RESULT_INDEX, detectorId = detectorId, size = 10) fun randomADMonitor( name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), @@ -491,18 +511,25 @@ fun randomADMonitor( triggers: List = (1..OpenSearchTestCase.randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false -): Monitor { - return Monitor( - name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, - schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, - user = user, uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + withMetadata: Boolean = false, +): Monitor = + Monitor( + name = name, + monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, + enabled = enabled, + inputs = inputs, + schedule = schedule, + triggers = triggers, + enabledTime = enabledTime, + lastUpdateTime = lastUpdateTime, + user = user, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf(), ) -} -fun randomADUser(backendRole: String = OpenSearchRestTestCase.randomAlphaOfLength(10)): User { - return User( - OpenSearchRestTestCase.randomAlphaOfLength(10), listOf(backendRole), - listOf(OpenSearchRestTestCase.randomAlphaOfLength(10), ALL_ACCESS_ROLE), mapOf("test_attr" to "test") +fun randomADUser(backendRole: String = OpenSearchRestTestCase.randomAlphaOfLength(10)): User = + User( + OpenSearchRestTestCase.randomAlphaOfLength(10), + listOf(backendRole), + listOf(OpenSearchRestTestCase.randomAlphaOfLength(10), ALL_ACCESS_ROLE), + mapOf("test_attr" to "test"), ) -} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/AccessRoles.kt b/alerting/src/test/kotlin/org/opensearch/alerting/AccessRoles.kt index 133504168..26cfc8424 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/AccessRoles.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/AccessRoles.kt @@ -31,22 +31,23 @@ val ALERTING_GET_DESTINATION_ACCESS = "alerting_get_destination_access" val ALERTING_GET_ALERTS_ACCESS = "alerting_get_alerts_access" val ALERTING_INDEX_WORKFLOW_ACCESS = "alerting_index_workflow_access" -val 
ROLE_TO_PERMISSION_MAPPING = mapOf( - ALL_ACCESS_ROLE to "*", - ALERTING_NO_ACCESS_ROLE to "", - ALERTING_GET_EMAIL_ACCOUNT_ACCESS to "cluster:admin/opendistro/alerting/destination/email_account/get", - ALERTING_SEARCH_EMAIL_ACCOUNT_ACCESS to "cluster:admin/opendistro/alerting/destination/email_account/search", - ALERTING_GET_EMAIL_GROUP_ACCESS to "cluster:admin/opendistro/alerting/destination/email_group/get", - ALERTING_SEARCH_EMAIL_GROUP_ACCESS to "cluster:admin/opendistro/alerting/destination/email_group/search", - ALERTING_INDEX_MONITOR_ACCESS to "cluster:admin/opendistro/alerting/monitor/write", - ALERTING_GET_MONITOR_ACCESS to "cluster:admin/opendistro/alerting/monitor/get", - ALERTING_GET_WORKFLOW_ACCESS to AlertingActions.GET_WORKFLOW_ACTION_NAME, - ALERTING_SEARCH_MONITOR_ONLY_ACCESS to "cluster:admin/opendistro/alerting/monitor/search", - ALERTING_EXECUTE_MONITOR_ACCESS to "cluster:admin/opendistro/alerting/monitor/execute", - ALERTING_EXECUTE_WORKFLOW_ACCESS to ExecuteWorkflowAction.NAME, - ALERTING_DELETE_MONITOR_ACCESS to "cluster:admin/opendistro/alerting/monitor/delete", - ALERTING_GET_DESTINATION_ACCESS to "cluster:admin/opendistro/alerting/destination/get", - ALERTING_GET_ALERTS_ACCESS to "cluster:admin/opendistro/alerting/alerts/get", - ALERTING_INDEX_WORKFLOW_ACCESS to AlertingActions.INDEX_WORKFLOW_ACTION_NAME, - ALERTING_DELETE_WORKFLOW_ACCESS to AlertingActions.DELETE_WORKFLOW_ACTION_NAME -) +val ROLE_TO_PERMISSION_MAPPING = + mapOf( + ALL_ACCESS_ROLE to "*", + ALERTING_NO_ACCESS_ROLE to "", + ALERTING_GET_EMAIL_ACCOUNT_ACCESS to "cluster:admin/opendistro/alerting/destination/email_account/get", + ALERTING_SEARCH_EMAIL_ACCOUNT_ACCESS to "cluster:admin/opendistro/alerting/destination/email_account/search", + ALERTING_GET_EMAIL_GROUP_ACCESS to "cluster:admin/opendistro/alerting/destination/email_group/get", + ALERTING_SEARCH_EMAIL_GROUP_ACCESS to "cluster:admin/opendistro/alerting/destination/email_group/search", + ALERTING_INDEX_MONITOR_ACCESS 
to "cluster:admin/opendistro/alerting/monitor/write", + ALERTING_GET_MONITOR_ACCESS to "cluster:admin/opendistro/alerting/monitor/get", + ALERTING_GET_WORKFLOW_ACCESS to AlertingActions.GET_WORKFLOW_ACTION_NAME, + ALERTING_SEARCH_MONITOR_ONLY_ACCESS to "cluster:admin/opendistro/alerting/monitor/search", + ALERTING_EXECUTE_MONITOR_ACCESS to "cluster:admin/opendistro/alerting/monitor/execute", + ALERTING_EXECUTE_WORKFLOW_ACCESS to ExecuteWorkflowAction.NAME, + ALERTING_DELETE_MONITOR_ACCESS to "cluster:admin/opendistro/alerting/monitor/delete", + ALERTING_GET_DESTINATION_ACCESS to "cluster:admin/opendistro/alerting/destination/get", + ALERTING_GET_ALERTS_ACCESS to "cluster:admin/opendistro/alerting/alerts/get", + ALERTING_INDEX_WORKFLOW_ACCESS to AlertingActions.INDEX_WORKFLOW_ACTION_NAME, + ALERTING_DELETE_WORKFLOW_ACCESS to AlertingActions.DELETE_WORKFLOW_ACTION_NAME, + ) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/AlertServiceTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/AlertServiceTests.kt index aacfa58a8..d021e9844 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/AlertServiceTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/AlertServiceTests.kt @@ -30,7 +30,6 @@ import java.time.Instant import java.time.temporal.ChronoUnit class AlertServiceTests : OpenSearchTestCase() { - private lateinit var client: Client private lateinit var xContentRegistry: NamedXContentRegistry private lateinit var settings: Settings @@ -75,25 +74,33 @@ class AlertServiceTests : OpenSearchTestCase() { val monitor = randomBucketLevelMonitor(triggers = listOf(trigger)) val currentAlerts = mutableMapOf() - val aggResultBuckets = createAggregationResultBucketsFromBucketKeys( - listOf( - listOf("a"), - listOf("b") + val aggResultBuckets = + createAggregationResultBucketsFromBucketKeys( + listOf( + listOf("a"), + listOf("b"), + ), ) - ) - val categorizedAlerts = alertService.getCategorizedAlertsForBucketLevelMonitor( - monitor, 
trigger, currentAlerts, aggResultBuckets, emptyList(), "", null - ) + val categorizedAlerts = + alertService.getCategorizedAlertsForBucketLevelMonitor( + monitor, + trigger, + currentAlerts, + aggResultBuckets, + emptyList(), + "", + null, + ) // Completed Alerts are what remains in currentAlerts after categorization val completedAlerts = currentAlerts.values.toList() assertEquals(listOf(), categorizedAlerts[AlertCategory.DEDUPED]) assertAlertsExistForBucketKeys( listOf( listOf("a"), - listOf("b") + listOf("b"), ), - categorizedAlerts[AlertCategory.NEW] ?: error("New alerts not found") + categorizedAlerts[AlertCategory.NEW] ?: error("New alerts not found"), ) assertEquals(listOf(), completedAlerts) } @@ -102,31 +109,41 @@ class AlertServiceTests : OpenSearchTestCase() { val trigger = randomBucketLevelTrigger() val monitor = randomBucketLevelMonitor(triggers = listOf(trigger)) - val currentAlerts = createCurrentAlertsFromBucketKeys( - monitor, trigger, - listOf( - listOf("a"), - listOf("b") + val currentAlerts = + createCurrentAlertsFromBucketKeys( + monitor, + trigger, + listOf( + listOf("a"), + listOf("b"), + ), ) - ) - val aggResultBuckets = createAggregationResultBucketsFromBucketKeys( - listOf( - listOf("a"), - listOf("b") + val aggResultBuckets = + createAggregationResultBucketsFromBucketKeys( + listOf( + listOf("a"), + listOf("b"), + ), ) - ) - val categorizedAlerts = alertService.getCategorizedAlertsForBucketLevelMonitor( - monitor, trigger, currentAlerts, aggResultBuckets, emptyList(), "", null - ) + val categorizedAlerts = + alertService.getCategorizedAlertsForBucketLevelMonitor( + monitor, + trigger, + currentAlerts, + aggResultBuckets, + emptyList(), + "", + null, + ) // Completed Alerts are what remains in currentAlerts after categorization val completedAlerts = currentAlerts.values.toList() assertAlertsExistForBucketKeys( listOf( listOf("a"), - listOf("b") + listOf("b"), ), - categorizedAlerts[AlertCategory.DEDUPED] ?: error("Deduped alerts not found") 
+ categorizedAlerts[AlertCategory.DEDUPED] ?: error("Deduped alerts not found"), ) assertEquals(listOf(), categorizedAlerts[AlertCategory.NEW]) assertEquals(listOf(), completedAlerts) @@ -136,18 +153,27 @@ class AlertServiceTests : OpenSearchTestCase() { val trigger = randomBucketLevelTrigger() val monitor = randomBucketLevelMonitor(triggers = listOf(trigger)) - val currentAlerts = createCurrentAlertsFromBucketKeys( - monitor, trigger, - listOf( - listOf("a"), - listOf("b") + val currentAlerts = + createCurrentAlertsFromBucketKeys( + monitor, + trigger, + listOf( + listOf("a"), + listOf("b"), + ), ) - ) val aggResultBuckets = listOf() - val categorizedAlerts = alertService.getCategorizedAlertsForBucketLevelMonitor( - monitor, trigger, currentAlerts, aggResultBuckets, emptyList(), "", null - ) + val categorizedAlerts = + alertService.getCategorizedAlertsForBucketLevelMonitor( + monitor, + trigger, + currentAlerts, + aggResultBuckets, + emptyList(), + "", + null, + ) // Completed Alerts are what remains in currentAlerts after categorization val completedAlerts = currentAlerts.values.toList() assertEquals(listOf(), categorizedAlerts[AlertCategory.DEDUPED]) @@ -155,9 +181,9 @@ class AlertServiceTests : OpenSearchTestCase() { assertAlertsExistForBucketKeys( listOf( listOf("a"), - listOf("b") + listOf("b"), ), - completedAlerts + completedAlerts, ) } @@ -165,23 +191,33 @@ class AlertServiceTests : OpenSearchTestCase() { val trigger = randomBucketLevelTrigger() val monitor = randomBucketLevelMonitor(triggers = listOf(trigger)) - val currentAlerts = createCurrentAlertsFromBucketKeys( - monitor, trigger, - listOf( - listOf("a"), - listOf("b") + val currentAlerts = + createCurrentAlertsFromBucketKeys( + monitor, + trigger, + listOf( + listOf("a"), + listOf("b"), + ), ) - ) - val aggResultBuckets = createAggregationResultBucketsFromBucketKeys( - listOf( - listOf("b"), - listOf("c") + val aggResultBuckets = + createAggregationResultBucketsFromBucketKeys( + listOf( + 
listOf("b"), + listOf("c"), + ), ) - ) - val categorizedAlerts = alertService.getCategorizedAlertsForBucketLevelMonitor( - monitor, trigger, currentAlerts, aggResultBuckets, emptyList(), "", null - ) + val categorizedAlerts = + alertService.getCategorizedAlertsForBucketLevelMonitor( + monitor, + trigger, + currentAlerts, + aggResultBuckets, + emptyList(), + "", + null, + ) // Completed Alerts are what remains in currentAlerts after categorization val completedAlerts = currentAlerts.values.toList() assertAlertsExistForBucketKeys(listOf(listOf("b")), categorizedAlerts[AlertCategory.DEDUPED] ?: error("Deduped alerts not found")) @@ -193,21 +229,31 @@ class AlertServiceTests : OpenSearchTestCase() { val trigger = randomBucketLevelTrigger() val monitor = randomBucketLevelMonitor(triggers = listOf(trigger)) - val currentAlerts = createCurrentAlertsFromBucketKeys( - monitor, trigger, - listOf( - listOf("a") + val currentAlerts = + createCurrentAlertsFromBucketKeys( + monitor, + trigger, + listOf( + listOf("a"), + ), ) - ) - val aggResultBuckets = createAggregationResultBucketsFromBucketKeys( - listOf( - listOf("a"), + val aggResultBuckets = + createAggregationResultBucketsFromBucketKeys( + listOf( + listOf("a"), + ), ) - ) - val categorizedAlerts = alertService.getCategorizedAlertsForBucketLevelMonitor( - monitor, trigger, currentAlerts, aggResultBuckets, emptyList(), "", null - ) + val categorizedAlerts = + alertService.getCategorizedAlertsForBucketLevelMonitor( + monitor, + trigger, + currentAlerts, + aggResultBuckets, + emptyList(), + "", + null, + ) // Completed Alerts are what remains in currentAlerts after categorization val completedAlerts = currentAlerts.values.toList() assertAlertsExistForBucketKeys(listOf(listOf("a")), categorizedAlerts[AlertCategory.DEDUPED] ?: error("Deduped alerts not found")) @@ -218,23 +264,33 @@ class AlertServiceTests : OpenSearchTestCase() { private fun createCurrentAlertsFromBucketKeys( monitor: Monitor, trigger: BucketLevelTrigger, - 
bucketKeysList: List> - ): MutableMap { - return bucketKeysList.map { bucketKeys -> - val aggResultBucket = AggregationResultBucket("parent_bucket_path", bucketKeys, mapOf()) - val alert = Alert( - monitor, trigger, Instant.now().truncatedTo(ChronoUnit.MILLIS), null, - actionExecutionResults = listOf(randomActionExecutionResult()), aggregationResultBucket = aggResultBucket - ) - aggResultBucket.getBucketKeysHash() to alert - }.toMap().toMutableMap() - } + bucketKeysList: List>, + ): MutableMap = + bucketKeysList + .map { bucketKeys -> + val aggResultBucket = AggregationResultBucket("parent_bucket_path", bucketKeys, mapOf()) + val alert = + Alert( + monitor, + trigger, + Instant.now().truncatedTo(ChronoUnit.MILLIS), + null, + actionExecutionResults = listOf(randomActionExecutionResult()), + aggregationResultBucket = aggResultBucket, + ) + aggResultBucket.getBucketKeysHash() to alert + }.toMap() + .toMutableMap() - private fun createAggregationResultBucketsFromBucketKeys(bucketKeysList: List>): List { - return bucketKeysList.map { AggregationResultBucket("parent_bucket_path", it, mapOf()) } - } + private fun createAggregationResultBucketsFromBucketKeys(bucketKeysList: List>): List = + bucketKeysList.map { + AggregationResultBucket("parent_bucket_path", it, mapOf()) + } - private fun assertAlertsExistForBucketKeys(bucketKeysList: List>, alerts: List) { + private fun assertAlertsExistForBucketKeys( + bucketKeysList: List>, + alerts: List, + ) { // Check if size is equals first for sanity and since bucketKeysList should have unique entries, // this ensures there shouldn't be duplicates in the alerts assertEquals(bucketKeysList.size, alerts.size) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/AlertingRestTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/AlertingRestTestCase.kt index 97cf0afcf..88400d9e5 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/AlertingRestTestCase.kt +++ 
b/alerting/src/test/kotlin/org/opensearch/alerting/AlertingRestTestCase.kt @@ -95,7 +95,6 @@ import javax.management.remote.JMXServiceURL * Superclass for tests that interact with an external test cluster using OpenSearch's RestClient */ abstract class AlertingRestTestCase : ODFERestTestCase() { - protected val password = "D%LMX3bo#@U3XqVQ" protected val isDebuggingTest = DisableOnDebug(null).isDebugging @@ -106,8 +105,8 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected val statsResponseOpendistroSweeperEnabledField = "opendistro.scheduled_jobs.enabled" protected val statsResponseOpenSearchSweeperEnabledField = "plugins.scheduled_jobs.enabled" - override fun xContentRegistry(): NamedXContentRegistry { - return NamedXContentRegistry( + override fun xContentRegistry(): NamedXContentRegistry = + NamedXContentRegistry( mutableListOf( Monitor.XCONTENT_REGISTRY, MonitorV2.XCONTENT_REGISTRY, @@ -117,16 +116,16 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { BucketLevelTrigger.XCONTENT_REGISTRY, DocumentLevelTrigger.XCONTENT_REGISTRY, Workflow.XCONTENT_REGISTRY, - ChainedAlertTrigger.XCONTENT_REGISTRY - ) + SearchModule(Settings.EMPTY, emptyList()).namedXContents + ChainedAlertTrigger.XCONTENT_REGISTRY, + ) + SearchModule(Settings.EMPTY, emptyList()).namedXContents, ) - } - fun Response.asMap(): Map { - return entityAsMap(this) - } + fun Response.asMap(): Map = entityAsMap(this) - private fun createMonitorEntityWithBackendRoles(monitor: Monitor, rbacRoles: List?): HttpEntity { + private fun createMonitorEntityWithBackendRoles( + monitor: Monitor, + rbacRoles: List?, + ): HttpEntity { if (rbacRoles == null) { return monitor.toHttpEntity() } @@ -137,7 +136,10 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { return StringEntity(jsonString, APPLICATION_JSON) } - private fun createMonitorV2EntityWithBackendRoles(monitorV2: MonitorV2, rbacRoles: List?): HttpEntity { + private fun createMonitorV2EntityWithBackendRoles( + monitorV2: 
MonitorV2, + rbacRoles: List?, + ): HttpEntity { if (rbacRoles == null) { return monitorV2.toHttpEntity() } @@ -154,16 +156,22 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { rbacRoles: List? = null, refresh: Boolean = true, ): Monitor { - val response = client.makeRequest( - "POST", "$ALERTING_BASE_URI?refresh=$refresh", emptyMap(), - createMonitorEntityWithBackendRoles(monitor, rbacRoles) - ) + val response = + client.makeRequest( + "POST", + "$ALERTING_BASE_URI?refresh=$refresh", + emptyMap(), + createMonitorEntityWithBackendRoles(monitor, rbacRoles), + ) assertEquals("Unable to create a new monitor", RestStatus.CREATED, response.restStatus()) - val monitorJson = jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val monitorJson = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() assertUserNull(monitorJson as HashMap) return getMonitor(monitorId = monitorJson["_id"] as String) @@ -172,7 +180,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun createMonitorV2WithClient( client: RestClient, monitorV2: MonitorV2, - rbacRoles: List? = null + rbacRoles: List? 
= null, ): MonitorV2 { // every random ppl monitor's query searches index TEST_INDEX_NAME // by default, so create that first before creating the monitor @@ -183,24 +191,31 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { // be sure to use the passed in client to send the create monitor request, // as the user stored in this client is the user whose permissions we want // to test, not client()'s admin level user - val response = client.makeRequest( - "POST", MONITOR_V2_BASE_URI, emptyMap(), - createMonitorV2EntityWithBackendRoles(monitorV2, rbacRoles) - ) + val response = + client.makeRequest( + "POST", + MONITOR_V2_BASE_URI, + emptyMap(), + createMonitorV2EntityWithBackendRoles(monitorV2, rbacRoles), + ) assertEquals("Unable to create a new monitor v2", RestStatus.OK, response.restStatus()) - val monitorV2Json = jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val monitorV2Json = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() assertUserNull(monitorV2Json as HashMap) return getMonitorV2(monitorV2Id = monitorV2Json["_id"] as String) } - protected fun createMonitor(monitor: Monitor, refresh: Boolean = true): Monitor { - return createMonitorWithClient(client(), monitor, emptyList(), refresh) - } + protected fun createMonitor( + monitor: Monitor, + refresh: Boolean = true, + ): Monitor = createMonitorWithClient(client(), monitor, emptyList(), refresh) protected fun createMonitorV2(monitorV2: MonitorV2): MonitorV2 { val client = client() @@ -210,32 +225,46 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { return getMonitorV2(monitorV2Id = response.asMap()["_id"] as String) } - protected fun deleteMonitor(monitor: Monitor, refresh: Boolean = true): Response { - val response = client().makeRequest( - "DELETE", "$ALERTING_BASE_URI/${monitor.id}?refresh=$refresh", emptyMap(), - 
monitor.toHttpEntity() - ) + protected fun deleteMonitor( + monitor: Monitor, + refresh: Boolean = true, + ): Response { + val response = + client().makeRequest( + "DELETE", + "$ALERTING_BASE_URI/${monitor.id}?refresh=$refresh", + emptyMap(), + monitor.toHttpEntity(), + ) assertEquals("Unable to delete a monitor", RestStatus.OK, response.restStatus()) return response } protected fun deleteMonitorV2(monitorV2Id: String): Response { - val response = client().makeRequest( - "DELETE", "$MONITOR_V2_BASE_URI/$monitorV2Id?refresh=true", emptyMap() - ) + val response = + client().makeRequest( + "DELETE", + "$MONITOR_V2_BASE_URI/$monitorV2Id?refresh=true", + emptyMap(), + ) assertEquals("Unable to delete a monitor", RestStatus.OK, response.restStatus()) return response } - protected fun deleteWorkflow(workflow: Workflow, deleteDelegates: Boolean = false, refresh: Boolean = true): Response { - val response = client().makeRequest( - "DELETE", - "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}?refresh=$refresh&deleteDelegateMonitors=$deleteDelegates", - emptyMap(), - workflow.toHttpEntity() - ) + protected fun deleteWorkflow( + workflow: Workflow, + deleteDelegates: Boolean = false, + refresh: Boolean = true, + ): Response { + val response = + client().makeRequest( + "DELETE", + "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}?refresh=$refresh&deleteDelegateMonitors=$deleteDelegates", + emptyMap(), + workflow.toHttpEntity(), + ) assertEquals("Unable to delete a workflow", RestStatus.OK, response.restStatus()) return response } @@ -246,12 +275,13 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { deleteDelegates: Boolean = false, refresh: Boolean = true, ): Response { - val response = client.makeRequest( - "DELETE", - "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}?refresh=$refresh&deleteDelegateMonitors=$deleteDelegates", - emptyMap(), - workflow.toHttpEntity() - ) + val response = + client.makeRequest( + "DELETE", + 
"$WORKFLOW_ALERTING_BASE_URI/${workflow.id}?refresh=$refresh&deleteDelegateMonitors=$deleteDelegates", + emptyMap(), + workflow.toHttpEntity(), + ) assertEquals("Unable to delete a workflow", RestStatus.OK, response.restStatus()) return response @@ -261,52 +291,70 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { * Destinations are now deprecated in favor of the Notification plugin's configs. * This method should only be used for checking legacy behavior/Notification migration scenarios. */ - protected fun createDestination(destination: Destination = getTestDestination(), refresh: Boolean = true): Destination { + protected fun createDestination( + destination: Destination = getTestDestination(), + refresh: Boolean = true, + ): Destination { // Create Alerting config index if it doesn't exist to avoid mapping issues with legacy destination indexing createAlertingConfigIndex() - val response = indexDocWithAdminClient( - ScheduledJob.SCHEDULED_JOBS_INDEX, - UUIDs.base64UUID(), - destination.toJsonStringWithType(), - refresh - ) - val destinationJson = jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val response = + indexDocWithAdminClient( + ScheduledJob.SCHEDULED_JOBS_INDEX, + UUIDs.base64UUID(), + destination.toJsonStringWithType(), + refresh, + ) + val destinationJson = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() return destination.copy( id = destinationJson["_id"] as String, version = (destinationJson["_version"] as Int).toLong(), - primaryTerm = destinationJson["_primary_term"] as Int + primaryTerm = destinationJson["_primary_term"] as Int, ) } - protected fun deleteDestination(destination: Destination = getTestDestination(), refresh: Boolean = true): Response { - val response = client().makeRequest( - "DELETE", - "$DESTINATION_BASE_URI/${destination.id}?refresh=$refresh", - 
emptyMap(), - destination.toHttpEntity() - ) + protected fun deleteDestination( + destination: Destination = getTestDestination(), + refresh: Boolean = true, + ): Response { + val response = + client().makeRequest( + "DELETE", + "$DESTINATION_BASE_URI/${destination.id}?refresh=$refresh", + emptyMap(), + destination.toHttpEntity(), + ) assertEquals("Unable to delete destination", RestStatus.OK, response.restStatus()) return response } - protected fun updateDestination(destination: Destination, refresh: Boolean = true): Destination { - val response = client().makeRequest( - "PUT", - "$DESTINATION_BASE_URI/${destination.id}?refresh=$refresh", - emptyMap(), - destination.toHttpEntity() - ) + protected fun updateDestination( + destination: Destination, + refresh: Boolean = true, + ): Destination { + val response = + client().makeRequest( + "PUT", + "$DESTINATION_BASE_URI/${destination.id}?refresh=$refresh", + emptyMap(), + destination.toHttpEntity(), + ) assertEquals("Unable to update a destination", RestStatus.OK, response.restStatus()) - val destinationJson = jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val destinationJson = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() assertUserNull(destinationJson as HashMap) return destination.copy(id = destinationJson["_id"] as String, version = (destinationJson["_version"] as Int).toLong()) @@ -343,20 +391,27 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { * Email Accounts are now deprecated in favor of the Notification plugin's configs. * This method should only be used for checking legacy behavior/Notification migration scenarios. 
*/ - protected fun createEmailAccount(emailAccount: EmailAccount = getTestEmailAccount(), refresh: Boolean = true): EmailAccount { + protected fun createEmailAccount( + emailAccount: EmailAccount = getTestEmailAccount(), + refresh: Boolean = true, + ): EmailAccount { // Create Alerting config index if it doesn't exist to avoid mapping issues with legacy destination indexing createAlertingConfigIndex() - val response = indexDocWithAdminClient( - ScheduledJob.SCHEDULED_JOBS_INDEX, - UUIDs.base64UUID(), - emailAccount.toJsonStringWithType(), - refresh - ) - val emailAccountJson = jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val response = + indexDocWithAdminClient( + ScheduledJob.SCHEDULED_JOBS_INDEX, + UUIDs.base64UUID(), + emailAccount.toJsonStringWithType(), + refresh, + ) + val emailAccountJson = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() return emailAccount.copy(id = emailAccountJson["_id"] as String) } @@ -366,7 +421,10 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { return getEmailAccount(emailAccountID = emailAccountID) } - protected fun createRandomEmailAccountWithGivenName(refresh: Boolean = true, randomName: String): EmailAccount { + protected fun createRandomEmailAccountWithGivenName( + refresh: Boolean = true, + randomName: String, + ): EmailAccount { val emailAccount = randomEmailAccount(salt = randomName) val emailAccountID = createEmailAccount(emailAccount, refresh).id return getEmailAccount(emailAccountID = emailAccountID) @@ -403,20 +461,27 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { * Email Groups are now deprecated in favor of the Notification plugin's configs. * This method should only be used for checking legacy behavior/Notification migration scenarios. 
*/ - protected fun createEmailGroup(emailGroup: EmailGroup = getTestEmailGroup(), refresh: Boolean = true): EmailGroup { + protected fun createEmailGroup( + emailGroup: EmailGroup = getTestEmailGroup(), + refresh: Boolean = true, + ): EmailGroup { // Create Alerting config index if it doesn't exist to avoid mapping issues with legacy destination indexing createAlertingConfigIndex() - val response = indexDocWithAdminClient( - ScheduledJob.SCHEDULED_JOBS_INDEX, - UUIDs.base64UUID(), - emailGroup.toJsonStringWithType(), - refresh - ) - val emailGroupJson = jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val response = + indexDocWithAdminClient( + ScheduledJob.SCHEDULED_JOBS_INDEX, + UUIDs.base64UUID(), + emailGroup.toJsonStringWithType(), + refresh, + ) + val emailGroupJson = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() return emailGroup.copy(id = emailGroupJson["_id"] as String) } @@ -426,7 +491,10 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { return getEmailGroup(emailGroupID = emailGroupID) } - protected fun createRandomEmailGroupWithGivenName(refresh: Boolean = true, randomName: String): EmailGroup { + protected fun createRandomEmailGroupWithGivenName( + refresh: Boolean = true, + randomName: String, + ): EmailGroup { val emailGroup = randomEmailGroup(salt = randomName) val emailGroupID = createEmailGroup(emailGroup, refresh).id return getEmailGroup(emailGroupID = emailGroupID) @@ -434,23 +502,25 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { @Suppress("UNCHECKED_CAST") protected fun getDestination(destination: Destination): Map { - val response = client().makeRequest( - "GET", - "$DESTINATION_BASE_URI/${destination.id}" - ) + val response = + client().makeRequest( + "GET", + "$DESTINATION_BASE_URI/${destination.id}", + ) assertEquals("Unable to update a 
destination", RestStatus.OK, response.restStatus()) - val destinationJson = jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val destinationJson = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() assertUserNull(destinationJson as HashMap) return (destinationJson["destinations"] as List)[0] as Map } @Suppress("UNCHECKED_CAST") - protected fun getDestinations(dataMap: Map = emptyMap()): List> { - return getDestinations(client(), dataMap) - } + protected fun getDestinations(dataMap: Map = emptyMap()): List> = getDestinations(client(), dataMap) @Suppress("UNCHECKED_CAST") protected fun getDestinations( @@ -458,28 +528,31 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { dataMap: Map = emptyMap(), header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ): List> { - var baseEndpoint = "$DESTINATION_BASE_URI?" 
for (entry in dataMap.entries) { baseEndpoint += "${entry.key}=${entry.value}&" } - val response = client.makeRequest( - "GET", - baseEndpoint, - null, - header - ) + val response = + client.makeRequest( + "GET", + baseEndpoint, + null, + header, + ) assertEquals("Unable to update a destination", RestStatus.OK, response.restStatus()) - val destinationJson = jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val destinationJson = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() return destinationJson["destinations"] as List> } - protected fun getTestDestination(): Destination { - return Destination( + protected fun getTestDestination(): Destination = + Destination( type = DestinationType.TEST_ACTION, name = "test", user = randomUser(), @@ -487,9 +560,8 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { chime = null, slack = null, customWebhook = null, - email = null + email = null, ) - } fun getSlackDestination(): Destination { val slack = Slack("https://hooks.slack.com/services/slackId") @@ -501,7 +573,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { chime = null, slack = slack, customWebhook = null, - email = null + email = null, ) } @@ -515,23 +587,24 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { chime = chime, slack = null, customWebhook = null, - email = null + email = null, ) } fun getCustomWebhookDestination(): Destination { - val customWebhook = CustomWebhook( - "https://hooks.slack.com/services/customWebhookId", - null, - null, - 80, - null, - null, - emptyMap(), - emptyMap(), - null, - null - ) + val customWebhook = + CustomWebhook( + "https://hooks.slack.com/services/customWebhookId", + null, + null, + 80, + null, + null, + emptyMap(), + emptyMap(), + null, + null, + ) return Destination( type = DestinationType.CUSTOM_WEBHOOK, name = "test", @@ 
-540,30 +613,31 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { chime = null, slack = null, customWebhook = customWebhook, - email = null + email = null, ) } - private fun getTestEmailAccount(): EmailAccount { - return EmailAccount( + private fun getTestEmailAccount(): EmailAccount = + EmailAccount( name = "test", email = "test@email.com", host = "smtp.com", port = 25, method = EmailAccount.MethodType.NONE, username = null, - password = null + password = null, ) - } - private fun getTestEmailGroup(): EmailGroup { - return EmailGroup( + private fun getTestEmailGroup(): EmailGroup = + EmailGroup( name = "test", - emails = listOf() + emails = listOf(), ) - } - protected fun verifyIndexSchemaVersion(index: String, expectedVersion: Int) { + protected fun verifyIndexSchemaVersion( + index: String, + expectedVersion: Int, + ) { val indexMapping = client().getIndexMapping(index) val indexName = indexMapping.keys.toList()[0] val mappings = indexMapping.stringMap(indexName)?.stringMap("mappings") @@ -576,22 +650,31 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { } protected fun createAlert(alert: Alert): Alert { - val response = adminClient().makeRequest( - "POST", "/${AlertIndices.ALERT_INDEX}/_doc?refresh=true&routing=${alert.monitorId}", - emptyMap(), alert.toHttpEntityWithUser() - ) + val response = + adminClient().makeRequest( + "POST", + "/${AlertIndices.ALERT_INDEX}/_doc?refresh=true&routing=${alert.monitorId}", + emptyMap(), + alert.toHttpEntityWithUser(), + ) assertEquals("Unable to create a new alert", RestStatus.CREATED, response.restStatus()) - val alertJson = jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val alertJson = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() assertNull(alertJson["monitor_user"]) return alert.copy(id = alertJson["_id"] as String, version 
= (alertJson["_version"] as Int).toLong()) } - protected fun createRandomMonitor(refresh: Boolean = false, withMetadata: Boolean = false): Monitor { + protected fun createRandomMonitor( + refresh: Boolean = false, + withMetadata: Boolean = false, + ): Monitor { val monitor = randomQueryLevelMonitor(withMetadata = withMetadata) val monitorId = createMonitor(monitor, refresh).id if (withMetadata) { @@ -613,7 +696,10 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { return getMonitorV2(monitorV2Id = pplMonitorId) as PPLSQLMonitor } - protected fun createRandomDocumentMonitor(refresh: Boolean = false, withMetadata: Boolean = false): Monitor { + protected fun createRandomDocumentMonitor( + refresh: Boolean = false, + withMetadata: Boolean = false, + ): Monitor { val monitor = randomDocumentLevelMonitor(withMetadata = withMetadata) val monitorId = createMonitor(monitor, refresh).id if (withMetadata) { @@ -623,35 +709,51 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { } @Suppress("UNCHECKED_CAST") - protected fun updateMonitor(monitor: Monitor, refresh: Boolean = false): Monitor { - val response = client().makeRequest( - "PUT", "${monitor.relativeUrl()}?refresh=$refresh", - emptyMap(), monitor.toHttpEntity() - ) + protected fun updateMonitor( + monitor: Monitor, + refresh: Boolean = false, + ): Monitor { + val response = + client().makeRequest( + "PUT", + "${monitor.relativeUrl()}?refresh=$refresh", + emptyMap(), + monitor.toHttpEntity(), + ) assertEquals("Unable to update a monitor", RestStatus.OK, response.restStatus()) assertUserNull(response.asMap()["monitor"] as Map) return getMonitor(monitorId = monitor.id) } @Suppress("UNCHECKED_CAST") - protected fun updateWorkflow(workflow: Workflow, refresh: Boolean = false): Workflow { - val response = client().makeRequest( - "PUT", - "${workflow.relativeUrl()}?refresh=$refresh", - emptyMap(), - workflow.toHttpEntity() - ) + protected fun updateWorkflow( + workflow: Workflow, + refresh: Boolean = 
false, + ): Workflow { + val response = + client().makeRequest( + "PUT", + "${workflow.relativeUrl()}?refresh=$refresh", + emptyMap(), + workflow.toHttpEntity(), + ) assertEquals("Unable to update a workflow", RestStatus.OK, response.restStatus()) assertUserNull(response.asMap()["workflow"] as Map) return getWorkflow(workflowId = workflow.id) } @Suppress("UNCHECKED_CAST") - protected fun updateMonitorV2(monitorV2: MonitorV2, refresh: Boolean = false): MonitorV2 { - val response = client().makeRequest( - "PUT", "$MONITOR_V2_BASE_URI/${monitorV2.id}?refresh=$refresh", - emptyMap(), monitorV2.toHttpEntity() - ) + protected fun updateMonitorV2( + monitorV2: MonitorV2, + refresh: Boolean = false, + ): MonitorV2 { + val response = + client().makeRequest( + "PUT", + "$MONITOR_V2_BASE_URI/${monitorV2.id}?refresh=$refresh", + emptyMap(), + monitorV2.toHttpEntity(), + ) assertEquals("Unable to update a monitorV2", RestStatus.OK, response.restStatus()) return getMonitorV2(monitorV2Id = monitorV2.id) } @@ -662,10 +764,13 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { rbacRoles: List = emptyList(), refresh: Boolean = true, ): Monitor { - val response = client.makeRequest( - "PUT", "${monitor.relativeUrl()}?refresh=$refresh", - emptyMap(), createMonitorEntityWithBackendRoles(monitor, rbacRoles) - ) + val response = + client.makeRequest( + "PUT", + "${monitor.relativeUrl()}?refresh=$refresh", + emptyMap(), + createMonitorEntityWithBackendRoles(monitor, rbacRoles), + ) assertEquals("Unable to update a monitor", RestStatus.OK, response.restStatus()) assertUserNull(response.asMap()["monitor"] as Map) return getMonitor(monitorId = monitor.id) @@ -677,18 +782,22 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { rbacRoles: List = emptyList(), refresh: Boolean = true, ): Workflow { - val response = client.makeRequest( - "PUT", - "${workflow.relativeUrl()}?refresh=$refresh", - emptyMap(), - createWorkflowEntityWithBackendRoles(workflow, rbacRoles) - ) + val 
response = + client.makeRequest( + "PUT", + "${workflow.relativeUrl()}?refresh=$refresh", + emptyMap(), + createWorkflowEntityWithBackendRoles(workflow, rbacRoles), + ) assertEquals("Unable to update a workflow", RestStatus.OK, response.restStatus()) assertUserNull(response.asMap()["workflow"] as Map) return getWorkflow(workflowId = workflow.id) } - protected fun getMonitor(monitorId: String, header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json")): Monitor { + protected fun getMonitor( + monitorId: String, + header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ): Monitor { val response = client().makeRequest("GET", "$ALERTING_BASE_URI/$monitorId", null, header) assertEquals("Unable to get monitor $monitorId", RestStatus.OK, response.restStatus()) @@ -703,14 +812,23 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { parser.nextToken() when (parser.currentName()) { - "_id" -> id = parser.text() - "_version" -> version = parser.longValue() - "monitor" -> monitor = Monitor.parse(parser) + "_id" -> { + id = parser.text() + } + + "_version" -> { + version = parser.longValue() + } + + "monitor" -> { + monitor = Monitor.parse(parser) + } + "associated_workflows" -> { XContentParserUtils.ensureExpectedToken( XContentParser.Token.START_ARRAY, parser.currentToken(), - parser + parser, ) while (parser.nextToken() != XContentParser.Token.END_ARRAY) { // do nothing @@ -725,7 +843,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun getMonitorV2( monitorV2Id: String, - header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ): MonitorV2 { val response = client().makeRequest("GET", "$MONITOR_V2_BASE_URI/$monitorV2Id", null, header) assertEquals("Unable to get monitorV2 $monitorV2Id", RestStatus.OK, response.restStatus()) @@ -758,39 +876,43 @@ abstract class AlertingRestTestCase : 
ODFERestTestCase() { ): List { if (refresh) refreshIndex(indices) - val request = """ + val request = + """ { "version" : true, "query": { "match_all": {} } } - """.trimIndent() + """.trimIndent() val httpResponse = adminClient().makeRequest("GET", "/$indices/_search", StringEntity(request, APPLICATION_JSON)) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) val searchResponse = SearchResponse.fromXContent(createParser(jsonXContent, httpResponse.entity.content)) - return searchResponse.hits.hits.map { - val xcp = createParser(jsonXContent, it.sourceRef).also { it.nextToken() } - Alert.parse(xcp, it.id, it.version) - }.filter { alert -> alert.monitorId == monitor.id } + return searchResponse.hits.hits + .map { + val xcp = createParser(jsonXContent, it.sourceRef).also { it.nextToken() } + Alert.parse(xcp, it.id, it.version) + }.filter { alert -> alert.monitorId == monitor.id } } protected fun createFinding( monitorId: String = "NO_ID", monitorName: String = "NO_NAME", index: String = "testIndex", - docLevelQueries: List = listOf( - DocLevelQuery(query = "test_field:\"us-west-2\"", name = "testQuery", fields = listOf()) - ), + docLevelQueries: List = + listOf( + DocLevelQuery(query = "test_field:\"us-west-2\"", name = "testQuery", fields = listOf()), + ), matchingDocIds: List, ): String { - val finding = Finding( - id = UUID.randomUUID().toString(), - relatedDocIds = matchingDocIds, - monitorId = monitorId, - monitorName = monitorName, - index = index, - docLevelQueries = docLevelQueries, - timestamp = Instant.now() - ) + val finding = + Finding( + id = UUID.randomUUID().toString(), + relatedDocIds = matchingDocIds, + monitorId = monitorId, + monitorName = monitorName, + index = index, + docLevelQueries = docLevelQueries, + timestamp = Instant.now(), + ) val findingStr = finding.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string() @@ -805,22 +927,28 @@ abstract class AlertingRestTestCase : 
ODFERestTestCase() { ): List { if (refresh) refreshIndex(indices) - val request = """ + val request = + """ { "version" : true, "query": { "match_all": {} } } - """.trimIndent() + """.trimIndent() val httpResponse = adminClient().makeRequest("GET", "/$indices/_search", StringEntity(request, APPLICATION_JSON)) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) val searchResponse = SearchResponse.fromXContent(createParser(jsonXContent, httpResponse.entity.content)) - return searchResponse.hits.hits.map { - val xcp = createParser(jsonXContent, it.sourceRef).also { it.nextToken() } - Finding.parse(xcp) - }.filter { finding -> finding.monitorId == monitor.id } + return searchResponse.hits.hits + .map { + val xcp = createParser(jsonXContent, it.sourceRef).also { it.nextToken() } + Finding.parse(xcp) + }.filter { finding -> finding.monitorId == monitor.id } } - protected fun searchAlerts(monitor: Monitor, indices: String = AlertIndices.ALERT_INDEX, refresh: Boolean = true): List { + protected fun searchAlerts( + monitor: Monitor, + indices: String = AlertIndices.ALERT_INDEX, + refresh: Boolean = true, + ): List { try { if (refresh) refreshIndex(indices) } catch (e: Exception) { @@ -830,11 +958,12 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { // If this is a test monitor (it doesn't have an ID) and no alerts will be saved for it. 
val searchParams = if (monitor.id != Monitor.NO_ID) mapOf("routing" to monitor.id) else mapOf() - val request = """ + val request = + """ { "version" : true, "query" : { "term" : { "${Alert.MONITOR_ID_FIELD}" : "${monitor.id}" } } } - """.trimIndent() + """.trimIndent() val httpResponse = adminClient().makeRequest("GET", "/$indices/_search", searchParams, StringEntity(request, APPLICATION_JSON)) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) @@ -848,7 +977,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun searchAlertV2s( monitorV2Id: String, indices: String = AlertV2Indices.ALERT_V2_INDEX, - refresh: Boolean = true + refresh: Boolean = true, ): List { try { if (refresh) refreshIndex(indices) @@ -859,11 +988,12 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { // If this is a test monitor (it doesn't have an ID) and no alerts will be saved for it. val searchParams = if (monitorV2Id != MonitorV2.NO_ID) mapOf("routing" to monitorV2Id) else mapOf() - val request = """ + val request = + """ { "version" : true, "query" : { "term" : { "${AlertV2.MONITOR_V2_ID_FIELD}" : "$monitorV2Id" } } } - """.trimIndent() + """.trimIndent() val httpResponse = adminClient().makeRequest("GET", "/$indices/_search", searchParams, StringEntity(request, APPLICATION_JSON)) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) @@ -874,32 +1004,49 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { } } - protected fun acknowledgeAlerts(monitor: Monitor, vararg alerts: Alert): Response { - val request = XContentFactory.jsonBuilder().startObject() - .array("alerts", *alerts.map { it.id }.toTypedArray()) - .endObject() - .string() - .let { StringEntity(it, APPLICATION_JSON) } - - val response = client().makeRequest( - "POST", "${monitor.relativeUrl()}/_acknowledge/alerts?refresh=true", - emptyMap(), request - ) + protected fun acknowledgeAlerts( + monitor: Monitor, + vararg alerts: Alert, + ): Response { + 
val request = + XContentFactory + .jsonBuilder() + .startObject() + .array("alerts", *alerts.map { it.id }.toTypedArray()) + .endObject() + .string() + .let { StringEntity(it, APPLICATION_JSON) } + + val response = + client().makeRequest( + "POST", + "${monitor.relativeUrl()}/_acknowledge/alerts?refresh=true", + emptyMap(), + request, + ) assertEquals("Acknowledge call failed.", RestStatus.OK, response.restStatus()) return response } - protected fun acknowledgeChainedAlerts(workflowId: String, vararg alertId: String): Response { - val request = jsonBuilder().startObject() - .array("alerts", *alertId.map { it }.toTypedArray()) - .endObject() - .string() - .let { StringEntity(it, APPLICATION_JSON) } - - val response = client().makeRequest( - "POST", "${AlertingPlugin.WORKFLOW_BASE_URI}/$workflowId/_acknowledge/alerts", - emptyMap(), request - ) + protected fun acknowledgeChainedAlerts( + workflowId: String, + vararg alertId: String, + ): Response { + val request = + jsonBuilder() + .startObject() + .array("alerts", *alertId.map { it }.toTypedArray()) + .endObject() + .string() + .let { StringEntity(it, APPLICATION_JSON) } + + val response = + client().makeRequest( + "POST", + "${AlertingPlugin.WORKFLOW_BASE_URI}/$workflowId/_acknowledge/alerts", + emptyMap(), + request, + ) assertEquals("Acknowledge call failed.", RestStatus.OK, response.restStatus()) return response } @@ -922,17 +1069,16 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun getAlerts( dataMap: Map = emptyMap(), header: BasicHeader = BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), - ): Response { - return getAlerts(client(), dataMap, header) - } + ): Response = getAlerts(client(), dataMap, header) protected fun getAlertV2s(): Response { - val response = client().makeRequest( - "GET", - "$MONITOR_V2_BASE_URI/alerts", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val response = + client().makeRequest( + "GET", + "$MONITOR_V2_BASE_URI/alerts", 
+ null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get call failed.", RestStatus.OK, response.restStatus()) return response } @@ -949,24 +1095,25 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { return response } - protected fun executeMonitor(monitorId: String, params: Map = mutableMapOf()): Response { - return executeMonitor(client(), monitorId, params) - } + protected fun executeMonitor( + monitorId: String, + params: Map = mutableMapOf(), + ): Response = executeMonitor(client(), monitorId, params) - protected fun executeWorkflow(workflowId: String, params: Map = mutableMapOf()): Response { - return executeWorkflow(client(), workflowId, params) - } + protected fun executeWorkflow( + workflowId: String, + params: Map = mutableMapOf(), + ): Response = executeWorkflow(client(), workflowId, params) protected fun getWorkflowAlerts( workflowId: String, alertId: String? = "", getAssociatedAlerts: Boolean = true, - ): Response { - return getWorkflowAlerts( + ): Response = + getWorkflowAlerts( client(), - mutableMapOf(Pair("workflowIds", workflowId), Pair("getAssociatedAlerts", getAssociatedAlerts), Pair("alertIds", alertId!!)) + mutableMapOf(Pair("workflowIds", workflowId), Pair("getAssociatedAlerts", getAssociatedAlerts), Pair("alertIds", alertId!!)), ) - } protected fun getWorkflowAlerts( client: RestClient, @@ -983,26 +1130,35 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { return response } - protected fun executeMonitor(client: RestClient, monitorId: String, params: Map = mutableMapOf()): Response { - return client.makeRequest("POST", "$ALERTING_BASE_URI/$monitorId/_execute", params) - } + protected fun executeMonitor( + client: RestClient, + monitorId: String, + params: Map = mutableMapOf(), + ): Response = client.makeRequest("POST", "$ALERTING_BASE_URI/$monitorId/_execute", params) - protected fun executeWorkflow(client: RestClient, workflowId: String, params: Map = mutableMapOf()): Response { - 
return client.makeRequest("POST", "$WORKFLOW_ALERTING_BASE_URI/$workflowId/_execute", params) - } + protected fun executeWorkflow( + client: RestClient, + workflowId: String, + params: Map = mutableMapOf(), + ): Response = client.makeRequest("POST", "$WORKFLOW_ALERTING_BASE_URI/$workflowId/_execute", params) - protected fun executeMonitor(monitor: Monitor, params: Map = mapOf()): Response { - return executeMonitor(client(), monitor, params) - } + protected fun executeMonitor( + monitor: Monitor, + params: Map = mapOf(), + ): Response = executeMonitor(client(), monitor, params) - protected fun executeMonitor(client: RestClient, monitor: Monitor, params: Map = mapOf()): Response = - client.makeRequest("POST", "$ALERTING_BASE_URI/_execute", params, monitor.toHttpEntityWithUser()) + protected fun executeMonitor( + client: RestClient, + monitor: Monitor, + params: Map = mapOf(), + ): Response = client.makeRequest("POST", "$ALERTING_BASE_URI/_execute", params, monitor.toHttpEntityWithUser()) - protected fun executeMonitorV2(monitorId: String, params: Map = mutableMapOf()): Response = - client().makeRequest("POST", "$MONITOR_V2_BASE_URI/$monitorId/_execute", params) + protected fun executeMonitorV2( + monitorId: String, + params: Map = mutableMapOf(), + ): Response = client().makeRequest("POST", "$MONITOR_V2_BASE_URI/$monitorId/_execute", params) protected fun searchFindings(params: Map = mutableMapOf()): GetFindingsResponse { - var baseEndpoint = "${AlertingPlugin.FINDING_BASE_URI}/_search?" 
for (entry in params.entries) { baseEndpoint += "${entry.key}=${entry.value}&" @@ -1022,7 +1178,10 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { parser.nextToken() when (parser.currentName()) { - "total_findings" -> totalFindings = parser.intValue() + "total_findings" -> { + totalFindings = parser.intValue() + } + "findings" -> { XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser) while (parser.nextToken() != XContentParser.Token.END_ARRAY) { @@ -1037,58 +1196,86 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun searchMonitors(): SearchResponse { var baseEndpoint = "${AlertingPlugin.MONITOR_BASE_URI}/_search?" - val request = """ + val request = + """ { "version" : true, "query": { "match_all": {} } } - """.trimIndent() + """.trimIndent() val httpResponse = adminClient().makeRequest("POST", baseEndpoint, StringEntity(request, APPLICATION_JSON)) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) return SearchResponse.fromXContent(createParser(jsonXContent, httpResponse.entity.content)) } - protected fun indexDoc(index: String, id: String, doc: String, refresh: Boolean = true): Response { - return indexDoc(client(), index, id, doc, refresh) - } + protected fun indexDoc( + index: String, + id: String, + doc: String, + refresh: Boolean = true, + ): Response = indexDoc(client(), index, id, doc, refresh) - protected fun indexDocWithAdminClient(index: String, id: String, doc: String, refresh: Boolean = true): Response { - return indexDoc(adminClient(), index, id, doc, refresh) - } + protected fun indexDocWithAdminClient( + index: String, + id: String, + doc: String, + refresh: Boolean = true, + ): Response = indexDoc(adminClient(), index, id, doc, refresh) - private fun indexDoc(client: RestClient, index: String, id: String, doc: String, refresh: Boolean = true): Response { + private fun indexDoc( + client: RestClient, + index: String, + id: String, + doc: 
String, + refresh: Boolean = true, + ): Response { val requestBody = StringEntity(doc, APPLICATION_JSON) val params = if (refresh) mapOf("refresh" to "true") else mapOf() val response = client.makeRequest("POST", "$index/_doc/$id?op_type=create", params, requestBody) assertTrue( "Unable to index doc: '${doc.take(15)}...' to index: '$index'", - listOf(RestStatus.OK, RestStatus.CREATED).contains(response.restStatus()) + listOf(RestStatus.OK, RestStatus.CREATED).contains(response.restStatus()), ) return response } - fun indexDoc(client: RestClient, index: String, doc: String, refresh: Boolean = true): Response { + fun indexDoc( + client: RestClient, + index: String, + doc: String, + refresh: Boolean = true, + ): Response { val requestBody = StringEntity(doc, APPLICATION_JSON) val params = if (refresh) mapOf("refresh" to "true") else mapOf() val response = client.makeRequest("POST", "$index/_doc?op_type=create", params, requestBody) assertTrue( "Unable to index doc: '${doc.take(15)}...' to index: '$index'", - listOf(RestStatus.OK, RestStatus.CREATED).contains(response.restStatus()) + listOf(RestStatus.OK, RestStatus.CREATED).contains(response.restStatus()), ) return response } - fun updateDoc(client: RestClient, index: String, id: String, doc: String, refresh: Boolean = true): Response { + fun updateDoc( + client: RestClient, + index: String, + id: String, + doc: String, + refresh: Boolean = true, + ): Response { val requestBody = StringEntity(doc, APPLICATION_JSON) val params = if (refresh) mapOf("refresh" to "true") else mapOf() val response = client.makeRequest("PUT", "$index/_doc/$id", params, requestBody) assertTrue( "Unable to index doc: '${doc.take(15)}...' 
to index: '$index'", - listOf(RestStatus.OK, RestStatus.CREATED).contains(response.restStatus()) + listOf(RestStatus.OK, RestStatus.CREATED).contains(response.restStatus()), ) return response } - protected fun deleteDoc(index: String, id: String, refresh: Boolean = true): Response { + protected fun deleteDoc( + index: String, + id: String, + refresh: Boolean = true, + ): Response { val params = if (refresh) mapOf("refresh" to "true") else mapOf() val response = client().makeRequest("DELETE", "$index/_doc/$id", params) assertTrue("Unable to delete doc with ID $id in index: '$index'", listOf(RestStatus.OK).contains(response.restStatus())) @@ -1098,38 +1285,50 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { /** A test index that can be used across tests. Feel free to add new fields but don't remove any. */ protected fun createTestIndex(index: String = randomAlphaOfLength(10).lowercase(Locale.ROOT)): String { createIndex( - index, Settings.EMPTY, + index, + Settings.EMPTY, """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } - """.trimIndent() + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } + """.trimIndent(), ) return index } - protected fun createTestIndex(index: String = randomAlphaOfLength(10).lowercase(Locale.ROOT), settings: Settings): String { + protected fun createTestIndex( + index: String = randomAlphaOfLength(10).lowercase(Locale.ROOT), + settings: Settings, + ): String { createIndex( - index, settings, + index, + settings, """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } - """.trimIndent() + "properties" : { + "test_strict_date_time" : { "type" : 
"date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } + """.trimIndent(), ) return index } - protected fun createTestIndex(index: String, mapping: String): String { + protected fun createTestIndex( + index: String, + mapping: String, + ): String { createIndex(index, Settings.EMPTY, mapping.trimIndent()) return index } - protected fun createTestIndex(index: String, mapping: String?, alias: String): String { + protected fun createTestIndex( + index: String, + mapping: String?, + alias: String, + ): String { createIndex(index, Settings.EMPTY, mapping?.trimIndent(), alias) return index } @@ -1137,12 +1336,13 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun createTestConfigIndex(index: String = "." + randomAlphaOfLength(10).lowercase(Locale.ROOT)): String { try { createIndex( - index, Settings.builder().build(), + index, + Settings.builder().build(), """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" } - } - """.trimIndent() + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" } + } + """.trimIndent(), ) } catch (ex: WarningFailureException) { // ignore @@ -1154,32 +1354,31 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { alias: String = randomAlphaOfLength(10).lowercase(Locale.ROOT), numOfAliasIndices: Int = randomIntBetween(1, 10), includeWriteIndex: Boolean = true, - ): MutableMap> { - return createTestAlias(alias = alias, indices = randomAliasIndices(alias, numOfAliasIndices, includeWriteIndex)) - } + ): MutableMap> = + createTestAlias(alias = alias, indices = randomAliasIndices(alias, numOfAliasIndices, includeWriteIndex)) protected fun createTestAlias( alias: String = randomAlphaOfLength(10).lowercase(Locale.ROOT), numOfAliasIndices: Int = randomIntBetween(1, 10), includeWriteIndex: Boolean = true, indicesMapping: String, - ): MutableMap> { - return createTestAlias( + 
): MutableMap> = + createTestAlias( alias = alias, indices = randomAliasIndices(alias, numOfAliasIndices, includeWriteIndex), - indicesMapping = indicesMapping + indicesMapping = indicesMapping, ) - } protected fun createTestAlias( alias: String = randomAlphaOfLength(10).lowercase(Locale.ROOT), - indices: Map = randomAliasIndices( - alias = alias, - num = randomIntBetween(1, 10), - includeWriteIndex = true - ), + indices: Map = + randomAliasIndices( + alias = alias, + num = randomIntBetween(1, 10), + includeWriteIndex = true, + ), createIndices: Boolean = true, - indicesMapping: String = "" + indicesMapping: String = "", ): MutableMap> { val indicesMap = mutableMapOf() val indicesJson = jsonBuilder().startObject().startArray("actions") @@ -1187,13 +1386,15 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { if (createIndices) createTestIndex(index = it, indicesMapping) val isWriteIndex = indices.getOrDefault(it, false) indicesMap[it] = isWriteIndex - val indexMap = mapOf( - "add" to mapOf( - "index" to it, - "alias" to alias, - "is_write_index" to isWriteIndex + val indexMap = + mapOf( + "add" to + mapOf( + "index" to it, + "alias" to alias, + "is_write_index" to isWriteIndex, + ), ) - ) indicesJson.value(indexMap) } val requestBody = indicesJson.endArray().endObject().string() @@ -1201,12 +1402,17 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { return mutableMapOf(alias to indicesMap) } - protected fun createDataStream(datastream: String, mappings: String?, useComponentTemplate: Boolean) { + protected fun createDataStream( + datastream: String, + mappings: String?, + useComponentTemplate: Boolean, + ) { val indexPattern = "$datastream*" - var componentTemplateMappings = "\"properties\": {" + - " \"netflow.destination_transport_port\":{ \"type\": \"long\" }," + - " \"netflow.destination_ipv4_address\":{ \"type\": \"ip\" }" + - "}" + var componentTemplateMappings = + "\"properties\": {" + + " \"netflow.destination_transport_port\":{ \"type\": 
\"long\" }," + + " \"netflow.destination_ipv4_address\":{ \"type\": \"ip\" }" + + "}" if (mappings != null) { componentTemplateMappings = mappings } @@ -1214,7 +1420,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { // Setup index_template createComponentTemplateWithMappings( "my_ds_component_template-$datastream", - componentTemplateMappings + componentTemplateMappings, ) } createComposableIndexTemplate( @@ -1223,7 +1429,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { (if (useComponentTemplate) "my_ds_component_template-$datastream" else null), mappings, true, - 0 + 0, ) createDataStream(datastream) } @@ -1236,19 +1442,24 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { client().makeRequest("DELETE", "_data_stream/$datastream") } - protected fun createIndexAlias(alias: String, mappings: String?, setting: String? = "") { + protected fun createIndexAlias( + alias: String, + mappings: String?, + setting: String? = "", + ) { val indexPattern = "$alias*" - var componentTemplateMappings = "\"properties\": {" + - " \"netflow.destination_transport_port\":{ \"type\": \"long\" }," + - " \"netflow.destination_ipv4_address\":{ \"type\": \"ip\" }" + - "}" + var componentTemplateMappings = + "\"properties\": {" + + " \"netflow.destination_transport_port\":{ \"type\": \"long\" }," + + " \"netflow.destination_ipv4_address\":{ \"type\": \"ip\" }" + + "}" if (mappings != null) { componentTemplateMappings = mappings } createComponentTemplateWithMappingsAndSettings( "my_alias_component_template-$alias", componentTemplateMappings, - setting + setting, ) createComposableIndexTemplate( "my_index_template_alias-$alias", @@ -1256,7 +1467,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { "my_alias_component_template-$alias", mappings, false, - 0 + 0, ) createTestIndex( "$alias-000001", @@ -1265,7 +1476,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { "$alias": { "is_write_index": true } - """.trimIndent() + 
""".trimIndent(), ) } @@ -1273,25 +1484,32 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { client().makeRequest("DELETE", "$alias*/_alias/$alias") } - protected fun createComponentTemplateWithMappings(componentTemplateName: String, mappings: String?) { + protected fun createComponentTemplateWithMappings( + componentTemplateName: String, + mappings: String?, + ) { val body = """{"template" : { "mappings": {$mappings} }}""" client().makeRequest( "PUT", "_component_template/$componentTemplateName", emptyMap(), StringEntity(body, ContentType.APPLICATION_JSON), - BasicHeader("Content-Type", "application/json") + BasicHeader("Content-Type", "application/json"), ) } - protected fun createComponentTemplateWithMappingsAndSettings(componentTemplateName: String, mappings: String?, setting: String?) { + protected fun createComponentTemplateWithMappingsAndSettings( + componentTemplateName: String, + mappings: String?, + setting: String?, + ) { val body = """{"template" : { "mappings": {$mappings}, "settings": {$setting} }}""" client().makeRequest( "PUT", "_component_template/$componentTemplateName", emptyMap(), StringEntity(body, ContentType.APPLICATION_JSON), - BasicHeader("Content-Type", "application/json") + BasicHeader("Content-Type", "application/json"), ) } @@ -1301,7 +1519,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { componentTemplateName: String?, mappings: String?, isDataStream: Boolean, - priority: Int + priority: Int, ) { var body = "{\n" if (isDataStream) { @@ -1309,7 +1527,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { } body += "\"index_patterns\": [" + indexPatterns.stream().collect( - Collectors.joining(",", "\"", "\"") + Collectors.joining(",", "\"", "\""), ) + "]," if (componentTemplateName == null) { body += "\"template\": {\"mappings\": {$mappings}}," @@ -1323,7 +1541,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { "_index_template/$templateName", emptyMap(), StringEntity(body, 
APPLICATION_JSON), - BasicHeader("Content-Type", "application/json") + BasicHeader("Content-Type", "application/json"), ) } @@ -1347,7 +1565,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { "POST", datastream + "/_rollover", emptyMap(), - null + null, ) } @@ -1360,40 +1578,49 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { val writeIndex = randomIntBetween(0, num - 1) for (i: Int in 0 until num) { var indexName = randomAlphaOfLength(10).lowercase(Locale.ROOT) - while (indexName.equals(alias) || indices.containsKey(indexName)) + while (indexName.equals(alias) || indices.containsKey(indexName)) { indexName = randomAlphaOfLength(10).lowercase(Locale.ROOT) + } indices[indexName] = includeWriteIndex && i == writeIndex } return indices } - protected fun insertSampleTimeSerializedData(index: String, data: List) { + protected fun insertSampleTimeSerializedData( + index: String, + data: List, + ) { data.forEachIndexed { i, value -> val twoMinsAgo = ZonedDateTime.now().minus(2, ChronoUnit.MINUTES).truncatedTo(ChronoUnit.MILLIS) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(twoMinsAgo) - val testDoc = """ + val testDoc = + """ { "test_strict_date_time": "$testTime", "test_field": "$value", "number": "$i" } - """.trimIndent() + """.trimIndent() // Indexing documents with deterministic doc id to allow for easy selected deletion during testing indexDoc(index, (i + 1).toString(), testDoc) } } - protected fun insertSampleTimeSerializedDataCurrentTime(index: String, data: List) { + protected fun insertSampleTimeSerializedDataCurrentTime( + index: String, + data: List, + ) { data.forEachIndexed { i, value -> val time = ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(time) - val testDoc = """ + val testDoc = + """ { "test_strict_date_time": "$testTime", "test_field": "$value", "number": "$i" } - """.trimIndent() + """.trimIndent() // Indexing documents with deterministic doc id 
to allow for easy selected deletion during testing indexDoc(index, (i + 1).toString(), testDoc) } @@ -1406,19 +1633,23 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { ) { data.forEachIndexed { i, value -> val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(time) - val testDoc = """ + val testDoc = + """ { "test_strict_date_time": "$testTime", "test_field": "$value", "number": "$i" } - """.trimIndent() + """.trimIndent() // Indexing documents with deterministic doc id to allow for easy selected deletion during testing indexDoc(index, (i + 1).toString(), testDoc) } } - protected fun deleteDataWithDocIds(index: String, docIds: List) { + protected fun deleteDataWithDocIds( + index: String, + docIds: List, + ) { docIds.forEach { deleteDoc(index, it) } @@ -1448,9 +1679,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { createIndex(encodedHistoryIndex, settings, mappingHack, "\"${AlertV2Indices.ALERT_V2_HISTORY_WRITE_INDEX}\" : {}") } - fun scheduledJobMappings(): String { - return javaClass.classLoader.getResource("mappings/scheduled-jobs.json").readText() - } + fun scheduledJobMappings(): String = javaClass.classLoader.getResource("mappings/scheduled-jobs.json").readText() /** Creates the Alerting config index if it does not exist */ fun createAlertingConfigIndex(mapping: String? 
= null) { @@ -1462,48 +1691,34 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { } } - protected fun Response.restStatus(): RestStatus { - return RestStatus.fromCode(this.statusLine.statusCode) - } + protected fun Response.restStatus(): RestStatus = RestStatus.fromCode(this.statusLine.statusCode) - protected fun Monitor.toHttpEntity(): HttpEntity { - return StringEntity(toJsonString(), APPLICATION_JSON) - } + protected fun Monitor.toHttpEntity(): HttpEntity = StringEntity(toJsonString(), APPLICATION_JSON) private fun Monitor.toJsonString(): String { val builder = XContentFactory.jsonBuilder() return shuffleXContent(toXContent(builder, ToXContent.EMPTY_PARAMS)).string() } - protected fun Monitor.toHttpEntityWithUser(): HttpEntity { - return StringEntity(toJsonStringWithUser(), APPLICATION_JSON) - } + protected fun Monitor.toHttpEntityWithUser(): HttpEntity = StringEntity(toJsonStringWithUser(), APPLICATION_JSON) private fun Monitor.toJsonStringWithUser(): String { val builder = jsonBuilder() return shuffleXContent(toXContentWithUser(builder, ToXContent.EMPTY_PARAMS)).string() } - protected fun MonitorV2.toHttpEntity(): HttpEntity { - return StringEntity(toJsonString(), APPLICATION_JSON) - } + protected fun MonitorV2.toHttpEntity(): HttpEntity = StringEntity(toJsonString(), APPLICATION_JSON) - private fun MonitorV2.toJsonString(): String { - return shuffleXContent(toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)).string() - } + private fun MonitorV2.toJsonString(): String = shuffleXContent(toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)).string() - protected fun MonitorV2.toHttpEntityWithUser(): HttpEntity { - return StringEntity(toJsonStringWithUser(), APPLICATION_JSON) - } + protected fun MonitorV2.toHttpEntityWithUser(): HttpEntity = StringEntity(toJsonStringWithUser(), APPLICATION_JSON) private fun MonitorV2.toJsonStringWithUser(): String { val builder = jsonBuilder() return shuffleXContent(toXContentWithUser(builder, 
ToXContent.EMPTY_PARAMS)).string() } - protected fun Destination.toHttpEntity(): HttpEntity { - return StringEntity(toJsonString(), APPLICATION_JSON) - } + protected fun Destination.toHttpEntity(): HttpEntity = StringEntity(toJsonString(), APPLICATION_JSON) protected fun Destination.toJsonString(): String { val builder = jsonBuilder() @@ -1513,13 +1728,11 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun Destination.toJsonStringWithType(): String { val builder = jsonBuilder() return shuffleXContent( - toXContent(builder, ToXContent.MapParams(mapOf("with_type" to "true"))) + toXContent(builder, ToXContent.MapParams(mapOf("with_type" to "true"))), ).string() } - protected fun EmailAccount.toHttpEntity(): HttpEntity { - return StringEntity(toJsonString(), APPLICATION_JSON) - } + protected fun EmailAccount.toHttpEntity(): HttpEntity = StringEntity(toJsonString(), APPLICATION_JSON) protected fun EmailAccount.toJsonString(): String { val builder = jsonBuilder() @@ -1529,13 +1742,11 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun EmailAccount.toJsonStringWithType(): String { val builder = jsonBuilder() return shuffleXContent( - toXContent(builder, ToXContent.MapParams(mapOf("with_type" to "true"))) + toXContent(builder, ToXContent.MapParams(mapOf("with_type" to "true"))), ).string() } - protected fun EmailGroup.toHttpEntity(): HttpEntity { - return StringEntity(toJsonString(), APPLICATION_JSON) - } + protected fun EmailGroup.toHttpEntity(): HttpEntity = StringEntity(toJsonString(), APPLICATION_JSON) protected fun EmailGroup.toJsonString(): String { val builder = jsonBuilder() @@ -1545,13 +1756,11 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun EmailGroup.toJsonStringWithType(): String { val builder = jsonBuilder() return shuffleXContent( - toXContent(builder, ToXContent.MapParams(mapOf("with_type" to "true"))) + toXContent(builder, ToXContent.MapParams(mapOf("with_type" to "true"))), 
).string() } - protected fun Alert.toHttpEntityWithUser(): HttpEntity { - return StringEntity(toJsonStringWithUser(), APPLICATION_JSON) - } + protected fun Alert.toHttpEntityWithUser(): HttpEntity = StringEntity(toJsonStringWithUser(), APPLICATION_JSON) private fun Alert.toJsonStringWithUser(): String { val builder = jsonBuilder() @@ -1561,15 +1770,15 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun Monitor.relativeUrl() = "$ALERTING_BASE_URI/$id" // Useful settings when debugging to prevent timeouts - override fun restClientSettings(): Settings { - return if (isDebuggingTest || isDebuggingRemoteCluster) { - Settings.builder() + override fun restClientSettings(): Settings = + if (isDebuggingTest || isDebuggingRemoteCluster) { + Settings + .builder() .put(CLIENT_SOCKET_TIMEOUT, TimeValue.timeValueMinutes(10)) .build() } else { super.restClientSettings() } - } fun RestClient.getClusterSettings(settings: Map): Map { val response = this.makeRequest("GET", "_cluster/settings", settings) @@ -1584,22 +1793,27 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { } fun RestClient.getSettings(): Map { - val response = this.makeRequest( - "GET", - "_cluster/settings?flat_settings=true" - ) + val response = + this.makeRequest( + "GET", + "_cluster/settings?flat_settings=true", + ) assertEquals(RestStatus.OK, response.restStatus()) return response.asMap() } - fun RestClient.updateSettings(setting: String, value: Any): Map { - val settings = jsonBuilder() - .startObject() - .startObject("persistent") - .field(setting, value) - .endObject() - .endObject() - .string() + fun RestClient.updateSettings( + setting: String, + value: Any, + ): Map { + val settings = + jsonBuilder() + .startObject() + .startObject("persistent") + .field(setting, value) + .endObject() + .endObject() + .string() val response = this.makeRequest("PUT", "_cluster/settings", StringEntity(settings, APPLICATION_JSON)) assertEquals(RestStatus.OK, response.restStatus()) return 
response.asMap() @@ -1617,7 +1831,10 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { return map[key] } - fun getAlertingStats(metrics: String = "", alertingVersion: String? = null): Map { + fun getAlertingStats( + metrics: String = "", + alertingVersion: String? = null, + ): Map { val endpoint = "/_plugins/_alerting/stats$metrics${alertingVersion?.let { "?version=$it" }.orEmpty()}" val monitorStatsResponse = client().makeRequest("GET", endpoint) val responseMap = createParser(XContentType.JSON.xContent(), monitorStatsResponse.entity.content).map() @@ -1625,106 +1842,157 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { } fun enableScheduledJob(): Response { - val updateResponse = client().makeRequest( - "PUT", "_cluster/settings", - emptyMap(), - StringEntity( - XContentFactory.jsonBuilder().startObject().field("persistent") - .startObject().field(ScheduledJobSettings.SWEEPER_ENABLED.key, true).endObject() - .endObject().string(), - ContentType.APPLICATION_JSON + val updateResponse = + client().makeRequest( + "PUT", + "_cluster/settings", + emptyMap(), + StringEntity( + XContentFactory + .jsonBuilder() + .startObject() + .field("persistent") + .startObject() + .field(ScheduledJobSettings.SWEEPER_ENABLED.key, true) + .endObject() + .endObject() + .string(), + ContentType.APPLICATION_JSON, + ), ) - ) return updateResponse } fun disableScheduledJob(): Response { - val updateResponse = client().makeRequest( - "PUT", "_cluster/settings", - emptyMap(), - StringEntity( - XContentFactory.jsonBuilder().startObject().field("persistent") - .startObject().field(ScheduledJobSettings.SWEEPER_ENABLED.key, false).endObject() - .endObject().string(), - ContentType.APPLICATION_JSON + val updateResponse = + client().makeRequest( + "PUT", + "_cluster/settings", + emptyMap(), + StringEntity( + XContentFactory + .jsonBuilder() + .startObject() + .field("persistent") + .startObject() + .field(ScheduledJobSettings.SWEEPER_ENABLED.key, false) + .endObject() + 
.endObject() + .string(), + ContentType.APPLICATION_JSON, + ), ) - ) return updateResponse } fun enableFilterBy() { - val updateResponse = client().makeRequest( - "PUT", "_cluster/settings", - emptyMap(), - StringEntity( - XContentFactory.jsonBuilder().startObject().field("persistent") - .startObject().field(AlertingSettings.FILTER_BY_BACKEND_ROLES.key, true).endObject() - .endObject().string(), - ContentType.APPLICATION_JSON + val updateResponse = + client().makeRequest( + "PUT", + "_cluster/settings", + emptyMap(), + StringEntity( + XContentFactory + .jsonBuilder() + .startObject() + .field("persistent") + .startObject() + .field(AlertingSettings.FILTER_BY_BACKEND_ROLES.key, true) + .endObject() + .endObject() + .string(), + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals(updateResponse.statusLine.toString(), 200, updateResponse.statusLine.statusCode) } fun disableFilterBy() { - val updateResponse = client().makeRequest( - "PUT", "_cluster/settings", - emptyMap(), - StringEntity( - jsonBuilder().startObject().field("persistent") - .startObject().field(AlertingSettings.FILTER_BY_BACKEND_ROLES.key, false).endObject() - .endObject().string(), - ContentType.APPLICATION_JSON + val updateResponse = + client().makeRequest( + "PUT", + "_cluster/settings", + emptyMap(), + StringEntity( + jsonBuilder() + .startObject() + .field("persistent") + .startObject() + .field(AlertingSettings.FILTER_BY_BACKEND_ROLES.key, false) + .endObject() + .endObject() + .string(), + ContentType.APPLICATION_JSON, + ), ) - ) assertEquals(updateResponse.statusLine.toString(), 200, updateResponse.statusLine.statusCode) } fun removeEmailFromAllowList() { - val allowedDestinations = DestinationType.values().toList() - .filter { destinationType -> destinationType != DestinationType.EMAIL } - .joinToString(prefix = "[", postfix = "]") { string -> "\"$string\"" } + val allowedDestinations = + DestinationType + .values() + .toList() + .filter { destinationType -> destinationType != 
DestinationType.EMAIL } + .joinToString(prefix = "[", postfix = "]") { string -> "\"$string\"" } client().updateSettings(DestinationSettings.ALLOW_LIST.key, allowedDestinations) } - fun createUser(name: String, backendRoles: Array) { + fun createUser( + name: String, + backendRoles: Array, + ) { this.createUserWithAttributes(name, backendRoles, mapOf()) } - fun createUserWithAttributes(name: String, backendRoles: Array, customAttributes: Map) { + fun createUserWithAttributes( + name: String, + backendRoles: Array, + customAttributes: Map, + ) { val request = Request("PUT", "/_plugins/_security/api/internalusers/$name") val broles = backendRoles.joinToString { it -> "\"$it\"" } - val customAttributesString = customAttributes.entries.joinToString(prefix = "{", separator = ", ", postfix = "}") { - "\"${it.key}\": \"${it.value}\"" - } - var entity = """ + val customAttributesString = + customAttributes.entries.joinToString(prefix = "{", separator = ", ", postfix = "}") { + "\"${it.key}\": \"${it.value}\"" + } + var entity = + """ { "password": "$password", "backend_roles": [$broles], "attributes": $customAttributesString } - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) client().performRequest(request) } - fun patchUserBackendRoles(name: String, backendRoles: Array) { + fun patchUserBackendRoles( + name: String, + backendRoles: Array, + ) { val request = Request("PATCH", "/_plugins/_security/api/internalusers/$name") val broles = backendRoles.joinToString { "\"$it\"" } - var entity = """ + var entity = + """ [{ "op": "replace", "path": "/backend_roles", "value": [$broles] }] - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) client().performRequest(request) } - fun createIndexRole(name: String, index: String) { + fun createIndexRole( + name: String, + index: String, + ) { val request = Request("PUT", "/_plugins/_security/api/roles/$name") - var entity = """ + var entity = + """ { "cluster_permissions": [], "index_permissions": [{ @@ 
-1736,15 +2004,20 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { }], "tenant_permissions": [] } - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) client().performRequest(request) } - fun createCustomIndexRole(name: String, index: String, clusterPermissions: String?) { + fun createCustomIndexRole( + name: String, + index: String, + clusterPermissions: String?, + ) { val request = Request("PUT", "/_plugins/_security/api/roles/$name") val clusterPerms = if (clusterPermissions.isNullOrEmpty()) "[]" else "[\"$clusterPermissions\"]" - var entity = """ + var entity = + """ { "cluster_permissions": $clusterPerms, "index_permissions": [{ @@ -1756,20 +2029,25 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { }], "tenant_permissions": [] } - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) client().performRequest(request) } - private fun createCustomIndexRole(name: String, index: String, clusterPermissions: List) { + private fun createCustomIndexRole( + name: String, + index: String, + clusterPermissions: List, + ) { val request = Request("PUT", "/_plugins/_security/api/roles/$name") val clusterPermissionsStr = clusterPermissions.stream().map { p: String? -> "\"" + p + "\"" }.collect( - Collectors.joining(",") + Collectors.joining(","), ) - var entity = """ + var entity = + """ { "cluster_permissions": [$clusterPermissionsStr], "index_permissions": [{ @@ -1781,15 +2059,21 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { }], "tenant_permissions": [] } - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) client().performRequest(request) } - fun createIndexRoleWithDocLevelSecurity(name: String, index: String, dlsQuery: String, clusterPermissions: String? = "") { + fun createIndexRoleWithDocLevelSecurity( + name: String, + index: String, + dlsQuery: String, + clusterPermissions: String? 
= "", + ) { val request = Request("PUT", "/_plugins/_security/api/roles/$name") val clusterPerms = if (clusterPermissions.isNullOrEmpty()) "[]" else "[\"$clusterPermissions\"]" - var entity = """ + var entity = + """ { "cluster_permissions": $clusterPerms, "index_permissions": [{ @@ -1801,19 +2085,25 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { }], "tenant_permissions": [] } - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) client().performRequest(request) } - fun createIndexRoleWithDocLevelSecurity(name: String, index: String, dlsQuery: String, clusterPermissions: List) { + fun createIndexRoleWithDocLevelSecurity( + name: String, + index: String, + dlsQuery: String, + clusterPermissions: List, + ) { val clusterPermissionsStr = clusterPermissions.stream().map { p: String -> "\"" + getClusterPermissionsFromCustomRole(p) + "\"" }.collect( - Collectors.joining(",") + Collectors.joining(","), ) val request = Request("PUT", "/_plugins/_security/api/roles/$name") - var entity = """ + var entity = + """ { "cluster_permissions": [$clusterPermissionsStr], "index_permissions": [{ @@ -1825,38 +2115,47 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { }], "tenant_permissions": [] } - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) client().performRequest(request) } - fun createUserRolesMapping(role: String, users: Array) { + fun createUserRolesMapping( + role: String, + users: Array, + ) { val request = Request("PUT", "/_plugins/_security/api/rolesmapping/$role") val usersStr = users.joinToString { it -> "\"$it\"" } - var entity = """ + var entity = + """ { "backend_roles": [], "hosts": [], "users": [$usersStr] } - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) client().performRequest(request) } - fun updateRoleMapping(role: String, users: List, addUser: Boolean) { + fun updateRoleMapping( + role: String, + users: List, + addUser: Boolean, + ) { val request = Request("PATCH", 
"/_plugins/_security/api/rolesmapping/$role") val usersStr = users.joinToString { it -> "\"$it\"" } val op = if (addUser) "add" else "remove" - val entity = """ + val entity = + """ [{ "op": "$op", "path": "/users", "value": [$usersStr] }] - """.trimIndent() + """.trimIndent() request.setJsonEntity(entity) client().performRequest(request) @@ -1879,7 +2178,12 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { deleteRole(role) } - fun createUserWithTestData(user: String, index: String, role: String, backendRole: String) { + fun createUserWithTestData( + user: String, + index: String, + role: String, + backendRole: String, + ) { createUser(user, arrayOf(backendRole)) createTestIndex(index) createIndexRole(role, index) @@ -1956,9 +2260,7 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { createUserRolesMapping(role, arrayOf(user)) } - fun getClusterPermissionsFromCustomRole(clusterPermissions: String): String? { - return ROLE_TO_PERMISSION_MAPPING.get(clusterPermissions) - } + fun getClusterPermissionsFromCustomRole(clusterPermissions: String): String? = ROLE_TO_PERMISSION_MAPPING.get(clusterPermissions) companion object { internal interface IProxy { @@ -1966,18 +2268,20 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { var sessionId: String? fun getExecutionData(reset: Boolean): ByteArray? + fun dump(reset: Boolean) + fun reset() } /* - * We need to be able to dump the jacoco coverage before the cluster shuts down. - * The new internal testing framework removed some gradle tasks we were listening to, - * to choose a good time to do it. This will dump the executionData to file after each test. - * TODO: This is also currently just overwriting integTest.exec with the updated execData without - * resetting after writing each time. This can be improved to either write an exec file per test - * or by letting jacoco append to the file. - * */ + * We need to be able to dump the jacoco coverage before the cluster shuts down. 
+ * The new internal testing framework removed some gradle tasks we were listening to, + * to choose a good time to do it. This will dump the executionData to file after each test. + * TODO: This is also currently just overwriting integTest.exec with the updated execData without + * resetting after writing each time. This can be improved to either write an exec file per test + * or by letting jacoco append to the file. + * */ @JvmStatic @AfterClass fun dumpCoverage() { @@ -1986,12 +2290,13 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { val jacocoBuildPath = System.getProperty("jacoco.dir") ?: return val serverUrl = "service:jmx:rmi:///jndi/rmi://127.0.0.1:7777/jmxrmi" JMXConnectorFactory.connect(JMXServiceURL(serverUrl)).use { connector -> - val proxy = MBeanServerInvocationHandler.newProxyInstance( - connector.mBeanServerConnection, - ObjectName("org.jacoco:type=Runtime"), - IProxy::class.java, - false - ) + val proxy = + MBeanServerInvocationHandler.newProxyInstance( + connector.mBeanServerConnection, + ObjectName("org.jacoco:type=Runtime"), + IProxy::class.java, + false, + ) proxy.getExecutionData(false)?.let { val path = PathUtils.get("$jacocoBuildPath/integTest.exec") Files.write(path, it) @@ -2000,12 +2305,18 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { } } - protected fun createRandomWorkflow(monitorIds: List, refresh: Boolean = false): Workflow { + protected fun createRandomWorkflow( + monitorIds: List, + refresh: Boolean = false, + ): Workflow { val workflow = randomWorkflow(monitorIds = monitorIds) return createWorkflow(workflow, refresh) } - private fun createWorkflowEntityWithBackendRoles(workflow: Workflow, rbacRoles: List?): HttpEntity { + private fun createWorkflowEntityWithBackendRoles( + workflow: Workflow, + rbacRoles: List?, + ): HttpEntity { if (rbacRoles == null) { return workflow.toHttpEntity() } @@ -2022,27 +2333,32 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { rbacRoles: List? 
= null, refresh: Boolean = true, ): Workflow { - val response = client.makeRequest( - "POST", "$WORKFLOW_ALERTING_BASE_URI?refresh=$refresh", emptyMap(), - createWorkflowEntityWithBackendRoles(workflow, rbacRoles) - ) + val response = + client.makeRequest( + "POST", + "$WORKFLOW_ALERTING_BASE_URI?refresh=$refresh", + emptyMap(), + createWorkflowEntityWithBackendRoles(workflow, rbacRoles), + ) assertEquals("Unable to create a new monitor", RestStatus.CREATED, response.restStatus()) - val workflowJson = jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - response.entity.content - ).map() + val workflowJson = + jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.entity.content, + ).map() assertUserNull(workflowJson as HashMap) return workflow.copy(id = workflowJson["_id"] as String) } - protected fun createWorkflow(workflow: Workflow, refresh: Boolean = true): Workflow { - return createWorkflowWithClient(client(), workflow, emptyList(), refresh) - } + protected fun createWorkflow( + workflow: Workflow, + refresh: Boolean = true, + ): Workflow = createWorkflowWithClient(client(), workflow, emptyList(), refresh) - protected fun Workflow.toHttpEntity(): HttpEntity { - return StringEntity(toJsonString(), APPLICATION_JSON) - } + protected fun Workflow.toHttpEntity(): HttpEntity = StringEntity(toJsonString(), APPLICATION_JSON) private fun Workflow.toJsonString(): String { val builder = XContentFactory.jsonBuilder() @@ -2079,18 +2395,24 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { protected fun Workflow.relativeUrl() = "$WORKFLOW_ALERTING_BASE_URI/$id" - protected fun createAlertComment(alertId: String, content: String, client: RestClient): Comment { - val createRequestBody = jsonBuilder() - .startObject() - .field(COMMENT_CONTENT_FIELD, content) - .endObject() - .string() - - val createResponse = client.makeRequest( - "POST", - 
"$COMMENTS_BASE_URI/$alertId", - StringEntity(createRequestBody, APPLICATION_JSON) - ) + protected fun createAlertComment( + alertId: String, + content: String, + client: RestClient, + ): Comment { + val createRequestBody = + jsonBuilder() + .startObject() + .field(COMMENT_CONTENT_FIELD, content) + .endObject() + .string() + + val createResponse = + client.makeRequest( + "POST", + "$COMMENTS_BASE_URI/$alertId", + StringEntity(createRequestBody, APPLICATION_JSON), + ) assertEquals("Unable to create a new comment", RestStatus.CREATED, createResponse.restStatus()) @@ -2106,25 +2428,34 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { entityType = comment["entity_type"] as String, content = comment["content"] as String, createdTime = Instant.ofEpochMilli(comment["created_time"] as Long), - lastUpdatedTime = if (comment["last_updated_time"] != null) { - Instant.ofEpochMilli(comment["last_updated_time"] as Long) - } else null, - user = comment["user"]?.let { User(it as String, emptyList(), emptyList(), emptyList()) } + lastUpdatedTime = + if (comment["last_updated_time"] != null) { + Instant.ofEpochMilli(comment["last_updated_time"] as Long) + } else { + null + }, + user = comment["user"]?.let { User(it as String, emptyList(), emptyList(), emptyList()) }, ) } - protected fun updateAlertComment(commentId: String, content: String, client: RestClient): Comment { - val updateRequestBody = jsonBuilder() - .startObject() - .field(COMMENT_CONTENT_FIELD, content) - .endObject() - .string() - - val updateResponse = client.makeRequest( - "PUT", - "$COMMENTS_BASE_URI/$commentId", - StringEntity(updateRequestBody, APPLICATION_JSON) - ) + protected fun updateAlertComment( + commentId: String, + content: String, + client: RestClient, + ): Comment { + val updateRequestBody = + jsonBuilder() + .startObject() + .field(COMMENT_CONTENT_FIELD, content) + .endObject() + .string() + + val updateResponse = + client.makeRequest( + "PUT", + "$COMMENTS_BASE_URI/$commentId", + 
StringEntity(updateRequestBody, APPLICATION_JSON), + ) assertEquals("Update comment failed", RestStatus.OK, updateResponse.restStatus()) @@ -2138,19 +2469,26 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { entityType = comment["entity_type"] as String, content = comment["content"] as String, createdTime = Instant.ofEpochMilli(comment["created_time"] as Long), - lastUpdatedTime = if (comment["last_updated_time"] != null) { - Instant.ofEpochMilli(comment["last_updated_time"] as Long) - } else null, - user = comment["user"]?.let { User(it as String, emptyList(), emptyList(), emptyList()) } + lastUpdatedTime = + if (comment["last_updated_time"] != null) { + Instant.ofEpochMilli(comment["last_updated_time"] as Long) + } else { + null + }, + user = comment["user"]?.let { User(it as String, emptyList(), emptyList(), emptyList()) }, ) } - protected fun searchAlertComments(query: SearchSourceBuilder, client: RestClient): XContentParser { - val searchResponse = client.makeRequest( - "GET", - "$COMMENTS_BASE_URI/_search", - StringEntity(query.toString(), APPLICATION_JSON) - ) + protected fun searchAlertComments( + query: SearchSourceBuilder, + client: RestClient, + ): XContentParser { + val searchResponse = + client.makeRequest( + "GET", + "$COMMENTS_BASE_URI/_search", + StringEntity(query.toString(), APPLICATION_JSON), + ) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) @@ -2158,11 +2496,15 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { } // returns the ID of the delete comment - protected fun deleteAlertComment(commentId: String, client: RestClient): String { - val deleteResponse = client.makeRequest( - "DELETE", - "$COMMENTS_BASE_URI/$commentId" - ) + protected fun deleteAlertComment( + commentId: String, + client: RestClient, + ): String { + val deleteResponse = + client.makeRequest( + "DELETE", + "$COMMENTS_BASE_URI/$commentId", + ) assertEquals("Delete comment failed", RestStatus.OK, 
deleteResponse.restStatus()) @@ -2172,7 +2514,10 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { return deletedCommentId } - protected fun isMonitorScheduled(monitorId: String, alertingStatsResponse: Map): Boolean { + protected fun isMonitorScheduled( + monitorId: String, + alertingStatsResponse: Map, + ): Boolean { val nodesInfo = alertingStatsResponse["nodes"] as Map for (nodeId in nodesInfo.keys) { val nodeInfo = nodesInfo[nodeId] as Map @@ -2191,7 +2536,12 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { // this function only works on the TEST_INDEX_NAME index created // specifically for this IT suite. It has fields // "timestamp" (date), "abc" (string), "number" (integer) - protected fun indexDocFromSomeTimeAgo(timeValue: Long, timeUnit: ChronoUnit, abc: String, number: Int) { + protected fun indexDocFromSomeTimeAgo( + timeValue: Long, + timeUnit: ChronoUnit, + abc: String, + number: Int, + ) { val someTimeAgo = ZonedDateTime.now().minus(timeValue, timeUnit).truncatedTo(MILLIS) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(someTimeAgo) // the timestamp string is given a random timezone offset val testDoc = """{ "timestamp" : "$testTime", "abc": "$abc", "number" : "$number" }""" @@ -2210,10 +2560,12 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { }, 10, TimeUnit.SECONDS) val search = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).toString() - val searchResponse = client().makeRequest( - "POST", "$MONITOR_V2_BASE_URI/_search", - StringEntity(search, APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "POST", + "$MONITOR_V2_BASE_URI/_search", + StringEntity(search, APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) @@ -2224,7 +2576,10 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { // takes in an execute monitor API response and 
returns true if the // trigger condition was met. assumes the monitor executed only had 1 trigger - protected fun isTriggered(pplMonitor: PPLSQLMonitor, executeResponse: Response): Boolean { + protected fun isTriggered( + pplMonitor: PPLSQLMonitor, + executeResponse: Response, + ): Boolean { val executeResponseMap = entityAsMap(executeResponse) val triggerResultsObj = (executeResponseMap["trigger_results"] as Map)[pplMonitor.triggers[0].id] as Map return triggerResultsObj["triggered"] as Boolean @@ -2247,18 +2602,25 @@ abstract class AlertingRestTestCase : ODFERestTestCase() { } protected fun getAlertV2HistoryDocCount(): Long { - val request = """ + val request = + """ { "query": { "match_all": {} } } - """.trimIndent() - val response = adminClient().makeRequest( - "POST", "${AlertV2Indices.ALERT_V2_HISTORY_ALL}/_search", emptyMap(), - StringEntity(request, APPLICATION_JSON) - ) + """.trimIndent() + val response = + adminClient().makeRequest( + "POST", + "${AlertV2Indices.ALERT_V2_HISTORY_ALL}/_search", + emptyMap(), + StringEntity(request, APPLICATION_JSON), + ) assertEquals("Request to get alert v2 history failed", RestStatus.OK, response.restStatus()) - return SearchResponse.fromXContent(createParser(jsonXContent, response.entity.content)).hits.totalHits!!.value + return SearchResponse + .fromXContent(createParser(jsonXContent, response.entity.content)) + .hits.totalHits!! 
+ .value } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/DocLeveFanOutIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/DocLeveFanOutIT.kt index f3fa8d4a5..b8b91451f 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/DocLeveFanOutIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/DocLeveFanOutIT.kt @@ -15,10 +15,11 @@ import org.opensearch.commons.alerting.model.action.PerExecutionActionScope import java.time.ZonedDateTime import java.time.format.DateTimeFormatter import java.time.temporal.ChronoUnit.MILLIS +import kotlin.test.Test @ThreadLeakScope(ThreadLeakScope.Scope.NONE) class DocLeveFanOutIT : AlertingRestTestCase() { - + @Test fun `test execution reaches endtime before completing execution`() { val updateSettings1 = adminClient().updateSettings(AlertingSettings.FINDING_HISTORY_ENABLED.key, false) logger.info(updateSettings1) @@ -35,22 +36,24 @@ class DocLeveFanOutIT : AlertingRestTestCase() { val actionExecutionScope = PerExecutionActionScope() val actionExecutionPolicy = ActionExecutionPolicy(actionExecutionScope) - val actions = (0..randomInt(10)).map { - randomActionWithPolicy( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - actionExecutionPolicy = actionExecutionPolicy - ) - } + val actions = + (0..randomInt(10)).map { + randomActionWithPolicy( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + actionExecutionPolicy = actionExecutionPolicy, + ) + } val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = actions) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ), ) - ) assertNotNull(monitor.id) executeMonitor(monitor.id) indexDoc(testIndex, "1", testDoc) diff --git 
a/alerting/src/test/kotlin/org/opensearch/alerting/DocumentMonitorRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/DocumentMonitorRunnerIT.kt index ca5bc1059..2e3706934 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/DocumentMonitorRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/DocumentMonitorRunnerIT.kt @@ -37,9 +37,7 @@ import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicBoolean class DocumentMonitorRunnerIT : AlertingRestTestCase() { - fun `test execute monitor with dryrun with index pattern`() { - val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ "message" : "This is an error from IAD region", @@ -53,10 +51,11 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index + "*"), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ) indexDoc(index, "1", testDoc) @@ -69,7 +68,6 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { } fun `test execute monitor with dryrun`() { - val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ "message" : "This is an error from IAD region", @@ -83,10 +81,11 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), 
destinationId = createDestination().id) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ) indexDoc(index, "1", testDoc) @@ -101,8 +100,10 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(1, triggerResult.objectMap("action_results").values.size) for (alertActionResult in triggerResult.objectMap("action_results").values) { for (actionResult in alertActionResult.values) { - @Suppress("UNCHECKED_CAST") val actionOutput = (actionResult as Map>)["output"] - as Map + @Suppress("UNCHECKED_CAST") + val actionOutput = + (actionResult as Map>)["output"] + as Map assertEquals("Hello ${monitor.name}", actionOutput["subject"]) assertEquals("Hello ${monitor.name}", actionOutput["message"]) } @@ -119,17 +120,18 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "match_all": {} } }""" - var httpResponse = adminClient().makeRequest( - "GET", "/${monitor.dataSources.queryIndex}/_search", - StringEntity(request, ContentType.APPLICATION_JSON) - ) + var httpResponse = + adminClient().makeRequest( + "GET", + "/${monitor.dataSources.queryIndex}/_search", + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) var searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, httpResponse.entity.content)) searchResponse.hits.totalHits?.let { assertEquals("Query saved in query index", 0L, it.value) } } fun `test dryrun execute monitor with queryFieldNames set up with correct field`() { - val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ "message" : "This is an error from IAD region", @@ 
-144,10 +146,11 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ) indexDoc(index, "1", testDoc) @@ -162,8 +165,10 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(1, triggerResult.objectMap("action_results").values.size) for (alertActionResult in triggerResult.objectMap("action_results").values) { for (actionResult in alertActionResult.values) { - @Suppress("UNCHECKED_CAST") val actionOutput = (actionResult as Map>)["output"] - as Map + @Suppress("UNCHECKED_CAST") + val actionOutput = + (actionResult as Map>)["output"] + as Map assertEquals("Hello ${monitor.name}", actionOutput["subject"]) assertEquals("Hello ${monitor.name}", actionOutput["message"]) } @@ -190,14 +195,16 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf( - randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action)) - ) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = + listOf( + randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action)), + ), + ), ) - ) 
assertNotNull(monitor.id) indexDoc(index, "1", testDoc) @@ -239,13 +246,14 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val actionExecutionScope = PerExecutionActionScope() val actionExecutionPolicy = ActionExecutionPolicy(actionExecutionScope) - val actions = (0..randomInt(10)).map { - randomActionWithPolicy( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - actionExecutionPolicy = actionExecutionPolicy - ) - } + val actions = + (0..randomInt(10)).map { + randomActionWithPolicy( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + actionExecutionPolicy = actionExecutionPolicy, + ) + } val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = actions) val monitor = createMonitor(randomDocumentLevelMonitor(inputs = listOf(docLevelInput), triggers = listOf(trigger))) @@ -279,7 +287,6 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { } fun `test dryrun execute monitor with queryFieldNames set up with wrong field`() { - val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ "message" : "This is an error from IAD region", @@ -289,19 +296,21 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val index = createTestIndex() // using wrong field name - val docQuery = DocLevelQuery( - query = "test_field:\"us-west-2\"", - name = "3", - fields = listOf(), - queryFieldNames = listOf("wrong_field") - ) + val docQuery = + DocLevelQuery( + query = "test_field:\"us-west-2\"", + name = "3", + fields = listOf(), + queryFieldNames = listOf("wrong_field"), + ) val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - 
triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ) indexDoc(index, "1", testDoc) indexDoc(index, "2", testDoc) @@ -320,8 +329,10 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(0, triggerResult.objectMap("action_results").values.size) for (alertActionResult in triggerResult.objectMap("action_results").values) { for (actionResult in alertActionResult.values) { - @Suppress("UNCHECKED_CAST") val actionOutput = (actionResult as Map>)["output"] - as Map + @Suppress("UNCHECKED_CAST") + val actionOutput = + (actionResult as Map>)["output"] + as Map assertEquals("Hello ${monitor.name}", actionOutput["subject"]) assertEquals("Hello ${monitor.name}", actionOutput["message"]) } @@ -343,19 +354,21 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val index = createTestIndex() // using wrong field name - val docQuery = DocLevelQuery( - query = "test_field:\"us-west-2\"", - name = "3", - fields = listOf(), - queryFieldNames = listOf("wrong_field") - ) + val docQuery = + DocLevelQuery( + query = "test_field:\"us-west-2\"", + name = "3", + fields = listOf(), + queryFieldNames = listOf("wrong_field"), + ) val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ) indexDoc(index, "1", testDoc) @@ -370,8 
+383,10 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(1, triggerResult.objectMap("action_results").values.size) for (alertActionResult in triggerResult.objectMap("action_results").values) { for (actionResult in alertActionResult.values) { - @Suppress("UNCHECKED_CAST") val actionOutput = (actionResult as Map>)["output"] - as Map + @Suppress("UNCHECKED_CAST") + val actionOutput = + (actionResult as Map>)["output"] + as Map assertEquals("Hello ${monitor.name}", actionOutput["subject"]) assertEquals("Hello ${monitor.name}", actionOutput["message"]) } @@ -407,6 +422,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery.size) @@ -420,10 +436,12 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "match_all": {} } }""" - var httpResponse = adminClient().makeRequest( - "GET", "/${monitor.dataSources.queryIndex}/_search", - StringEntity(request, ContentType.APPLICATION_JSON) - ) + var httpResponse = + adminClient().makeRequest( + "GET", + "/${monitor.dataSources.queryIndex}/_search", + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) var searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, httpResponse.entity.content)) searchResponse.hits.totalHits?.let { assertEquals("Query saved in query index", 0L, it.value) } @@ -454,6 +472,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = 
searchResult[docQuery.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery.size) @@ -467,10 +486,12 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "match_all": {} } }""" - var httpResponse = adminClient().makeRequest( - "GET", "/${monitor.dataSources.queryIndex}/_search", - StringEntity(request, ContentType.APPLICATION_JSON) - ) + var httpResponse = + adminClient().makeRequest( + "GET", + "/${monitor.dataSources.queryIndex}/_search", + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) var searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, httpResponse.entity.content)) searchResponse.hits.totalHits?.let { assertEquals(0L, it.value) } @@ -500,6 +521,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(monitor2.name, output2["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult2 = (output2.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery2 = searchResult2[docQuery2.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery2.size) @@ -517,10 +539,12 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { // ensure query from second monitor was saved val expectedQueries = listOf("test_field_test1_${monitor2.id}:\"us-east-1\"") - httpResponse = adminClient().makeRequest( - "GET", "/${monitor.dataSources.queryIndex}/_search", - StringEntity(request, ContentType.APPLICATION_JSON) - ) + httpResponse = + adminClient().makeRequest( + "GET", + "/${monitor.dataSources.queryIndex}/_search", + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, httpResponse.entity.content)) searchResponse.hits.forEach { hit -> @@ -556,6 +580,7 @@ class DocumentMonitorRunnerIT : 
AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery.size) @@ -585,14 +610,15 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = createMonitor( - randomDocumentLevelMonitor( - name = "__lag-monitor-test__", - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + name = "__lag-monitor-test__", + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES), + ), ) - ) assertNotNull(monitor.id) indexDoc(testIndex, "1", testDoc) @@ -623,13 +649,14 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES), + ), ) - ) assertNotNull(monitor.id) indexDoc(testIndex, "1", testDoc) @@ -678,6 +705,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = 
(output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery.size) @@ -738,17 +766,19 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val alertCategories = AlertCategory.values() - val actionExecutionScope = PerAlertActionScope( - actionableAlerts = (1..randomInt(alertCategories.size)).map { alertCategories[it - 1] }.toSet() - ) - val actionExecutionPolicy = ActionExecutionPolicy(actionExecutionScope) - val actions = (0..randomInt(10)).map { - randomActionWithPolicy( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - actionExecutionPolicy = actionExecutionPolicy + val actionExecutionScope = + PerAlertActionScope( + actionableAlerts = (1..randomInt(alertCategories.size)).map { alertCategories[it - 1] }.toSet(), ) - } + val actionExecutionPolicy = ActionExecutionPolicy(actionExecutionScope) + val actions = + (0..randomInt(10)).map { + randomActionWithPolicy( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + actionExecutionPolicy = actionExecutionPolicy, + ) + } val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = actions) val monitor = createMonitor(randomDocumentLevelMonitor(inputs = listOf(docLevelInput), triggers = listOf(trigger))) @@ -764,6 +794,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery.size) @@ -774,8 +805,10 @@ class 
DocumentMonitorRunnerIT : AlertingRestTestCase() { for (alertActionResult in triggerResult.objectMap("action_results").values) { assertEquals(actions.size, alertActionResult.values.size) for (actionResult in alertActionResult.values) { - @Suppress("UNCHECKED_CAST") val actionOutput = (actionResult as Map>)["output"] - as Map + @Suppress("UNCHECKED_CAST") + val actionOutput = + (actionResult as Map>)["output"] + as Map assertEquals("Hello ${monitor.name}", actionOutput["subject"]) assertEquals("Hello ${monitor.name}", actionOutput["message"]) } @@ -807,13 +840,14 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val actionExecutionScope = PerExecutionActionScope() val actionExecutionPolicy = ActionExecutionPolicy(actionExecutionScope) - val actions = (0..randomInt(10)).map { - randomActionWithPolicy( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - actionExecutionPolicy = actionExecutionPolicy - ) - } + val actions = + (0..randomInt(10)).map { + randomActionWithPolicy( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + actionExecutionPolicy = actionExecutionPolicy, + ) + } val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = actions) val monitor = createMonitor(randomDocumentLevelMonitor(inputs = listOf(docLevelInput), triggers = listOf(trigger))) @@ -829,6 +863,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery.size) @@ -839,8 +874,10 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { for (alertActionResult in triggerResult.objectMap("action_results").values) { assertEquals(actions.size, 
alertActionResult.values.size) for (actionResult in alertActionResult.values) { - @Suppress("UNCHECKED_CAST") val actionOutput = (actionResult as Map>)["output"] - as Map + @Suppress("UNCHECKED_CAST") + val actionOutput = + (actionResult as Map>)["output"] + as Map assertEquals("Hello ${monitor.name}", actionOutput["subject"]) assertEquals("Hello ${monitor.name}", actionOutput["message"]) } @@ -888,6 +925,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery.size) @@ -935,6 +973,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Correct search result", 10, matchingDocsToQuery.size) @@ -1271,9 +1310,11 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { // } fun `test execute monitor with indices having fields with same name but different field mappings`() { - val testIndex = createTestIndex( - "test1", - """"properties": { + val testIndex = + createTestIndex( + "test1", + """ + "properties": { "source": { "properties": { "id": { @@ -1287,12 +1328,14 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "analyzer":"whitespace" } } - """.trimIndent() - ) + """.trimIndent(), + ) - val testIndex2 = createTestIndex( - "test2", - """"properties": { + val testIndex2 = + createTestIndex( + "test2", + """ + "properties": { "source": { "properties": { "id": { @@ -1304,19 +1347,20 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "type":"text" } } - 
""".trimIndent() - ) + """.trimIndent(), + ) val testDoc = """{ "source" : {"id" : "12345" }, "nested_field": { "test1": "some text" }, "test_field": "12345" }""" - val docQuery = DocLevelQuery( - query = "test_field:\"12345\" AND source.id:\"12345\"", - name = "5", - fields = listOf() - ) + val docQuery = + DocLevelQuery( + query = "test_field:\"12345\" AND source.id:\"12345\"", + name = "5", + fields = listOf(), + ) val docLevelInput = DocLevelMonitorInput("description", listOf("test1", "test2"), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) @@ -1335,10 +1379,11 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals("Findings saved for test monitor", 2, findings.size) // as mappings of source.id & test_field are different so, both of them expands - val expectedQueries = listOf( - "test_field_test2_${monitor.id}:\"12345\" AND source.id_test2_${monitor.id}:\"12345\"", - "test_field_test1_${monitor.id}:\"12345\" AND source.id_test1_${monitor.id}:\"12345\"" - ) + val expectedQueries = + listOf( + "test_field_test2_${monitor.id}:\"12345\" AND source.id_test2_${monitor.id}:\"12345\"", + "test_field_test1_${monitor.id}:\"12345\" AND source.id_test1_${monitor.id}:\"12345\"", + ) val request = """{ "size": 10, @@ -1346,10 +1391,12 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "match_all": {} } }""" - var httpResponse = adminClient().makeRequest( - "GET", "/${monitor.dataSources.queryIndex}/_search", - StringEntity(request, ContentType.APPLICATION_JSON) - ) + var httpResponse = + adminClient().makeRequest( + "GET", + "/${monitor.dataSources.queryIndex}/_search", + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) var searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, httpResponse.entity.content)) searchResponse.hits.forEach { hit -> @@ -1359,9 +1406,11 @@ class DocumentMonitorRunnerIT : 
AlertingRestTestCase() { } fun `test execute monitor with indices having fields with same name but different field mappings in multiple indices`() { - val testIndex = createTestIndex( - "test1", - """"properties": { + val testIndex = + createTestIndex( + "test1", + """ + "properties": { "source": { "properties": { "device": { @@ -1382,22 +1431,26 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "type":"text" } } - """.trimIndent() - ) + """.trimIndent(), + ) - val testIndex2 = createTestIndex( - "test2", - """"properties": { + val testIndex2 = + createTestIndex( + "test2", + """ + "properties": { "test_field" : { "type":"keyword" } } - """.trimIndent() - ) + """.trimIndent(), + ) - val testIndex4 = createTestIndex( - "test4", - """"properties": { + val testIndex4 = + createTestIndex( + "test4", + """ + "properties": { "source": { "properties": { "device": { @@ -1417,8 +1470,8 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "type":"text" } } - """.trimIndent() - ) + """.trimIndent(), + ) val testDoc1 = """{ "source" : {"device" : {"hwd" : {"id" : "12345"}} }, @@ -1429,16 +1482,18 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "test_field": "12345" }""" - val docQuery1 = DocLevelQuery( - query = "test_field:\"12345\"", - name = "4", - fields = listOf() - ) - val docQuery2 = DocLevelQuery( - query = "source.device.hwd.id:\"12345\"", - name = "5", - fields = listOf() - ) + val docQuery1 = + DocLevelQuery( + query = "test_field:\"12345\"", + name = "4", + fields = listOf(), + ) + val docQuery2 = + DocLevelQuery( + query = "source.device.hwd.id:\"12345\"", + name = "5", + fields = listOf(), + ) val docLevelInput = DocLevelMonitorInput("description", listOf("test1", "test2", "test4"), listOf(docQuery1, docQuery2)) @@ -1465,10 +1520,12 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "match_all": {} } }""" - val httpResponse = adminClient().makeRequest( - "GET", "/${monitor.dataSources.queryIndex}/_search", - 
StringEntity(request, ContentType.APPLICATION_JSON) - ) + val httpResponse = + adminClient().makeRequest( + "GET", + "/${monitor.dataSources.queryIndex}/_search", + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) val searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, httpResponse.entity.content)) @@ -1500,10 +1557,12 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "match_all": {} } }""" - var httpResponse = adminClient().makeRequest( - "GET", "/${monitor.dataSources.queryIndex}/_search", - StringEntity(request, ContentType.APPLICATION_JSON) - ) + var httpResponse = + adminClient().makeRequest( + "GET", + "/${monitor.dataSources.queryIndex}/_search", + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) var searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, httpResponse.entity.content)) @@ -1513,10 +1572,12 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { indexDoc(testIndex2, "1", testDoc) executeMonitor(monitor.id) - httpResponse = adminClient().makeRequest( - "GET", "/${monitor.dataSources.queryIndex}/_search", - StringEntity(request, ContentType.APPLICATION_JSON) - ) + httpResponse = + adminClient().makeRequest( + "GET", + "/${monitor.dataSources.queryIndex}/_search", + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, httpResponse.entity.content)) @@ -1616,8 +1677,10 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val output = entityAsMap(response) val inputResults = output.stringMap("input_results") val errorMessage = inputResults?.get("error") + @Suppress("UNCHECKED_CAST") val searchResult = (inputResults?.get("results") as 
List>).firstOrNull() + @Suppress("UNCHECKED_CAST") val findings = searchFindings() @@ -1655,8 +1718,10 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val output = entityAsMap(response) val inputResults = output.stringMap("input_results") val errorMessage = inputResults?.get("error") + @Suppress("UNCHECKED_CAST") val searchResult = (inputResults?.get("results") as List>).firstOrNull() + @Suppress("UNCHECKED_CAST") val findings = searchFindings() @@ -1666,7 +1731,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docIds = it.finding.relatedDocIds assertTrue( "Findings index should not contain a pre-existing doc, but found $it", - preExistingDocIds.intersect(docIds).isEmpty() + preExistingDocIds.intersect(docIds).isEmpty(), ) } } @@ -1708,8 +1773,10 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val output = entityAsMap(response) val inputResults = output.stringMap("input_results") val errorMessage = inputResults?.get("error") + @Suppress("UNCHECKED_CAST") val searchResult = (inputResults?.get("results") as List>).firstOrNull() + @Suppress("UNCHECKED_CAST") val findings = searchFindings() @@ -1719,7 +1786,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docIds = it.finding.relatedDocIds assertTrue( "Findings index should not contain a pre-existing doc, but found $it", - preExistingDocIds.intersect(docIds).isEmpty() + preExistingDocIds.intersect(docIds).isEmpty(), ) assertTrue("Found an unexpected finding $it", newDocIds.intersect(docIds).isNotEmpty()) } @@ -1769,8 +1836,10 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val output = entityAsMap(response) val inputResults = output.stringMap("input_results") val errorMessage = inputResults?.get("error") + @Suppress("UNCHECKED_CAST") val searchResult = (inputResults?.get("results") as List>).firstOrNull() + @Suppress("UNCHECKED_CAST") val findings = searchFindings() @@ -1780,7 +1849,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val 
docIds = it.finding.relatedDocIds assertTrue( "Findings index should not contain a pre-existing doc, but found $it", - preExistingDocIds.intersect(docIds).isEmpty() + preExistingDocIds.intersect(docIds).isEmpty(), ) assertTrue("Found doc that doesn't match query: $it", nonMatchingDocIds.intersect(docIds).isEmpty()) assertFalse("Found an unexpected finding $it", matchingDocIds.intersect(docIds).isNotEmpty()) @@ -1792,25 +1861,26 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { createDataStream( dataStreamName, """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } """.trimIndent(), - false + false, ) val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(dataStreamName), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ), ) - ) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1823,6 +1893,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { var response = executeMonitor(monitor.id) var output = entityAsMap(response) var searchResult = (output.objectMap("input_results")["results"] as 
List>).first() + @Suppress("UNCHECKED_CAST") var matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 1, matchingDocsToQuery.size) @@ -1844,25 +1915,26 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { createDataStream( dataStreamName, """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } """.trimIndent(), - false + false, ) val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(dataStreamName), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ), ) - ) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1875,6 +1947,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { var response = executeMonitor(monitor.id) var output = entityAsMap(response) var searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") var matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 1, matchingDocsToQuery.size) @@ -1907,24 +1980,25 @@ class DocumentMonitorRunnerIT : 
AlertingRestTestCase() { createIndexAlias( aliasName, """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } - """.trimIndent() + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } + """.trimIndent(), ) val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf("$aliasName"), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ), ) - ) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1937,6 +2011,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { var response = executeMonitor(monitor.id) var output = entityAsMap(response) var searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") var matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 1, matchingDocsToQuery.size) @@ -1960,25 +2035,25 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { createDataStream( dataStreamName1, """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } 
+ "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } """.trimIndent(), - false + false, ) val dataStreamName2 = "test-datastream2" createDataStream( dataStreamName2, """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } """.trimIndent(), - false + false, ) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) @@ -1996,18 +2071,20 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf("test-datastream1"), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ), ) - ) indexDoc(dataStreamName1, "1", testDoc) indexDoc(dataStreamName2, "1", testDoc) var response = executeMonitor(monitor.id) var output = entityAsMap(response) var searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") var matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery.size) @@ -2046,13 +2123,13 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { 
createDataStream( dataStreamName, """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } """.trimIndent(), - false + false, ) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) @@ -2070,17 +2147,19 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(dataStreamName), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ), ) - ) indexDoc(dataStreamName, "1", testDoc) var response = executeMonitor(monitor.id) var output = entityAsMap(response) var searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") var matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 1, matchingDocsToQuery.size) @@ -2098,7 +2177,6 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { } fun `test execute monitor with non-null data sources`() { - val testIndex = createTestIndex() val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -2111,17 +2189,19 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = 
DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val alertCategories = AlertCategory.values() - val actionExecutionScope = PerAlertActionScope( - actionableAlerts = (1..randomInt(alertCategories.size)).map { alertCategories[it - 1] }.toSet() - ) - val actionExecutionPolicy = ActionExecutionPolicy(actionExecutionScope) - val actions = (0..randomInt(10)).map { - randomActionWithPolicy( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - actionExecutionPolicy = actionExecutionPolicy + val actionExecutionScope = + PerAlertActionScope( + actionableAlerts = (1..randomInt(alertCategories.size)).map { alertCategories[it - 1] }.toSet(), ) - } + val actionExecutionPolicy = ActionExecutionPolicy(actionExecutionScope) + val actions = + (0..randomInt(10)).map { + randomActionWithPolicy( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + actionExecutionPolicy = actionExecutionPolicy, + ) + } val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = actions) try { @@ -2129,11 +2209,12 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { randomDocumentLevelMonitor( inputs = listOf(docLevelInput), triggers = listOf(trigger), - dataSources = DataSources( - findingsIndex = "custom_findings_index", - alertsIndex = "custom_alerts_index", - ) - ) + dataSources = + DataSources( + findingsIndex = "custom_findings_index", + alertsIndex = "custom_alerts_index", + ), + ), ) fail("Expected create monitor to fail") } catch (e: ResponseException) { @@ -2158,12 +2239,13 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { var docLevelInput = DocLevelMonitorInput("description", listOf(index1, index2, index4, index5), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs 
= listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ), ) - ) indexDoc(index1, "1", testDoc) indexDoc(index2, "1", testDoc) @@ -2213,6 +2295,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery.size) @@ -2234,24 +2317,25 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { createIndexAlias( aliasName, """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } - """.trimIndent() + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } + """.trimIndent(), ) val docQuery = DocLevelQuery(query = "NOT test_field:\"us-east-1\" AND _exists_: test_field", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf("$aliasName"), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = 
listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ), ) - ) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -2264,6 +2348,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { var response = executeMonitor(monitor.id) var output = entityAsMap(response) var searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") var matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 1, matchingDocsToQuery.size) @@ -2327,9 +2412,11 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { // } fun `test execute monitor with indices having fields with same name different field mappings in multiple indices with NOT EQUALS`() { - val testIndex = createTestIndex( - "test1", - """"properties": { + val testIndex = + createTestIndex( + "test1", + """ + "properties": { "source": { "properties": { "device": { @@ -2350,22 +2437,26 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "type":"text" } } - """.trimIndent() - ) + """.trimIndent(), + ) - val testIndex2 = createTestIndex( - "test2", - """"properties": { + val testIndex2 = + createTestIndex( + "test2", + """ + "properties": { "test_field" : { "type":"keyword" } } - """.trimIndent() - ) + """.trimIndent(), + ) - val testIndex4 = createTestIndex( - "test4", - """"properties": { + val testIndex4 = + createTestIndex( + "test4", + """ + "properties": { "source": { "properties": { "device": { @@ -2385,8 +2476,8 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "type":"text" } } - """.trimIndent() - ) + """.trimIndent(), + ) val testDoc1 = """{ "source" : {"device" : {"hwd" : {"id" : "123456"}} }, @@ -2397,16 +2488,18 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "test_field": "123456" }""" - val docQuery1 = DocLevelQuery( - query = "NOT test_field:\"12345\" AND _exists_: test_field", - name = 
"4", - fields = listOf() - ) - val docQuery2 = DocLevelQuery( - query = "NOT source.device.hwd.id:\"12345\" AND _exists_: source.device.hwd.id", - name = "5", - fields = listOf() - ) + val docQuery1 = + DocLevelQuery( + query = "NOT test_field:\"12345\" AND _exists_: test_field", + name = "4", + fields = listOf(), + ) + val docQuery2 = + DocLevelQuery( + query = "NOT source.device.hwd.id:\"12345\" AND _exists_: source.device.hwd.id", + name = "5", + fields = listOf(), + ) val docLevelInput = DocLevelMonitorInput("description", listOf("$testIndex", "$testIndex2", "$testIndex4"), listOf(docQuery1, docQuery2)) @@ -2434,10 +2527,12 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "match_all": {} } }""" - val httpResponse = adminClient().makeRequest( - "GET", "/${monitor.dataSources.queryIndex}/_search", - StringEntity(request, ContentType.APPLICATION_JSON) - ) + val httpResponse = + adminClient().makeRequest( + "GET", + "/${monitor.dataSources.queryIndex}/_search", + StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) val searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, httpResponse.entity.content)) @@ -2445,9 +2540,11 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { } fun `test execute monitor with indices having fields with same name but different field mappings with NOT EQUALS`() { - val testIndex = createTestIndex( - "test1", - """"properties": { + val testIndex = + createTestIndex( + "test1", + """ + "properties": { "source": { "properties": { "id": { @@ -2461,12 +2558,14 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "analyzer":"whitespace" } } - """.trimIndent() - ) + """.trimIndent(), + ) - val testIndex2 = createTestIndex( - "test2", - """"properties": { + val testIndex2 = + createTestIndex( + "test2", + """ + "properties": { "source": { "properties": { "id": { @@ -2478,19 +2577,20 @@ class DocumentMonitorRunnerIT 
: AlertingRestTestCase() { "type":"text" } } - """.trimIndent() - ) + """.trimIndent(), + ) val testDoc = """{ "source" : {"id" : "12345" }, "nested_field": { "test1": "some text" }, "test_field": "12345" }""" - val docQuery = DocLevelQuery( - query = "(NOT test_field:\"123456\" AND _exists_:test_field) AND source.id:\"12345\"", - name = "5", - fields = listOf() - ) + val docQuery = + DocLevelQuery( + query = "(NOT test_field:\"123456\" AND _exists_:test_field) AND source.id:\"12345\"", + name = "5", + fields = listOf(), + ) val docLevelInput = DocLevelMonitorInput("description", listOf("test1", "test2"), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) @@ -2509,12 +2609,13 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals("Findings saved for test monitor", 2, findings.size) // as mappings of source.id & test_field are different so, both of them expands - val expectedQueries = listOf( - "(NOT test_field_test2_${monitor.id}:\"123456\" AND _exists_:test_field_test2_${monitor.id}) " + - "AND source.id_test2_${monitor.id}:\"12345\"", - "(NOT test_field_test1_${monitor.id}:\"123456\" AND _exists_:test_field_test1_${monitor.id}) " + - "AND source.id_test1_${monitor.id}:\"12345\"" - ) + val expectedQueries = + listOf( + "(NOT test_field_test2_${monitor.id}:\"123456\" AND _exists_:test_field_test2_${monitor.id}) " + + "AND source.id_test2_${monitor.id}:\"12345\"", + "(NOT test_field_test1_${monitor.id}:\"123456\" AND _exists_:test_field_test1_${monitor.id}) " + + "AND source.id_test1_${monitor.id}:\"12345\"", + ) val request = """{ "size": 10, @@ -2522,10 +2623,12 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { "match_all": {} } }""" - var httpResponse = adminClient().makeRequest( - "GET", "/${monitor.dataSources.queryIndex}/_search", - StringEntity(request, ContentType.APPLICATION_JSON) - ) + var httpResponse = + adminClient().makeRequest( + "GET", + "/${monitor.dataSources.queryIndex}/_search", + 
StringEntity(request, ContentType.APPLICATION_JSON), + ) assertEquals("Search failed", RestStatus.OK, httpResponse.restStatus()) var searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, httpResponse.entity.content)) searchResponse.hits.forEach { hit -> @@ -2547,19 +2650,22 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val alertCategories = AlertCategory.values() - val actionExecutionScope = PerAlertActionScope( - actionableAlerts = (1..randomInt(alertCategories.size)).map { alertCategories[it - 1] }.toSet() - ) - val actionExecutionPolicy = ActionExecutionPolicy(actionExecutionScope) - val actions = (0..randomInt(10)).map { - randomActionWithPolicy( - template = randomTemplateScript( - "{{#ctx.alerts}}\n{{#associated_queries}}\n(name={{name}})\n{{/associated_queries}}\n{{/ctx.alerts}}" - ), - destinationId = createDestination().id, - actionExecutionPolicy = actionExecutionPolicy + val actionExecutionScope = + PerAlertActionScope( + actionableAlerts = (1..randomInt(alertCategories.size)).map { alertCategories[it - 1] }.toSet(), ) - } + val actionExecutionPolicy = ActionExecutionPolicy(actionExecutionScope) + val actions = + (0..randomInt(10)).map { + randomActionWithPolicy( + template = + randomTemplateScript( + "{{#ctx.alerts}}\n{{#associated_queries}}\n(name={{name}})\n{{/associated_queries}}\n{{/ctx.alerts}}", + ), + destinationId = createDestination().id, + actionExecutionPolicy = actionExecutionPolicy, + ) + } val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = actions) val monitor = createMonitor(randomDocumentLevelMonitor(inputs = listOf(docLevelInput), triggers = listOf(trigger))) @@ -2575,6 +2681,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] 
as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = searchResult[docQuery.id] as List assertEquals("Incorrect search result", 2, matchingDocsToQuery.size) @@ -2589,7 +2696,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val actionOutput = (actionResult as Map>)["output"] as Map assertTrue( "The notification message is missing the query name.", - actionOutput["message"]!!.contains("(name=${docQuery.name})") + actionOutput["message"]!!.contains("(name=${docQuery.name})"), ) } } @@ -2610,7 +2717,8 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) // Prints all fields in doc source - val scriptSource1 = """ + val scriptSource1 = + """ Monitor {{ctx.monitor.name}} just entered alert status. Please investigate the issue.\n - Trigger: {{ctx.trigger.name}}\n - Severity: {{ctx.trigger.severity}}\n @@ -2631,10 +2739,11 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { Query name: {{name}}\n {{/associated_queries}}\n {{/ctx.alerts}} - """.trimIndent() + """.trimIndent() // Only prints a few fields from the doc source - val scriptSource2 = """ + val scriptSource2 = + """ Monitor {{ctx.monitor.name}} just entered alert status. Please investigate the issue.\n - Trigger: {{ctx.trigger.name}}\n - Severity: {{ctx.trigger.severity}}\n @@ -2654,10 +2763,11 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { Query name: {{name}}\n {{/associated_queries}}\n {{/ctx.alerts}} - """.trimIndent() + """.trimIndent() // Doesn't print any document data - val scriptSource3 = """ + val scriptSource3 = + """ Monitor {{ctx.monitor.name}} just entered alert status. 
Please investigate the issue.\n - Trigger: {{ctx.trigger.name}}\n - Severity: {{ctx.trigger.severity}}\n @@ -2671,23 +2781,25 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { Query name: {{name}}\n {{/associated_queries}}\n {{/ctx.alerts}} - """.trimIndent() + """.trimIndent() // Using 'alert.copy()' here because 'randomAction()' applies the 'template' for the message subject, and message body - val actions = listOf( - randomAction(name = "action1", template = randomTemplateScript("action1 message"), destinationId = createDestination().id) - .copy(messageTemplate = randomTemplateScript(scriptSource1)), - randomAction(name = "action2", template = randomTemplateScript("action2 message"), destinationId = createDestination().id) - .copy(messageTemplate = randomTemplateScript(scriptSource2)), - randomAction(name = "action3", template = randomTemplateScript("action3 message"), destinationId = createDestination().id) - .copy(messageTemplate = randomTemplateScript(scriptSource3)) - ) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = actions)) + val actions = + listOf( + randomAction(name = "action1", template = randomTemplateScript("action1 message"), destinationId = createDestination().id) + .copy(messageTemplate = randomTemplateScript(scriptSource1)), + randomAction(name = "action2", template = randomTemplateScript("action2 message"), destinationId = createDestination().id) + .copy(messageTemplate = randomTemplateScript(scriptSource2)), + randomAction(name = "action3", template = randomTemplateScript("action3 message"), destinationId = createDestination().id) + .copy(messageTemplate = randomTemplateScript(scriptSource3)), + ) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = actions)), + ), ) - ) 
indexDoc(index, "", testDoc) @@ -2699,45 +2811,55 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val triggerResults = output.objectMap("trigger_results") assertEquals(1, triggerResults.values.size) - val expectedMessageContents = mapOf( - "action1" to Pair( - // First item in pair is INCLUDED content - listOf( - "Test field: us-west-2", - "Message: Test message", - "Timestamp: $testTime", - "Query ID: ${docQuery.id}", - "Query name: ${docQuery.name}", - ), - // Second item in pair is EXCLUDED content - listOf() - ), - "action2" to Pair( - // First item in pair is INCLUDED content - listOf( - "Test field: us-west-2", - "Message: Test message", - "Query ID: ${docQuery.id}", - "Query name: ${docQuery.name}", - ), - // Second item in pair is EXCLUDED content - listOf("Timestamp: $testTime") - ), - "action3" to Pair( - // First item in pair is INCLUDED content - listOf( - "Query ID: ${docQuery.id}", - "Query name: ${docQuery.name}", - ), - // Second item in pair is EXCLUDED content - listOf( - "Test field: us-west-2", - "Message: Test message", - "Timestamp: $testTime", - ) - ), - ) - val actionResults = triggerResults.values.first().objectMap("action_results").values.first().values + val expectedMessageContents = + mapOf( + "action1" to + Pair( + // First item in pair is INCLUDED content + listOf( + "Test field: us-west-2", + "Message: Test message", + "Timestamp: $testTime", + "Query ID: ${docQuery.id}", + "Query name: ${docQuery.name}", + ), + // Second item in pair is EXCLUDED content + listOf(), + ), + "action2" to + Pair( + // First item in pair is INCLUDED content + listOf( + "Test field: us-west-2", + "Message: Test message", + "Query ID: ${docQuery.id}", + "Query name: ${docQuery.name}", + ), + // Second item in pair is EXCLUDED content + listOf("Timestamp: $testTime"), + ), + "action3" to + Pair( + // First item in pair is INCLUDED content + listOf( + "Query ID: ${docQuery.id}", + "Query name: ${docQuery.name}", + ), + // Second item in pair is 
EXCLUDED content + listOf( + "Test field: us-west-2", + "Message: Test message", + "Timestamp: $testTime", + ), + ), + ) + val actionResults = + triggerResults.values + .first() + .objectMap("action_results") + .values + .first() + .values @Suppress("UNCHECKED_CAST") actionResults.forEach { action -> val messageContent = ((action as Map)["output"] as Map)["message"] as String @@ -2763,14 +2885,15 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - enabled = true, - schedule = IntervalSchedule(1, ChronoUnit.MINUTES) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + enabled = true, + schedule = IntervalSchedule(1, ChronoUnit.MINUTES), + ), ) - ) assertNotNull(monitor.id) indexDoc(testIndex, "1", testDoc) @@ -2781,7 +2904,9 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { indexDoc(testIndex, "7", testDoc) OpenSearchTestCase.waitUntil( - { searchFindings(monitor).size == 6 && searchAlertsWithFilter(monitor).size == 6 }, 2, TimeUnit.MINUTES + { searchFindings(monitor).size == 6 && searchAlertsWithFilter(monitor).size == 6 }, + 2, + TimeUnit.MINUTES, ) indexDoc(testIndex, "11", testDoc) @@ -2792,7 +2917,9 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { indexDoc(testIndex, "17", testDoc) OpenSearchTestCase.waitUntil( - { searchFindings(monitor).size == 6 && searchAlertsWithFilter(monitor).size == 6 }, 2, TimeUnit.MINUTES + { searchFindings(monitor).size == 6 && searchAlertsWithFilter(monitor).size == 6 }, + 2, + TimeUnit.MINUTES, ) } @@ -2801,27 +2928,28 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { createIndexAlias( aliasName, """ - "properties" : { - 
"test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } """.trimIndent(), - "\"index.number_of_shards\": 7" + "\"index.number_of_shards\": 7", ) val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", fields = listOf(), name = "3") val docLevelInput = DocLevelMonitorInput("description", listOf(aliasName), listOf(docQuery)) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), - enabled = true, - schedule = IntervalSchedule(1, ChronoUnit.MINUTES) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + enabled = true, + schedule = IntervalSchedule(1, ChronoUnit.MINUTES), + ), ) - ) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -2837,7 +2965,9 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { indexDoc(aliasName, "6", testDoc) indexDoc(aliasName, "7", testDoc) OpenSearchTestCase.waitUntil( - { searchFindings(monitor).size == 6 }, 2, TimeUnit.MINUTES + { searchFindings(monitor).size == 6 }, + 2, + TimeUnit.MINUTES, ) rolloverDatastream(aliasName) @@ -2848,7 +2978,9 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { indexDoc(aliasName, "16", testDoc) indexDoc(aliasName, "17", testDoc) OpenSearchTestCase.waitUntil( - { searchFindings(monitor).size == 6 }, 2, TimeUnit.MINUTES + { 
searchFindings(monitor).size == 6 }, + 2, + TimeUnit.MINUTES, ) deleteDataStream(aliasName) @@ -2859,27 +2991,28 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { createIndexAlias( aliasName, """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } """.trimIndent(), - "\"index.number_of_shards\": 7" + "\"index.number_of_shards\": 7", ) val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", fields = listOf(), name = "3") val docLevelInput = DocLevelMonitorInput("description", listOf(aliasName), listOf(docQuery), false) val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), - enabled = true, - schedule = IntervalSchedule(1, ChronoUnit.MINUTES) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + enabled = true, + schedule = IntervalSchedule(1, ChronoUnit.MINUTES), + ), ) - ) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -2897,7 +3030,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { OpenSearchTestCase.waitUntil( { searchFindings(monitor).size == 6 && searchAlertsWithFilter(monitor).size == 1 }, 2, - TimeUnit.MINUTES + TimeUnit.MINUTES, ) rolloverDatastream(aliasName) @@ -2910,7 +3043,7 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { 
OpenSearchTestCase.waitUntil( { searchFindings(monitor).size == 6 && searchAlertsWithFilter(monitor).size == 1 }, 2, - TimeUnit.MINUTES + TimeUnit.MINUTES, ) deleteDataStream(aliasName) @@ -2929,15 +3062,16 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = createMonitor( - randomDocumentLevelMonitor( - name = "__lag-monitor-test__", - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES), - enabled = true + val monitor = + createMonitor( + randomDocumentLevelMonitor( + name = "__lag-monitor-test__", + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES), + enabled = true, + ), ) - ) assertNotNull(monitor.id) assertNotNull(monitor.id) @@ -2950,7 +3084,9 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val res = (searchFindings(monitor).size == 2) found.set(res) found.get() - }, 2, TimeUnit.MINUTES + }, + 2, + TimeUnit.MINUTES, ) assertEquals(found.get(), true) @@ -2971,7 +3107,9 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val res = (searchFindings(monitor).size == 4) found.set(res) found.get() - }, 2, TimeUnit.MINUTES + }, + 2, + TimeUnit.MINUTES, ) assertEquals(found.get(), true) assertTrue(true) @@ -2990,15 +3128,16 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = createMonitor( - randomDocumentLevelMonitor( - name = "__lag-monitor-test__", - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES), - enabled = true + val monitor = + 
createMonitor( + randomDocumentLevelMonitor( + name = "__lag-monitor-test__", + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES), + enabled = true, + ), ) - ) assertNotNull(monitor.id) assertNotNull(monitor.id) @@ -3011,7 +3150,9 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val res = (searchFindings(monitor).size == 2) found.set(res) found.get() - }, 2, TimeUnit.MINUTES + }, + 2, + TimeUnit.MINUTES, ) assertEquals(found.get(), true) @@ -3028,7 +3169,9 @@ class DocumentMonitorRunnerIT : AlertingRestTestCase() { val res = (searchFindings(monitor).size == 4) found.set(res) found.get() - }, 2, TimeUnit.MINUTES + }, + 2, + TimeUnit.MINUTES, ) assertEquals(found.get(), false) } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt index b4f2e8db1..20b79af8f 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorDataSourcesIT.kt @@ -91,15 +91,15 @@ import java.util.concurrent.TimeUnit import java.util.stream.Collectors class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { - fun `test execute monitor with dryrun`() { val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -116,14 +116,16 @@ class 
MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(id) val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 0) - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, null)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 0) try { @@ -141,11 +143,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customAlertsIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(alertsIndex = customAlertsIndex) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(alertsIndex = customAlertsIndex), + ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -163,26 +166,29 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val alerts = searchAlerts(id, customAlertsIndex) assertEquals("Alert saved for test monitor", 1, alerts.size) val table = Table("asc", 
"id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, null)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) val alertId = getAlertsResponse.alerts.get(0).id - val acknowledgeAlertResponse = client().execute( - AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_TYPE, - AcknowledgeAlertRequest(id, listOf(alertId), WriteRequest.RefreshPolicy.IMMEDIATE) - ).get() + val acknowledgeAlertResponse = + client() + .execute( + AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_TYPE, + AcknowledgeAlertRequest(id, listOf(alertId), WriteRequest.RefreshPolicy.IMMEDIATE), + ).get() Assert.assertEquals(acknowledgeAlertResponse.acknowledged.size, 1) } fun `test mappings parsing`() { - val index1 = "index_123" val index2 = "index_456" val index3 = "index_789" @@ -197,82 +203,109 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { createIndex(index3, Settings.EMPTY) createIndex(index4, Settings.EMPTY) - val m1 = """{ - "properties": { - "properties": { - "type": "keyword" - } - } - } - """.trimIndent() - client().admin().indices().putMapping(PutMappingRequest(index1).source(m1, XContentType.JSON)).get() - - val m2 = """{ - "properties": { - "type": { + val m1 = + """ + { "properties": { - "properties": { "type": "keyword" } + "properties": { + "type": "keyword" + } } - } - } - } - 
""".trimIndent() - client().admin().indices().putMapping(PutMappingRequest(index2).source(m2, XContentType.JSON)).get() + } + """.trimIndent() + client() + .admin() + .indices() + .putMapping(PutMappingRequest(index1).source(m1, XContentType.JSON)) + .get() - val m3 = """{ - "properties": { - "type": { + val m2 = + """ + { "properties": { - "something": { - "properties" : { + "type": { + "properties": { "properties": { "type": "keyword" } } } } - } - } - } - """.trimIndent() - client().admin().indices().putMapping(PutMappingRequest(index3).source(m3, XContentType.JSON)).get() + } + """.trimIndent() + client() + .admin() + .indices() + .putMapping(PutMappingRequest(index2).source(m2, XContentType.JSON)) + .get() - val m4 = """{ - "properties": { - "type": { + val m3 = + """ + { "properties": { - "something": { - "properties" : { - "properties": { - "properties": { - "lastone": { "type": "keyword" } + "type": { + "properties": { + "something": { + "properties" : { + "properties": { "type": "keyword" } } } } } } - } - } - } - """.trimIndent() - client().admin().indices().putMapping(PutMappingRequest(index4).source(m4, XContentType.JSON)).get() + } + """.trimIndent() + client() + .admin() + .indices() + .putMapping(PutMappingRequest(index3).source(m3, XContentType.JSON)) + .get() - val docLevelInput = DocLevelMonitorInput( - "description", - listOf(index1, index2, index3, index4), - listOf(q1, q2, q3, q4) - ) + val m4 = + """ + { + "properties": { + "type": { + "properties": { + "something": { + "properties" : { + "properties": { + "properties": { + "lastone": { "type": "keyword" } + } + } + } + } + } + } + } + } + """.trimIndent() + client() + .admin() + .indices() + .putMapping(PutMappingRequest(index4).source(m4, XContentType.JSON)) + .get() + + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index1, index2, index3, index4), + listOf(q1, q2, q3, q4), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = 
"custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) val testDoc1 = """{ @@ -300,9 +333,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(id) val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) val findings = searchFindings(id, customFindingsIndex) @@ -312,21 +346,26 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { fun `test execute monitor without triggers`() { val docQuery = DocLevelQuery(query = "eventType:\"login\"", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery), + ) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - var monitor = 
randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) assertFalse(monitorResponse?.id.isNullOrEmpty()) @@ -343,9 +382,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 0) searchAlerts(id) var table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.isEmpty()) var findings = searchFindings(id, customFindingsIndex) @@ -356,9 +396,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { searchAlerts(id) table = Table("asc", "id", null, 1, 0, "") - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.isEmpty()) @@ -370,66 +411,75 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { fun `test all fields fetched and submitted to percolate query when one of the queries doesn't have 
queryFieldNames`() { // doesn't have query field names so even if other queries pass the wrong fields to query, findings will get generated on matching docs - val docQuery1 = DocLevelQuery( - query = "source.ip.v6.v1:12345", - name = "3", - fields = listOf() - ) - val docQuery2 = DocLevelQuery( - query = "source.ip.v6.v2:16645", - name = "4", - fields = listOf(), - queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1") - ) - val docQuery3 = DocLevelQuery( - query = "source.ip.v4.v0:120", - name = "5", - fields = listOf(), - queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1") - ) + val docQuery1 = + DocLevelQuery( + query = "source.ip.v6.v1:12345", + name = "3", + fields = listOf(), + ) + val docQuery2 = + DocLevelQuery( + query = "source.ip.v6.v2:16645", + name = "4", + fields = listOf(), + queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), + ) + val docQuery3 = + DocLevelQuery( + query = "source.ip.v4.v0:120", + name = "5", + fields = listOf(), + queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), + ) val docQuery4 = DocLevelQuery( query = "alias.some.fff:\"us-west-2\"", name = "6", fields = listOf(), - queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1") + queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), + ) + val docQuery5 = + DocLevelQuery( + query = "message:\"This is an error from IAD region\"", + name = "7", + queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), + fields = listOf(), ) - val docQuery5 = DocLevelQuery( - query = "message:\"This is an error from IAD region\"", - name = "7", - queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), - fields = listOf() - ) val docQuery6 = DocLevelQuery( query = "type.subtype:\"some subtype\"", name = "8", fields = listOf(), - queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1") + queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), ) val docQuery7 = DocLevelQuery( query = "supertype.type:\"some type\"", name = "9", 
fields = listOf(), - queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1") + queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), + ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1, docQuery2, docQuery3, docQuery4, docQuery5, docQuery6, docQuery7), ) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1, docQuery2, docQuery3, docQuery4, docQuery5, docQuery6, docQuery7) - ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) // Trying to test here few different "nesting" situations and "wierd" characters @@ -446,7 +496,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { }""" indexDoc(index, "1", testDoc) client().admin().indices().putMapping( - PutMappingRequest(index).source("alias.some.fff", "type=alias,path=test_field.some_other_field") + PutMappingRequest(index).source("alias.some.fff", "type=alias,path=test_field.some_other_field"), ) assertFalse(monitorResponse?.id.isNullOrEmpty()) monitor = monitorResponse!!.monitor @@ -456,9 +506,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { 
Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(id) val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) val findings = searchFindings(id, customFindingsIndex) @@ -469,67 +520,76 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { fun `test percolate query failure when queryFieldNames has alias`() { // doesn't have query field names so even if other queries pass the wrong fields to query, findings will get generated on matching docs - val docQuery1 = DocLevelQuery( - query = "source.ip.v6.v1:12345", - name = "3", - fields = listOf(), - queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1") - ) - val docQuery2 = DocLevelQuery( - query = "source.ip.v6.v2:16645", - name = "4", - fields = listOf(), - queryFieldNames = listOf("source.ip.v6.v2") - ) - val docQuery3 = DocLevelQuery( - query = "source.ip.v4.v0:120", - name = "5", - fields = listOf(), - queryFieldNames = listOf("source.ip.v6.v4") - ) + val docQuery1 = + DocLevelQuery( + query = "source.ip.v6.v1:12345", + name = "3", + fields = listOf(), + queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), + ) + val docQuery2 = + DocLevelQuery( + query = "source.ip.v6.v2:16645", + name = "4", + fields = listOf(), + queryFieldNames = listOf("source.ip.v6.v2"), + ) + val docQuery3 = + DocLevelQuery( + query = "source.ip.v4.v0:120", + name = "5", + fields = listOf(), + queryFieldNames = listOf("source.ip.v6.v4"), + ) val docQuery4 = DocLevelQuery( query = "alias.some.fff:\"us-west-2\"", name = "6", fields = listOf(), - queryFieldNames = listOf("alias.some.fff") + 
queryFieldNames = listOf("alias.some.fff"), + ) + val docQuery5 = + DocLevelQuery( + query = "message:\"This is an error from IAD region\"", + name = "7", + queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), + fields = listOf(), ) - val docQuery5 = DocLevelQuery( - query = "message:\"This is an error from IAD region\"", - name = "7", - queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), - fields = listOf() - ) val docQuery6 = DocLevelQuery( query = "type.subtype:\"some subtype\"", name = "8", fields = listOf(), - queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1") + queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), ) val docQuery7 = DocLevelQuery( query = "supertype.type:\"some type\"", name = "9", fields = listOf(), - queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1") + queryFieldNames = listOf("alias.some.fff", "source.ip.v6.v1"), + ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1, docQuery2, docQuery3, docQuery4, docQuery5, docQuery6, docQuery7), ) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1, docQuery2, docQuery3, docQuery4, docQuery5, docQuery6, docQuery7) - ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val 
monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) // Trying to test here few different "nesting" situations and "wierd" characters @@ -546,7 +606,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { }""" indexDoc(index, "1", testDoc) client().admin().indices().putMapping( - PutMappingRequest(index).source("alias.some.fff", "type=alias,path=test_field.some_other_field") + PutMappingRequest(index).source("alias.some.fff", "type=alias,path=test_field.some_other_field"), ) assertFalse(monitorResponse?.id.isNullOrEmpty()) monitor = monitorResponse!!.monitor @@ -556,12 +616,18 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 0) searchAlerts(id) val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) - Assert.assertTrue(getAlertsResponse.alerts[0].state.toString().equals(Alert.State.ERROR.toString())) + Assert.assertTrue( + getAlertsResponse.alerts[0] + .state + .toString() + .equals(Alert.State.ERROR.toString()), + ) val findings = searchFindings(id, customFindingsIndex) assertEquals("Findings saved for test monitor", 0, findings.size) } @@ -577,24 +643,27 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val q8 = DocLevelQuery(query = "type:\"some type\"", name = "10", fields = listOf()) val q9 = DocLevelQuery(query = "properties:123", name = "11", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", - listOf(index), - listOf(q1, q2, q3, q4, q5, q6, q7, q8, 
q9) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(q1, q2, q3, q4, q5, q6, q7, q8, q9), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) // Trying to test here few different "nesting" situations and "wierd" characters val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) @@ -613,13 +682,18 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { }""" indexDoc(index, "1", testDoc) client().admin().indices().putMapping( - PutMappingRequest(index).source("alias.some.fff", "type=alias,path=test_field.some_other_field") + PutMappingRequest(index).source("alias.some.fff", "type=alias,path=test_field.some_other_field"), ) - val mappings = "{\"properties\":{\"type\":{\"type\":\"text\",\"fields\":{\"keyword\":{\"type\":\"keyword\"," + - "\"ignore_above\":256}}},\"query\":{\"type\":\"text\"}}}" - val mappingsResp = client().admin().indices().putMapping( - PutMappingRequest(index).source(mappings, XContentType.JSON) - ).get() + val mappings = + "{\"properties\":{\"type\":{\"type\":\"text\",\"fields\":{\"keyword\":{\"type\":\"keyword\"," + + "\"ignore_above\":256}}},\"query\":{\"type\":\"text\"}}}" + val mappingsResp = + client() + 
.admin() + .indices() + .putMapping( + PutMappingRequest(index).source(mappings, XContentType.JSON), + ).get() assertFalse(monitorResponse?.id.isNullOrEmpty()) monitor = monitorResponse!!.monitor val id = monitorResponse.id @@ -628,9 +702,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(id) val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) val findings = searchFindings(id, customFindingsIndex) @@ -642,22 +717,27 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { fun `test execute monitor with non-flattened json doc as source`() { val docQuery1 = DocLevelQuery(query = "source.device.port:12345 OR source.device.hwd.id:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + 
DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) val mappings = """{ @@ -681,7 +761,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } }""" - client().admin().indices().putMapping(PutMappingRequest(index).source(mappings, XContentType.JSON)).get() + client() + .admin() + .indices() + .putMapping(PutMappingRequest(index).source(mappings, XContentType.JSON)) + .get() val getFieldCapabilitiesResp = client().fieldCaps(FieldCapabilitiesRequest().indices(index).fields("*")).get() assertTrue(getFieldCapabilitiesResp.getField("source").containsKey("object")) assertTrue(getFieldCapabilitiesResp.getField("source.device").containsKey("object")) @@ -740,9 +824,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(id) val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) val findings = searchFindings(id, customFindingsIndex) @@ -758,22 +843,27 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery5 = DocLevelQuery(query = "message:\"This is an error from IAD region\"", name = "7", fields = listOf()) val docQuery6 = DocLevelQuery(query = "type.subtype:\"some subtype\"", name = "8", fields = listOf()) val docQuery7 = DocLevelQuery(query = "supertype.type:\"some type\"", name = "9", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1, docQuery2, docQuery3, 
docQuery4, docQuery5, docQuery6, docQuery7) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1, docQuery2, docQuery3, docQuery4, docQuery5, docQuery6, docQuery7), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) // Trying to test here few different "nesting" situations and "wierd" characters @@ -790,7 +880,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { }""" indexDoc(index, "1", testDoc) client().admin().indices().putMapping( - PutMappingRequest(index).source("alias.some.fff", "type=alias,path=test_field.some_other_field") + PutMappingRequest(index).source("alias.some.fff", "type=alias,path=test_field.some_other_field"), ) assertFalse(monitorResponse?.id.isNullOrEmpty()) monitor = monitorResponse!!.monitor @@ -800,9 +890,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(id) val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", 
null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) val findings = searchFindings(id, customFindingsIndex) @@ -813,14 +904,18 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { fun `test monitor error alert created and updated with new error`() { val docQuery = DocLevelQuery(query = "source:12345", name = "1", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val testDoc = """{ "message" : "This is an error from IAD region" @@ -833,26 +928,39 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val id = monitorResponse.id // Close index to force error alert - client().admin().indices().close(CloseIndexRequest(index)).get() + client() + .admin() + .indices() + .close(CloseIndexRequest(index)) + .get() var executeMonitorResponse = executeMonitor(monitor, id, false) Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 0) searchAlerts(id) var table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + 
.get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) Assert.assertTrue( getAlertsResponse.alerts[0].errorMessage == - "AlertingException[closed]; nested: Exception[org.opensearch.indices.IndexClosedException: closed]; " + "AlertingException[closed]; nested: Exception[org.opensearch.indices.IndexClosedException: closed]; ", ) // Reopen index - client().admin().indices().open(OpenIndexRequest(index)).get() + client() + .admin() + .indices() + .open(OpenIndexRequest(index)) + .get() // Close queryIndex - client().admin().indices().close(CloseIndexRequest(DOC_LEVEL_QUERIES_INDEX + INDEX_PATTERN_SUFFIX)).get() + client() + .admin() + .indices() + .close(CloseIndexRequest(DOC_LEVEL_QUERIES_INDEX + INDEX_PATTERN_SUFFIX)) + .get() indexDoc(index, "1", testDoc) @@ -861,14 +969,15 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 0) searchAlerts(id) table = Table("asc", "id", null, 10, 0, "") - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) Assert.assertTrue( getAlertsResponse.alerts[0].errorHistory[0].message == - "AlertingException[closed]; nested: Exception[org.opensearch.indices.IndexClosedException: closed]; " + "AlertingException[closed]; nested: Exception[org.opensearch.indices.IndexClosedException: closed]; ", ) Assert.assertEquals(1, getAlertsResponse.alerts[0].errorHistory.size) Assert.assertTrue(getAlertsResponse.alerts[0].errorMessage!!.contains("Failed to run percolate search")) @@ -876,14 +985,18 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { fun `test monitor error alert 
created trigger run errored 2 times same error`() { val docQuery = DocLevelQuery(query = "source:12345", name = "1", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery), + ) val trigger = randomDocumentLevelTrigger(condition = Script("invalid script code")) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) assertFalse(monitorResponse?.id.isNullOrEmpty()) @@ -896,9 +1009,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(id) var table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) Assert.assertTrue(getAlertsResponse.alerts[0].errorMessage!!.contains("Trigger errors")) @@ -910,9 +1024,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(id) table = Table("asc", "id", null, 10, 0, "") - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() 
Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) Assert.assertEquals(0, getAlertsResponse.alerts[0].errorHistory.size) @@ -925,19 +1040,24 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertHistoryIndex = "custom-alert-history-index" val customAlertHistoryIndexPattern = "" val docQuery = DocLevelQuery(query = "source:12345", name = "1", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - alertsIndex = customAlertIndex, - alertsHistoryIndex = customAlertHistoryIndex, - alertsHistoryIndexPattern = customAlertHistoryIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + alertsIndex = customAlertIndex, + alertsHistoryIndex = customAlertHistoryIndex, + alertsHistoryIndexPattern = customAlertHistoryIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) assertFalse(monitorResponse?.id.isNullOrEmpty()) @@ -946,47 +1066,58 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val id = monitorResponse.id // Close index to force error alert - client().admin().indices().close(CloseIndexRequest(index)).get() + client() + .admin() + .indices() + .close(CloseIndexRequest(index)) + .get() var executeMonitorResponse = executeMonitor(monitor, id, false) Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 0) searchAlerts(id) var table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() 
- .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, customAlertIndex)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, customAlertIndex)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertEquals(1, getAlertsResponse.alerts.size) Assert.assertTrue( getAlertsResponse.alerts[0].errorMessage == - "AlertingException[closed]; nested: Exception[org.opensearch.indices.IndexClosedException: closed]; " + "AlertingException[closed]; nested: Exception[org.opensearch.indices.IndexClosedException: closed]; ", ) Assert.assertNull(getAlertsResponse.alerts[0].endTime) // Open index to have monitor run successfully - client().admin().indices().open(OpenIndexRequest(index)).get() + client() + .admin() + .indices() + .open(OpenIndexRequest(index)) + .get() // Execute monitor again and expect successful run executeMonitorResponse = executeMonitor(monitor, id, false) Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) // Verify that alert is moved to history index table = Table("asc", "id", null, 10, 0, "") - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, customAlertIndex)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, customAlertIndex)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertEquals(0, getAlertsResponse.alerts.size) table = Table("asc", "id", null, 10, 0, "") - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, customAlertHistoryIndex)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", 
"ALL", id, customAlertHistoryIndex)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertEquals(1, getAlertsResponse.alerts.size) Assert.assertTrue( getAlertsResponse.alerts[0].errorMessage == - "AlertingException[closed]; nested: Exception[org.opensearch.indices.IndexClosedException: closed]; " + "AlertingException[closed]; nested: Exception[org.opensearch.indices.IndexClosedException: closed]; ", ) Assert.assertNotNull(getAlertsResponse.alerts[0].endTime) } @@ -996,19 +1127,24 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertHistoryIndex = "custom-alert-history-index" val customAlertHistoryIndexPattern = "" val docQuery = DocLevelQuery(query = "source:12345", name = "1", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - alertsIndex = customAlertIndex, - alertsHistoryIndex = customAlertHistoryIndex, - alertsHistoryIndexPattern = customAlertHistoryIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + alertsIndex = customAlertIndex, + alertsHistoryIndex = customAlertHistoryIndex, + alertsHistoryIndexPattern = customAlertHistoryIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) assertFalse(monitorResponse?.id.isNullOrEmpty()) @@ -1017,7 +1153,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val monitorId = monitorResponse.id // Close index to force error alert - client().admin().indices().close(CloseIndexRequest(index)).get() + client() + .admin() + .indices() + .close(CloseIndexRequest(index)) + .get() var 
executeMonitorResponse = executeMonitor(monitor, monitorId, false) Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) @@ -1025,55 +1165,65 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { // Create 10 old alerts to simulate having "old error alerts"(2.6) for (i in 1..10) { val startTimestamp = Instant.now().minusSeconds(3600 * 24 * i.toLong()).toEpochMilli() - val oldErrorAlertAsString = """ + val oldErrorAlertAsString = + """ {"id":"$i","version":-1,"monitor_id":"$monitorId", "schema_version":4,"monitor_version":1,"monitor_name":"geCNcHKTlp","monitor_user":{"name":"","backend_roles":[], "roles":[],"custom_attribute_names":[],"user_requested_tenant":null},"trigger_id":"_nnk_YcB5pHgSZwYwO2r", "trigger_name":"NoOp trigger","finding_ids":[],"related_doc_ids":[],"state":"ERROR","error_message":"some monitor error", "alert_history":[],"severity":"","action_execution_results":[], "start_time":$startTimestamp,"last_notification_time":$startTimestamp,"end_time":null,"acknowledged_time":null} - """.trimIndent() + """.trimIndent() - client().index( - IndexRequest(customAlertIndex) - .id("$i") - .routing(monitorId) - .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .source(oldErrorAlertAsString, XContentType.JSON) - ).get() + client() + .index( + IndexRequest(customAlertIndex) + .id("$i") + .routing(monitorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(oldErrorAlertAsString, XContentType.JSON), + ).get() } var table = Table("asc", "id", null, 1000, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, customAlertIndex)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, customAlertIndex)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertEquals(1 + 10, getAlertsResponse.alerts.size) - val 
newErrorAlert = getAlertsResponse.alerts.firstOrNull { - it.errorMessage == - "AlertingException[closed]; nested: Exception[org.opensearch.indices.IndexClosedException: closed]; " - } + val newErrorAlert = + getAlertsResponse.alerts.firstOrNull { + it.errorMessage == + "AlertingException[closed]; nested: Exception[org.opensearch.indices.IndexClosedException: closed]; " + } Assert.assertNotNull(newErrorAlert) Assert.assertNull(newErrorAlert!!.endTime) // Open index to have monitor run successfully - client().admin().indices().open(OpenIndexRequest(index)).get() + client() + .admin() + .indices() + .open(OpenIndexRequest(index)) + .get() // Execute monitor again and expect successful run executeMonitorResponse = executeMonitor(monitor, monitorId, false) Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) // Verify that alert is moved to history index table = Table("asc", "id", null, 1000, 0, "") - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, customAlertIndex)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, customAlertIndex)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertEquals(0, getAlertsResponse.alerts.size) table = Table("asc", "id", null, 1000, 0, "") - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, customAlertHistoryIndex)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, customAlertHistoryIndex)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertEquals(11, getAlertsResponse.alerts.size) getAlertsResponse.alerts.forEach { alert -> assertNotNull(alert.endTime) 
} @@ -1086,15 +1236,17 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) // We are verifying here that index with nested mappings and nested aliases @@ -1108,11 +1260,14 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { m["rule"] = Map.of("type", "nested", "properties", m1) val properties = Map.of("properties", m) - client().admin().indices().putMapping( - PutMappingRequest( - index - ).source(properties) - ).get() + client() + .admin() + .indices() + .putMapping( + PutMappingRequest( + index, + ).source(properties), + ).get() // Put alias for nested fields val mm: MutableMap = HashMap() @@ -1120,11 +1275,14 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { mm1["title_alias"] = Map.of("type", "alias", "path", "rule.title") mm["rule"] = Map.of("type", "nested", "properties", mm1) val properties1 = Map.of("properties", mm) - client().admin().indices().putMapping( - PutMappingRequest( - index - ).source(properties1) - ).get() + client() + .admin() + .indices() + .putMapping( + PutMappingRequest( + index, + ).source(properties1), + ).get() val testDoc = """{ "rule": {"title": "some_title"}, @@ -1133,7 +1291,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { indexDoc(index, "2", testDoc) 
client().admin().indices().putMapping( - PutMappingRequest(index).source("alias.some.fff", "type=alias,path=test_field.some_other_field") + PutMappingRequest(index).source("alias.some.fff", "type=alias,path=test_field.some_other_field"), ) assertFalse(monitorResponse?.id.isNullOrEmpty()) monitor = monitorResponse!!.monitor @@ -1143,9 +1301,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(id) val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) val findings = searchFindings(id, customFindingsIndex) @@ -1165,9 +1324,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "message": "msg 1 2 3 4" }""" indexDoc(index, "2", testDoc) - client().admin().indices() + client() + .admin() + .indices() .create( - CreateIndexRequest(customQueryIndex + "-000001").alias(Alias(customQueryIndex)) + CreateIndexRequest(customQueryIndex + "-000001") + .alias(Alias(customQueryIndex)) .mapping( """ { @@ -1186,25 +1348,37 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } } } - """.trimIndent() - ) + """.trimIndent(), + ), ).get() - client().admin().indices().close(CloseIndexRequest(customQueryIndex + "-000001")).get() - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - queryIndexMappingsByType = mapOf(Pair("text", mapOf(Pair("analyzer", analyzer)))), - ), - owner = "alerting" - ) + client() + .admin() + .indices() + 
.close(CloseIndexRequest(customQueryIndex + "-000001")) + .get() + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + queryIndexMappingsByType = mapOf(Pair("text", mapOf(Pair("analyzer", analyzer)))), + ), + owner = "alerting", + ) try { createMonitor(monitor) fail("monitor creation should fail due to incorrect analyzer name in test setup") } catch (e: Exception) { - Assert.assertEquals(client().search(SearchRequest(SCHEDULED_JOBS_INDEX)).get().hits.hits.size, 0) + Assert.assertEquals( + client() + .search(SearchRequest(SCHEDULED_JOBS_INDEX)) + .get() + .hits.hits.size, + 0, + ) } } @@ -1214,14 +1388,16 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customQueryIndex = "custom_alerts_index" val analyzer = "whitespace" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - queryIndexMappingsByType = mapOf(Pair("text", mapOf(Pair("analyzer", analyzer)))), + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + queryIndexMappingsByType = mapOf(Pair("text", mapOf(Pair("analyzer", analyzer)))), + ), ) - ) var executeMonitorResponse = executeMonitor(monitor, null) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1243,9 +1419,13 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) Assert.assertEquals( - (executeMonitorResponse.monitorRunResult.triggerResults.iterator().next().value 
as DocumentLevelTriggerRunResult) - .triggeredDocs.size, - 1 + ( + executeMonitorResponse.monitorRunResult.triggerResults + .iterator() + .next() + .value as DocumentLevelTriggerRunResult + ).triggeredDocs.size, + 1, ) } @@ -1255,14 +1435,16 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customQueryIndex = "custom_alerts_index" val analyzer = "whitespace" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - queryIndexMappingsByType = mapOf(Pair("text", mapOf(Pair("analyzer", analyzer)))), + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + queryIndexMappingsByType = mapOf(Pair("text", mapOf(Pair("analyzer", analyzer)))), + ), ) - ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1278,7 +1460,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(id) - val mapping = client().admin().indices().getMappings(GetMappingsRequest().indices(customQueryIndex)).get() + val mapping = + client() + .admin() + .indices() + .getMappings(GetMappingsRequest().indices(customQueryIndex)) + .get() Assert.assertTrue(mapping.toString().contains("\"analyzer\":\"$analyzer\"")) } @@ -1288,14 +1475,16 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customQueryIndex = "custom_query_index" val analyzer = "whitespace" - var monitor = randomDocumentLevelMonitor( - inputs = 
listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - queryIndexMappingsByType = mapOf(Pair("text", mapOf(Pair("analyzer", analyzer)))), + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + queryIndexMappingsByType = mapOf(Pair("text", mapOf(Pair("analyzer", analyzer)))), + ), ) - ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1311,13 +1500,25 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals(executeMonitorResponse!!.monitorRunResult.monitorName, monitor.name) Assert.assertEquals(executeMonitorResponse.monitorRunResult.triggerResults.size, 1) searchAlerts(monitorId) - val clusterStateResponse = client().admin().cluster().state(ClusterStateRequest().indices(customQueryIndex).metadata(true)).get() - val mapping = client().admin().indices().getMappings(GetMappingsRequest().indices(customQueryIndex)).get() + val clusterStateResponse = + client() + .admin() + .cluster() + .state(ClusterStateRequest().indices(customQueryIndex).metadata(true)) + .get() + val mapping = + client() + .admin() + .indices() + .getMappings(GetMappingsRequest().indices(customQueryIndex)) + .get() Assert.assertTrue(mapping.toString().contains("\"analyzer\":\"$analyzer\"") == true) // Verify queries exist - var searchResponse = client().search( - SearchRequest(customQueryIndex).source(SearchSourceBuilder().query(QueryBuilders.matchAllQuery())) - ).get() + var searchResponse = + client() + .search( + SearchRequest(customQueryIndex).source(SearchSourceBuilder().query(QueryBuilders.matchAllQuery())), + ).get() assertNotEquals(0, searchResponse.hits.hits.size) deleteMonitor(monitorId) @@ -1331,11 +1532,12 @@ class MonitorDataSourcesIT : 
AlertingSingleNodeTestCase() { val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(findingsIndex = customFindingsIndex, findingsIndexPattern = customFindingsIndexPattern) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(findingsIndex = customFindingsIndex, findingsIndexPattern = customFindingsIndexPattern), + ) val monitorResponse = createMonitor(monitor) client().admin().indices().refresh(RefreshRequest("*")) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) @@ -1370,7 +1572,6 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } fun `test execute monitor with multiple indices in input success`() { - val testSourceIndex1 = "test_source_index1" val testSourceIndex2 = "test_source_index2" @@ -1382,11 +1583,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(findingsIndex = customFindingsIndex, findingsIndexPattern = customFindingsIndexPattern) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(findingsIndex = customFindingsIndex, findingsIndexPattern = customFindingsIndexPattern), + ) val monitorResponse = createMonitor(monitor) client().admin().indices().refresh(RefreshRequest("*")) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) @@ -1439,11 
+1641,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(findingsIndex = customFindingsIndex, findingsIndexPattern = customFindingsIndexPattern) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(findingsIndex = customFindingsIndex, findingsIndexPattern = customFindingsIndexPattern), + ) val monitorResponse = createMonitor(monitor) client().admin().indices().refresh(RefreshRequest("*")) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) @@ -1457,7 +1660,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { indexDoc(testSourceIndex2, "1", testDoc) - client().admin().indices().delete(DeleteIndexRequest(testSourceIndex1)).get() + client() + .admin() + .indices() + .delete(DeleteIndexRequest(testSourceIndex1)) + .get() val id = monitorResponse.id var executeMonitorResponse = executeMonitor(monitor, id, false) @@ -1496,11 +1703,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(findingsIndex = customFindingsIndex, findingsIndexPattern = customFindingsIndexPattern) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(findingsIndex = customFindingsIndex, findingsIndexPattern = customFindingsIndexPattern), + ) val monitorResponse = 
createMonitor(monitor) client().admin().indices().refresh(RefreshRequest("*")) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) @@ -1514,7 +1722,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { indexDoc(testSourceIndex1, "1", testDoc) - client().admin().indices().delete(DeleteIndexRequest(testSourceIndex2)).get() + client() + .admin() + .indices() + .delete(DeleteIndexRequest(testSourceIndex2)) + .get() val id = monitorResponse.id var executeMonitorResponse = executeMonitor(monitor, id, false) @@ -1542,67 +1754,71 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } fun `test execute pre-existing monitor and update`() { - val request = CreateIndexRequest(SCHEDULED_JOBS_INDEX).mapping(ScheduledJobIndices.scheduledJobMappings()) - .settings(Settings.builder().put("index.hidden", true).build()) + val request = + CreateIndexRequest(SCHEDULED_JOBS_INDEX) + .mapping(ScheduledJobIndices.scheduledJobMappings()) + .settings(Settings.builder().put("index.hidden", true).build()) client().admin().indices().create(request) - val monitorStringWithoutName = """ - { - "monitor": { - "type": "monitor", - "schema_version": 0, - "name": "UayEuXpZtb", - "monitor_type": "doc_level_monitor", - "user": { - "name": "", - "backend_roles": [], - "roles": [], - "custom_attribute_names": [], - "user_requested_tenant": null - }, - "enabled": true, - "enabled_time": 1662753436791, - "schedule": { - "period": { - "interval": 5, - "unit": "MINUTES" - } - }, - "inputs": [{ - "doc_level_input": { - "description": "description", - "indices": [ - "$index" - ], - "queries": [{ - "id": "63efdcce-b5a1-49f4-a25f-6b5f9496a755", - "name": "3", - "query": "test_field:\"us-west-2\"", - "tags": [] - }] - } - }], - "triggers": [{ - "document_level_trigger": { - "id": "OGnTI4MBv6qt0ATc9Phk", - "name": "mrbHRMevYI", - "severity": "1", - "condition": { - "script": { - "source": "return true", - "lang": "painless" - } - }, - 
"actions": [] - } - }], - "last_update_time": 1662753436791 - } - } - """.trimIndent() + val monitorStringWithoutName = + """ + { + "monitor": { + "type": "monitor", + "schema_version": 0, + "name": "UayEuXpZtb", + "monitor_type": "doc_level_monitor", + "user": { + "name": "", + "backend_roles": [], + "roles": [], + "custom_attribute_names": [], + "user_requested_tenant": null + }, + "enabled": true, + "enabled_time": 1662753436791, + "schedule": { + "period": { + "interval": 5, + "unit": "MINUTES" + } + }, + "inputs": [{ + "doc_level_input": { + "description": "description", + "indices": [ + "$index" + ], + "queries": [{ + "id": "63efdcce-b5a1-49f4-a25f-6b5f9496a755", + "name": "3", + "query": "test_field:\"us-west-2\"", + "tags": [] + }] + } + }], + "triggers": [{ + "document_level_trigger": { + "id": "OGnTI4MBv6qt0ATc9Phk", + "name": "mrbHRMevYI", + "severity": "1", + "condition": { + "script": { + "source": "return true", + "lang": "painless" + } + }, + "actions": [] + } + }], + "last_update_time": 1662753436791 + } + } + """.trimIndent() val monitorId = "abc" indexDoc(SCHEDULED_JOBS_INDEX, monitorId, monitorStringWithoutName) - val monitorMetadata = """ + val monitorMetadata = + """ { "metadata": { "monitor_id": "$monitorId", @@ -1619,7 +1835,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } } } - """.trimIndent() + """.trimIndent() indexDoc(SCHEDULED_JOBS_INDEX, "$monitorId-metadata", monitorMetadata) val getMonitorResponse = getMonitorResponse(monitorId) Assert.assertNotNull(getMonitorResponse) @@ -1644,18 +1860,20 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertsIndex = "custom_alerts_index" val customQueryIndex = "custom_query_index" val customFindingsIndex = "custom_findings_index" - val updateMonitorResponse = updateMonitor( - monitor.copy( - id = monitorId, - owner = "security_analytics_plugin", - dataSources = DataSources( - alertsIndex = customAlertsIndex, - queryIndex = customQueryIndex, - findingsIndex 
= customFindingsIndex - ) - ), - monitorId - ) + val updateMonitorResponse = + updateMonitor( + monitor.copy( + id = monitorId, + owner = "security_analytics_plugin", + dataSources = + DataSources( + alertsIndex = customAlertsIndex, + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + ), + ), + monitorId, + ) Assert.assertNotNull(updateMonitorResponse) Assert.assertEquals(updateMonitorResponse!!.monitor.owner, "security_analytics_plugin") indexDoc(index, "2", testDoc) @@ -1668,77 +1886,85 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertsIndexAlerts = searchAlerts(monitorId, customAlertsIndex) assertEquals("Alert saved for test monitor", 1, customAlertsIndexAlerts.size) val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, null)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", monitorId, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) val searchRequest = SearchRequest(SCHEDULED_JOBS_INDEX) var searchMonitorResponse = - client().execute(AlertingActions.SEARCH_MONITORS_ACTION_TYPE, SearchMonitorRequest(searchRequest)) + client() + .execute(AlertingActions.SEARCH_MONITORS_ACTION_TYPE, SearchMonitorRequest(searchRequest)) .get() Assert.assertEquals(searchMonitorResponse.hits.hits.size, 0) 
searchRequest.source().query(MatchQueryBuilder("monitor.owner", "security_analytics_plugin")) searchMonitorResponse = - client().execute(AlertingActions.SEARCH_MONITORS_ACTION_TYPE, SearchMonitorRequest(searchRequest)) + client() + .execute(AlertingActions.SEARCH_MONITORS_ACTION_TYPE, SearchMonitorRequest(searchRequest)) .get() Assert.assertEquals(searchMonitorResponse.hits.hits.size, 1) } fun `test execute pre-existing monitor without triggers`() { - val request = CreateIndexRequest(SCHEDULED_JOBS_INDEX).mapping(ScheduledJobIndices.scheduledJobMappings()) - .settings(Settings.builder().put("index.hidden", true).build()) + val request = + CreateIndexRequest(SCHEDULED_JOBS_INDEX) + .mapping(ScheduledJobIndices.scheduledJobMappings()) + .settings(Settings.builder().put("index.hidden", true).build()) client().admin().indices().create(request) - val monitorStringWithoutName = """ - { - "monitor": { - "type": "monitor", - "schema_version": 0, - "name": "UayEuXpZtb", - "monitor_type": "doc_level_monitor", - "user": { - "name": "", - "backend_roles": [], - "roles": [], - "custom_attribute_names": [], - "user_requested_tenant": null - }, - "enabled": true, - "enabled_time": 1662753436791, - "schedule": { - "period": { - "interval": 5, - "unit": "MINUTES" - } - }, - "inputs": [{ - "doc_level_input": { - "description": "description", - "indices": [ - "$index" - ], - "queries": [{ - "id": "63efdcce-b5a1-49f4-a25f-6b5f9496a755", - "name": "3", - "query": "test_field:\"us-west-2\"", - "tags": [] - }] - } - }], - "triggers": [], - "last_update_time": 1662753436791 - } - } - """.trimIndent() + val monitorStringWithoutName = + """ + { + "monitor": { + "type": "monitor", + "schema_version": 0, + "name": "UayEuXpZtb", + "monitor_type": "doc_level_monitor", + "user": { + "name": "", + "backend_roles": [], + "roles": [], + "custom_attribute_names": [], + "user_requested_tenant": null + }, + "enabled": true, + "enabled_time": 1662753436791, + "schedule": { + "period": { + "interval": 
5, + "unit": "MINUTES" + } + }, + "inputs": [{ + "doc_level_input": { + "description": "description", + "indices": [ + "$index" + ], + "queries": [{ + "id": "63efdcce-b5a1-49f4-a25f-6b5f9496a755", + "name": "3", + "query": "test_field:\"us-west-2\"", + "tags": [] + }] + } + }], + "triggers": [], + "last_update_time": 1662753436791 + } + } + """.trimIndent() val monitorId = "abc" indexDoc(SCHEDULED_JOBS_INDEX, monitorId, monitorStringWithoutName) - val monitorMetadata = """ + val monitorMetadata = + """ { "metadata": { "monitor_id": "$monitorId", @@ -1755,7 +1981,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } } } - """.trimIndent() + """.trimIndent() indexDoc(SCHEDULED_JOBS_INDEX, "$monitorId-metadata", monitorMetadata) val getMonitorResponse = getMonitorResponse(monitorId) Assert.assertNotNull(getMonitorResponse) @@ -1787,11 +2013,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(findingsIndex = customFindingsIndex) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(findingsIndex = customFindingsIndex), + ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1828,11 +2055,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" - var monitor = randomDocumentLevelMonitor( - inputs = 
listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(findingsIndex = customFindingsIndex) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(findingsIndex = customFindingsIndex), + ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1856,7 +2084,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals( "Findings mismatch between manually searched and fetched via GetFindingsAction", findings.get(0).id, - findingsFromAPI.get(0).id + findingsFromAPI.get(0).id, ) } @@ -1865,11 +2093,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(findingsIndex = customFindingsIndex) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(findingsIndex = customFindingsIndex), + ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1895,7 +2124,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetMonitor Action error ", - it.contains("Monitor not found") + it.contains("Monitor not found"), ) } } @@ -1906,12 +2135,13 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val trigger = 
randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customAlertsIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(alertsIndex = customAlertsIndex), - owner = "owner" - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(alertsIndex = customAlertsIndex), + owner = "owner", + ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1930,14 +2160,16 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val alerts = searchAlerts(id, customAlertsIndex) assertEquals("Alert saved for test monitor", 1, alerts.size) val table = Table("asc", "id", null, 1, 0, "") - var getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) - .get() + var getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", null, customAlertsIndex)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) - getAlertsResponse = client() - .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, null)) - .get() + getAlertsResponse = + client() + .execute(AlertingActions.GET_ALERTS_ACTION_TYPE, GetAlertsRequest(table, "ALL", "ALL", id, null)) + .get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 1) } @@ -1947,11 +2179,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" - var monitor = 
randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(findingsIndex = customFindingsIndex) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(findingsIndex = customFindingsIndex), + ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -1977,7 +2210,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetMonitor Action error ", - it.contains("no such index") + it.contains("no such index"), ) } } @@ -1991,15 +2224,17 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertsIndex = "custom_alerts_index" val customAlertsHistoryIndex = "custom_alerts_history_index" val customAlertsHistoryIndexPattern = "" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger1, trigger2), - dataSources = DataSources( - alertsIndex = customAlertsIndex, - alertsHistoryIndex = customAlertsHistoryIndex, - alertsHistoryIndexPattern = customAlertsHistoryIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger1, trigger2), + dataSources = + DataSources( + alertsIndex = customAlertsIndex, + alertsHistoryIndex = customAlertsHistoryIndex, + alertsHistoryIndexPattern = customAlertsHistoryIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -2039,15 +2274,17 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertsIndex = "custom_alerts_index" val customAlertsHistoryIndex = "custom_alerts_history_index" val customAlertsHistoryIndexPattern = "" - var monitor = 
randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger1, trigger2), - dataSources = DataSources( - alertsIndex = customAlertsIndex, - alertsHistoryIndex = customAlertsHistoryIndex, - alertsHistoryIndexPattern = customAlertsHistoryIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger1, trigger2), + dataSources = + DataSources( + alertsIndex = customAlertsIndex, + alertsHistoryIndex = customAlertsHistoryIndex, + alertsHistoryIndexPattern = customAlertsHistoryIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -2105,10 +2342,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -2121,10 +2359,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val id = monitorResponse.id - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse1 = createMonitor(monitor1) monitor1 = monitorResponse1!!.monitor val id1 = monitorResponse1.id @@ -2136,57 +2375,61 @@ class MonitorDataSourcesIT : 
AlertingSingleNodeTestCase() { val alerts1 = searchAlerts(id) assertEquals("Alert saved for test monitor", 1, alerts1.size) val table = Table("asc", "id", null, 1000, 0, "") - var getAlertsResponse = client() - .execute( - AlertingActions.GET_ALERTS_ACTION_TYPE, - GetAlertsRequest(table, "ALL", "ALL", null, null) - ) - .get() + var getAlertsResponse = + client() + .execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + GetAlertsRequest(table, "ALL", "ALL", null, null), + ).get() Assert.assertTrue(getAlertsResponse != null) Assert.assertTrue(getAlertsResponse.alerts.size == 2) - var alertsResponseForRequestWithoutCustomIndex = client() - .execute( - AlertingActions.GET_ALERTS_ACTION_TYPE, - GetAlertsRequest(table, "ALL", "ALL", null, null, monitorIds = listOf(id, id1, "1", "2")) - ) - .get() + var alertsResponseForRequestWithoutCustomIndex = + client() + .execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + GetAlertsRequest(table, "ALL", "ALL", null, null, monitorIds = listOf(id, id1, "1", "2")), + ).get() Assert.assertTrue(alertsResponseForRequestWithoutCustomIndex != null) Assert.assertTrue(alertsResponseForRequestWithoutCustomIndex.alerts.size == 2) - val alertIds = getAlertsResponse.alerts.stream().map { alert -> alert.id }.collect(Collectors.toList()) - var getAlertsByAlertIds = client() - .execute( - AlertingActions.GET_ALERTS_ACTION_TYPE, - GetAlertsRequest(table, "ALL", "ALL", null, null, alertIds = alertIds) - ) - .get() + val alertIds = + getAlertsResponse.alerts + .stream() + .map { alert -> alert.id } + .collect(Collectors.toList()) + var getAlertsByAlertIds = + client() + .execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + GetAlertsRequest(table, "ALL", "ALL", null, null, alertIds = alertIds), + ).get() Assert.assertTrue(getAlertsByAlertIds != null) Assert.assertTrue(getAlertsByAlertIds.alerts.size == 2) - var getAlertsByWrongAlertIds = client() - .execute( - AlertingActions.GET_ALERTS_ACTION_TYPE, - GetAlertsRequest(table, "ALL", "ALL", null, null, 
alertIds = listOf("1", "2")) - ) - .get() + var getAlertsByWrongAlertIds = + client() + .execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + GetAlertsRequest(table, "ALL", "ALL", null, null, alertIds = listOf("1", "2")), + ).get() Assert.assertTrue(getAlertsByWrongAlertIds != null) Assert.assertEquals(getAlertsByWrongAlertIds.alerts.size, 0) } fun `test queryIndex rollover and delete monitor success`() { - val testSourceIndex = "test_source_index" createIndex(testSourceIndex, Settings.builder().put("index.mapping.total_fields.limit", "10000").build()) val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(testSourceIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) // This doc should create close to 1000 (limit) fields in index mapping. It's easier to add mappings like this then via api val docPayload: StringBuilder = StringBuilder(100000) docPayload.append("{") @@ -2225,7 +2468,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { // Both monitors used same queryIndex alias. 
Since source index has close to limit amount of fields in mappings, // we expect that creation of second monitor would trigger rollover of queryIndex var getIndexResponse: GetIndexResponse = - client().admin().indices().getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")).get() + client() + .admin() + .indices() + .getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")) + .get() assertEquals(2, getIndexResponse.indices.size) assertEquals(DOC_LEVEL_QUERIES_INDEX + "-000001", getIndexResponse.indices[0]) assertEquals(DOC_LEVEL_QUERIES_INDEX + "-000002", getIndexResponse.indices[1]) @@ -2248,36 +2495,48 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertTrue(alerts != null) Assert.assertTrue(alerts.size == 2) // Delete monitor #1 - client().execute( - AlertingActions.DELETE_MONITOR_ACTION_TYPE, DeleteMonitorRequest(monitorResponse.id, WriteRequest.RefreshPolicy.IMMEDIATE) - ).get() + client() + .execute( + AlertingActions.DELETE_MONITOR_ACTION_TYPE, + DeleteMonitorRequest(monitorResponse.id, WriteRequest.RefreshPolicy.IMMEDIATE), + ).get() // Expect first concrete queryIndex to be deleted since that one was only used by this monitor getIndexResponse = - client().admin().indices().getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")).get() + client() + .admin() + .indices() + .getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")) + .get() assertEquals(1, getIndexResponse.indices.size) assertEquals(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "-000002", getIndexResponse.indices[0]) // Delete monitor #2 - client().execute( - AlertingActions.DELETE_MONITOR_ACTION_TYPE, DeleteMonitorRequest(monitorResponse2.id, WriteRequest.RefreshPolicy.IMMEDIATE) - ).get() + client() + .execute( + AlertingActions.DELETE_MONITOR_ACTION_TYPE, + DeleteMonitorRequest(monitorResponse2.id, WriteRequest.RefreshPolicy.IMMEDIATE), + ).get() // Expect second concrete queryIndex 
to be deleted since that one was only used by this monitor getIndexResponse = - client().admin().indices().getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")).get() + client() + .admin() + .indices() + .getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")) + .get() assertEquals(0, getIndexResponse.indices.size) } fun `test queryIndex rollover failure source_index field count over limit`() { - val testSourceIndex = "test_source_index" createIndex(testSourceIndex, Settings.EMPTY) val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(testSourceIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) // This doc should create 999 fields in mapping, only 1 field less then limit val docPayload: StringBuilder = StringBuilder(100000) docPayload.append("{") @@ -2302,10 +2561,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(testSourceIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) // Create doc with 11 fields val docPayload: StringBuilder = StringBuilder(1000) docPayload.append("{") @@ -2344,7 +2604,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { // Both monitors used same queryIndex. 
Since source index has well below limit amount of fields in mappings, // we expect only 1 backing queryIndex val getIndexResponse: GetIndexResponse = - client().admin().indices().getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")).get() + client() + .admin() + .indices() + .getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")) + .get() assertEquals(1, getIndexResponse.indices.size) // Now we'll verify that execution of both monitors work indexDoc(testSourceIndex, "3", testDoc) @@ -2380,12 +2644,13 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(testSourceIndex1), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(), - owner = "alerting" - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(), + owner = "alerting", + ) // This doc should create close to 10000 (limit) fields in index mapping. 
It's easier to add mappings like this then via api val docPayload: StringBuilder = StringBuilder(100000) docPayload.append("{") @@ -2402,23 +2667,33 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { monitor = monitorResponse!!.monitor // Update monitor and change input - val updatedMonitor = monitor.copy( - inputs = listOf( - DocLevelMonitorInput("description", listOf(testSourceIndex2), listOf(docQuery)) + val updatedMonitor = + monitor.copy( + inputs = + listOf( + DocLevelMonitorInput("description", listOf(testSourceIndex2), listOf(docQuery)), + ), ) - ) updateMonitor(updatedMonitor, updatedMonitor.id) assertFalse(monitorResponse?.id.isNullOrEmpty()) // Expect queryIndex to rollover after setting new source_index with close to limit amount of fields in mappings var getIndexResponse: GetIndexResponse = - client().admin().indices().getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")).get() + client() + .admin() + .indices() + .getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")) + .get() assertEquals(2, getIndexResponse.indices.size) deleteMonitor(updatedMonitor.id) waitUntil { getIndexResponse = - client().admin().indices().getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")).get() + client() + .admin() + .indices() + .getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")) + .get() return@waitUntil getIndexResponse.indices.isEmpty() } assertEquals(0, getIndexResponse.indices.size) @@ -2430,10 +2705,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(testSourceIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + var monitor = + 
randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) // This doc should create 12000 fields in index mapping. It's easier to add mappings like this then via api val docPayload: StringBuilder = StringBuilder(100000) docPayload.append("{") @@ -2450,17 +2726,27 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { // Expect queryIndex to rollover after setting new source_index with close to limit amount of fields in mappings var getIndexResponse: GetIndexResponse = - client().admin().indices().getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")).get() + client() + .admin() + .indices() + .getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")) + .get() assertEquals(1, getIndexResponse.indices.size) - val field_max_limit = getIndexResponse - .getSetting(DOC_LEVEL_QUERIES_INDEX + "-000001", MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.key).toInt() + val fieldMaxLimit = + getIndexResponse + .getSetting(DOC_LEVEL_QUERIES_INDEX + "-000001", MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.key) + .toInt() - assertEquals(10000 + DocLevelMonitorQueries.QUERY_INDEX_BASE_FIELDS_COUNT, field_max_limit) + assertEquals(10000 + DocLevelMonitorQueries.QUERY_INDEX_BASE_FIELDS_COUNT, fieldMaxLimit) deleteMonitor(monitorResponse.id) waitUntil { getIndexResponse = - client().admin().indices().getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")).get() + client() + .admin() + .indices() + .getIndex(GetIndexRequest().indices(ScheduledJob.DOC_LEVEL_QUERIES_INDEX + "*")) + .get() return@waitUntil getIndexResponse.indices.isEmpty() } assertEquals(0, getIndexResponse.indices.size) @@ -2476,10 +2762,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(testSourceIndex), 
listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) // This doc should create 999 fields in mapping, only 1 field less then limit val docPayload = "{\"test_field\" : \"us-west-2\" }" // Create monitor @@ -2503,50 +2790,58 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { // TODO - revisit single node integ tests setup to figure out why we cannot have multiple test classes implementing it fun `test execute workflow with custom alerts and finding index when bucket monitor is used in chained finding of doc monitor`() { - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field_1").field("test_field_1") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field_1").field("test_field_1"), + ) val customAlertsHistoryIndex = "custom_alerts_history_index" val customAlertsHistoryIndexPattern = "" val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) // Bucket level monitor will reduce the size of matched doc ids on those that belong // to a bucket that contains more than 1 document after term grouping - val triggerScript = """ + val triggerScript = + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - 
name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null, + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val bucketCustomAlertsIndex = "custom_alerts_index" val bucketCustomFindingsIndex = "custom_findings_index" val bucketCustomFindingsIndexPattern = "custom_findings_index-1" - val bucketLevelMonitorResponse = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources( - findingsEnabled = true, - alertsIndex = bucketCustomAlertsIndex, - findingsIndex = bucketCustomFindingsIndex, - findingsIndexPattern = bucketCustomFindingsIndexPattern - ) - ) - )!! + val bucketLevelMonitorResponse = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = + DataSources( + findingsEnabled = true, + alertsIndex = bucketCustomAlertsIndex, + findingsIndex = bucketCustomFindingsIndex, + findingsIndexPattern = bucketCustomFindingsIndexPattern, + ), + ), + )!! 
val docQuery1 = DocLevelQuery(query = "test_field_1:\"test_value_2\"", name = "1", fields = listOf()) val docQuery2 = DocLevelQuery(query = "test_field_1:\"test_value_1\"", name = "2", fields = listOf()) @@ -2556,23 +2851,26 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docCustomAlertsIndex = "custom_alerts_index" val docCustomFindingsIndex = "custom_findings_index" val docCustomFindingsIndexPattern = "custom_findings_index-1" - var docLevelMonitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(docTrigger), - dataSources = DataSources( - alertsIndex = docCustomAlertsIndex, - findingsIndex = docCustomFindingsIndex, - findingsIndexPattern = docCustomFindingsIndexPattern + var docLevelMonitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(docTrigger), + dataSources = + DataSources( + alertsIndex = docCustomAlertsIndex, + findingsIndex = docCustomFindingsIndex, + findingsIndexPattern = docCustomFindingsIndexPattern, + ), ) - ) val docLevelMonitorResponse = createMonitor(docLevelMonitor)!! // 1. bucketMonitor (chainedFinding = null) 2. docMonitor (chainedFinding = bucketMonitor) - var workflow = randomWorkflow( - monitorIds = listOf(bucketLevelMonitorResponse.id, docLevelMonitorResponse.id), - enabled = false, - auditDelegateMonitorAlerts = false - ) + var workflow = + randomWorkflow( + monitorIds = listOf(bucketLevelMonitorResponse.id, docLevelMonitorResponse.id), + enabled = false, + auditDelegateMonitorAlerts = false, + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -2585,8 +2883,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "test_value_1", // adding duplicate to verify aggregation "test_value_2", "test_value_2", - "test_value_3" - ) + "test_value_3", + ), ) val workflowId = workflowResponse.id @@ -2600,8 +2898,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val searchResult = monitorRunResults.inputResults.results.first() @Suppress("UNCHECKED_CAST") - val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg") - ?.get("buckets") as List> + val buckets = + searchResult + .stringMap("aggregations") + ?.stringMap("composite_agg") + ?.get("buckets") as List> assertEquals("Incorrect search result", 3, buckets.size) val getAlertsResponse = assertAlerts(bucketLevelMonitorResponse.id, bucketCustomAlertsIndex, 2, workflowId) @@ -2628,99 +2929,116 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery1 = DocLevelQuery(query = "test_field_1:\"test_value_2\"", name = "1", fields = listOf()) val docQuery2 = DocLevelQuery(query = "test_field_1:\"test_value_3\"", name = "2", fields = listOf()) - var docLevelMonitor = randomDocumentLevelMonitor( - inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery1, docQuery2))), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), - dataSources = DataSources( - alertsIndex = "custom_alerts_index", - findingsIndex = "custom_findings_index", - findingsIndexPattern = "custom_findings_index-1" + var docLevelMonitor = + randomDocumentLevelMonitor( + inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery1, docQuery2))), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), + dataSources = + DataSources( + alertsIndex = "custom_alerts_index", + findingsIndex = "custom_findings_index", + findingsIndexPattern = "custom_findings_index-1", + ), ) - ) val 
docLevelMonitorResponse = createMonitor(docLevelMonitor)!! - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field_1").field("test_field_1") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field_1").field("test_field_1"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) // Bucket level monitor will reduce the size of matched doc ids on those that belong to a bucket that contains more than 1 document after term grouping - val triggerScript = """ + val triggerScript = + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null, - ) - ) - - val bucketLevelMonitorResponse = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources( - findingsEnabled = true, - alertsIndex = "custom_alerts_index", - findingsIndex = "custom_findings_index", - findingsIndexPattern = "custom_findings_index-1" - ) + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - )!! 
- var docLevelMonitor1 = randomDocumentLevelMonitor( - // Match the documents with test_field_1: test_value_3 - inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery2))), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), - dataSources = DataSources( - findingsEnabled = true, - alertsIndex = "custom_alerts_index_1", - findingsIndex = "custom_findings_index_1", - findingsIndexPattern = "custom_findings_index_1-1" + val bucketLevelMonitorResponse = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = + DataSources( + findingsEnabled = true, + alertsIndex = "custom_alerts_index", + findingsIndex = "custom_findings_index", + findingsIndexPattern = "custom_findings_index-1", + ), + ), + )!! + + var docLevelMonitor1 = + randomDocumentLevelMonitor( + // Match the documents with test_field_1: test_value_3 + inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery2))), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), + dataSources = + DataSources( + findingsEnabled = true, + alertsIndex = "custom_alerts_index_1", + findingsIndex = "custom_findings_index_1", + findingsIndexPattern = "custom_findings_index_1-1", + ), ) - ) val docLevelMonitorResponse1 = createMonitor(docLevelMonitor1)!! 
- val queryMonitorInput = SearchInput( - indices = listOf(index), - query = SearchSourceBuilder().query( - QueryBuilders - .rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") + val queryMonitorInput = + SearchInput( + indices = listOf(index), + query = + SearchSourceBuilder().query( + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis"), + ), ) - ) - val queryTriggerScript = """ + val queryTriggerScript = + """ return ctx.results[0].hits.hits.size() > 0 - """.trimIndent() + """.trimIndent() val queryLevelTrigger = randomQueryLevelTrigger(condition = Script(queryTriggerScript)) val queryMonitorResponse = createMonitor(randomQueryLevelMonitor(inputs = listOf(queryMonitorInput), triggers = listOf(queryLevelTrigger)))!! // 1. docMonitor (chainedFinding = null) 2. bucketMonitor (chainedFinding = docMonitor) 3. docMonitor (chainedFinding = bucketMonitor) 4. queryMonitor (chainedFinding = docMonitor 3) - var workflow = randomWorkflow( - monitorIds = listOf( - docLevelMonitorResponse.id, - bucketLevelMonitorResponse.id, - docLevelMonitorResponse1.id, - queryMonitorResponse.id - ), - auditDelegateMonitorAlerts = false - ) + var workflow = + randomWorkflow( + monitorIds = + listOf( + docLevelMonitorResponse.id, + bucketLevelMonitorResponse.id, + docLevelMonitorResponse1.id, + queryMonitorResponse.id, + ), + auditDelegateMonitorAlerts = false, + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -2734,8 +3052,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "test_value_2", "test_value_2", "test_value_3", - "test_value_3" - ) + "test_value_3", + ), ) val workflowId = workflowResponse.id @@ -2765,9 +3083,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { docLevelMonitorResponse.monitor.dataSources.findingsIndex, 4, 4, - listOf("3", "4", "5", "6") + listOf("3", "4", "5", "6"), ) } + // Verify second bucket level monitor execution, alerts and findings bucketLevelMonitorResponse.monitor.name -> { val searchResult = monitorRunResults.inputResults.results.first() @@ -2775,7 +3094,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { @Suppress("UNCHECKED_CAST") val buckets = searchResult - .stringMap("aggregations")?.stringMap("composite_agg") + .stringMap("aggregations") + ?.stringMap("composite_agg") ?.get("buckets") as List> assertEquals("Incorrect search result", 2, buckets.size) @@ -2784,7 +3104,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { bucketLevelMonitorResponse.id, bucketLevelMonitorResponse.monitor.dataSources.alertsIndex, 2, - workflowId + workflowId, ) assertAcknowledges(getAlertsResponse.alerts, bucketLevelMonitorResponse.id, 2) assertFindings( @@ -2792,9 +3112,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { bucketLevelMonitorResponse.monitor.dataSources.findingsIndex, 1, 4, - listOf("3", "4", "5", "6") + listOf("3", "4", "5", "6"), ) } + // Verify third doc level monitor execution, alerts and findings docLevelMonitorResponse1.monitor.name -> { assertEquals(1, monitorRunResults.inputResults.results.size) @@ -2814,9 +3135,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { docLevelMonitorResponse1.monitor.dataSources.findingsIndex, 2, 2, - listOf("5", "6") + listOf("5", "6"), ) } + // Verify fourth query level monitor execution queryMonitorResponse.monitor.name -> { 
assertEquals(1, monitorRunResults.inputResults.results.size) @@ -2827,16 +3149,16 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { ( ( monitorRunResults.inputResults.results[0]["hits"] as kotlin.collections.Map - )["total"] as kotlin.collections.Map - )["value"] + )["total"] as kotlin.collections.Map + )["value"] assertEquals(2, totalHits) @Suppress("UNCHECKED_CAST") val docIds = ( ( monitorRunResults.inputResults.results[0]["hits"] as kotlin.collections.Map - )["hits"] as List> - ).map { it["_id"]!! } + )["hits"] as List> + ).map { it["_id"]!! } assertEquals(listOf("5", "6"), docIds.sorted()) } } @@ -2850,15 +3172,19 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { workflowId: String, ): GetAlertsResponse { val table = Table("asc", "id", null, alertSize, 0, "") - val getAlertsResponse = client() - .execute( - AlertingActions.GET_ALERTS_ACTION_TYPE, - GetAlertsRequest( - table, "ALL", "ALL", monitorId, customAlertsIndex, - workflowIds = listOf(workflowId) - ) - ) - .get() + val getAlertsResponse = + client() + .execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + GetAlertsRequest( + table, + "ALL", + "ALL", + monitorId, + customAlertsIndex, + workflowIds = listOf(workflowId), + ), + ).get() assertTrue(getAlertsResponse != null) assertTrue(getAlertsResponse.alerts.size == alertSize) return getAlertsResponse @@ -2871,15 +3197,17 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertsIndex1 = "custom_alerts_index" val customFindingsIndex1 = "custom_findings_index" val customFindingsIndexPattern1 = "custom_findings_index-1" - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1), - dataSources = DataSources( - alertsIndex = customAlertsIndex1, - findingsIndex = customFindingsIndex1, - findingsIndexPattern = customFindingsIndexPattern1 + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + dataSources = + 
DataSources( + alertsIndex = customAlertsIndex1, + findingsIndex = customFindingsIndex1, + findingsIndexPattern = customFindingsIndexPattern1, + ), ) - ) val monitorResponse = createMonitor(monitor1)!! val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4", fields = listOf()) @@ -2888,21 +3216,25 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertsIndex2 = "custom_alerts_index_2" val customFindingsIndex2 = "custom_findings_index_2" val customFindingsIndexPattern2 = "custom_findings_index-2" - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput2), - triggers = listOf(trigger2), - dataSources = DataSources( - alertsIndex = customAlertsIndex2, - findingsIndex = customFindingsIndex2, - findingsIndexPattern = customFindingsIndexPattern2 + var monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + dataSources = + DataSources( + alertsIndex = customAlertsIndex2, + findingsIndex = customFindingsIndex2, + findingsIndexPattern = customFindingsIndexPattern2, + ), ) - ) val monitorResponse2 = createMonitor(monitor2)!! - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id), auditDelegateMonitorAlerts = false - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + auditDelegateMonitorAlerts = false, + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -2964,71 +3296,85 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertsIndex1 = "custom_alerts_index" val customFindingsIndex1 = "custom_findings_index" val customFindingsIndexPattern1 = "custom_findings_index-1" - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1), - enabled = false, - dataSources = DataSources( - alertsIndex = customAlertsIndex1, - findingsIndex = customFindingsIndex1, - findingsIndexPattern = customFindingsIndexPattern1 + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + enabled = false, + dataSources = + DataSources( + alertsIndex = customAlertsIndex1, + findingsIndex = customFindingsIndex1, + findingsIndexPattern = customFindingsIndexPattern1, + ), ) - ) val monitorResponse = createMonitor(monitor1)!! val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4", fields = listOf()) val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput2), - triggers = listOf(trigger2), - enabled = false, - dataSources = DataSources( - alertsIndex = customAlertsIndex1, - findingsIndex = customFindingsIndex1, - findingsIndexPattern = customFindingsIndexPattern1 + var monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + enabled = false, + dataSources = + DataSources( + alertsIndex = customAlertsIndex1, + findingsIndex = customFindingsIndex1, + findingsIndexPattern = customFindingsIndexPattern1, + ), ) - ) val monitorResponse2 = createMonitor(monitor2)!! 
val docQuery3 = DocLevelQuery(query = "_id:*", name = "5", fields = listOf()) val docLevelInput3 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery3)) val trigger3 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor3 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput3), - triggers = listOf(trigger3), - enabled = false, - dataSources = DataSources( - alertsIndex = customAlertsIndex1, - findingsIndex = customFindingsIndex1, - findingsIndexPattern = customFindingsIndexPattern1 + var monitor3 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput3), + triggers = listOf(trigger3), + enabled = false, + dataSources = + DataSources( + alertsIndex = customAlertsIndex1, + findingsIndex = customFindingsIndex1, + findingsIndexPattern = customFindingsIndexPattern1, + ), ) - ) val monitorResponse3 = createMonitor(monitor3)!! val d1 = Delegate(1, monitorResponse.id) val d2 = Delegate(2, monitorResponse2.id) - val d3 = Delegate( - 3, monitorResponse3.id, - ChainedMonitorFindings(null, listOf(monitorResponse.id, monitorResponse2.id)) - ) - var workflow = Workflow( - id = "", - name = "test", - enabled = false, - schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), - lastUpdateTime = Instant.now(), - enabledTime = null, - workflowType = Workflow.WorkflowType.COMPOSITE, - user = randomUser(), - inputs = listOf(CompositeInput(org.opensearch.commons.alerting.model.Sequence(listOf(d1, d2, d3)))), - version = -1L, - schemaVersion = 0, - triggers = emptyList(), - auditDelegateMonitorAlerts = false - - ) + val d3 = + Delegate( + 3, + monitorResponse3.id, + ChainedMonitorFindings(null, listOf(monitorResponse.id, monitorResponse2.id)), + ) + var workflow = + Workflow( + id = "", + name = "test", + enabled = false, + schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + lastUpdateTime = Instant.now(), + enabledTime = null, + workflowType = Workflow.WorkflowType.COMPOSITE, + user = randomUser(), + inputs = 
+ listOf( + CompositeInput( + org.opensearch.commons.alerting.model + .Sequence(listOf(d1, d2, d3)), + ), + ), + version = -1L, + schemaVersion = 0, + triggers = emptyList(), + auditDelegateMonitorAlerts = false, + ) val workflowResponse = upsertWorkflow(workflow)!! val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -3079,29 +3425,33 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertsIndex = "custom_alerts_index" val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - alertsIndex = customAlertsIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + alertsIndex = customAlertsIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id), - auditDelegateMonitorAlerts = false - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + auditDelegateMonitorAlerts = false, + ) val workflowResponse = upsertWorkflow(workflow)!! val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) - var workflow1 = randomWorkflow( - monitorIds = listOf(monitorResponse.id), - auditDelegateMonitorAlerts = false - ) + var workflow1 = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + auditDelegateMonitorAlerts = false, + ) val workflowResponse1 = upsertWorkflow(workflow1)!! 
val workflowById1 = searchWorkflow(workflowResponse1.id) assertNotNull(workflowById1) @@ -3142,7 +3492,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals( "Workflow metadata execution id not correct", executeWorkflowResponse.workflowRunResult.executionId, - workflowMetadata!!.latestExecutionId + workflowMetadata!!.latestExecutionId, ) val monitorMetadataId = getDelegateMonitorMetadataId(workflowMetadata, monitorResponse) val monitorMetadata = searchMonitorMetadata(monitorMetadataId) @@ -3166,7 +3516,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals( "Workflow metadata execution id not correct", executeWorkflowResponse1.workflowRunResult.executionId, - workflowMetadata1!!.latestExecutionId + workflowMetadata1!!.latestExecutionId, ) val monitorMetadataId1 = getDelegateMonitorMetadataId(workflowMetadata1, monitorResponse) val monitorMetadata1 = searchMonitorMetadata(monitorMetadataId1) @@ -3181,22 +3531,27 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id), auditDelegateMonitorAlerts = false - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + auditDelegateMonitorAlerts = false, + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) - val workflow1 = randomWorkflow( - monitorIds = listOf(monitorResponse.id), auditDelegateMonitorAlerts = false - ) + val workflow1 = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + auditDelegateMonitorAlerts = false, + ) val workflowResponse1 = upsertWorkflow(workflow1)!! val workflowById1 = searchWorkflow(workflowResponse1.id) assertNotNull(workflowById1) @@ -3236,7 +3591,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals( "Workflow metadata execution id not correct", executeWorkflowResponse.workflowRunResult.executionId, - workflowMetadata!!.latestExecutionId + workflowMetadata!!.latestExecutionId, ) val monitorMetadataId = getDelegateMonitorMetadataId(workflowMetadata, monitorResponse) val monitorMetadata = searchMonitorMetadata(monitorMetadataId) @@ -3247,14 +3602,16 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customFindingsIndexPattern = "custom_findings_index-1" val monitorId = monitorResponse.id updateMonitor( - monitor = monitor.copy( - dataSources = DataSources( - alertsIndex = customAlertsIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern - ) - ), - monitorId + monitor = + monitor.copy( + dataSources = + DataSources( + alertsIndex = customAlertsIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), + ), + monitorId, ) // Execute second workflow @@ -3277,7 +3634,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals( "Workflow metadata execution id not correct", executeWorkflowResponse1.workflowRunResult.executionId, - workflowMetadata1!!.latestExecutionId + workflowMetadata1!!.latestExecutionId, ) val monitorMetadataId1 = getDelegateMonitorMetadataId(workflowMetadata1, monitorResponse) val monitorMetadata1 = searchMonitorMetadata(monitorMetadataId1) @@ -3291,25 +3648,28 @@ class 
MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + ) val monitorResponse = createMonitor(monitor1)!! val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4", fields = listOf()) val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput2), - triggers = listOf(trigger2), - ) + var monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + ) val monitorResponse2 = createMonitor(monitor2)!! - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -3334,7 +3694,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals( "Workflow metadata execution id not correct", executeWorkflowResponse.workflowRunResult.executionId, - workflowMetadata!!.latestExecutionId + workflowMetadata!!.latestExecutionId, ) val monitorMetadataId = getDelegateMonitorMetadataId(workflowMetadata, monitorResponse) val monitorMetadata = searchMonitorMetadata(monitorMetadataId) @@ -3350,7 +3710,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals( "Workflow metadata execution id not correct", executeWorkflowResponse1.workflowRunResult.executionId, - workflowMetadata1!!.latestExecutionId + workflowMetadata1!!.latestExecutionId, ) val monitorMetadataId1 = getDelegateMonitorMetadataId(workflowMetadata1, monitorResponse) assertTrue(monitorMetadataId == monitorMetadataId1) @@ -3362,25 +3722,28 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + ) val monitorResponse = createMonitor(monitor1)!! 
val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4", fields = listOf()) val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput2), - triggers = listOf(trigger2), - ) + var monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + ) val monitorResponse2 = createMonitor(monitor2)!! - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + ) val workflowResponse = upsertWorkflow(workflow)!! val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -3412,47 +3775,55 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } fun `test execute workflow with custom alerts and finding index with bucket and doc monitor bucket monitor used as chained finding`() { - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field_1").field("test_field_1") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field_1").field("test_field_1"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) // Bucket level monitor will reduce the size of matched doc ids on those that belong to a bucket that contains more than 1 document after term grouping - val triggerScript = """ + val triggerScript 
= + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null, + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val bucketCustomAlertsIndex = "custom_alerts_index" val bucketCustomFindingsIndex = "custom_findings_index" val bucketCustomFindingsIndexPattern = "custom_findings_index-1" - val bucketLevelMonitorResponse = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources( - findingsEnabled = true, - alertsIndex = bucketCustomAlertsIndex, - findingsIndex = bucketCustomFindingsIndex, - findingsIndexPattern = bucketCustomFindingsIndexPattern - ) - ) - )!! + val bucketLevelMonitorResponse = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = + DataSources( + findingsEnabled = true, + alertsIndex = bucketCustomAlertsIndex, + findingsIndex = bucketCustomFindingsIndex, + findingsIndexPattern = bucketCustomFindingsIndexPattern, + ), + ), + )!! 
val docQuery1 = DocLevelQuery(query = "test_field_1:\"test_value_2\"", name = "1", fields = listOf()) val docQuery2 = DocLevelQuery(query = "test_field_1:\"test_value_1\"", name = "2", fields = listOf()) @@ -3462,21 +3833,25 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docCustomAlertsIndex = "custom_alerts_index" val docCustomFindingsIndex = "custom_findings_index" val docCustomFindingsIndexPattern = "custom_findings_index-1" - var docLevelMonitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(docTrigger), - dataSources = DataSources( - alertsIndex = docCustomAlertsIndex, - findingsIndex = docCustomFindingsIndex, - findingsIndexPattern = docCustomFindingsIndexPattern + var docLevelMonitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(docTrigger), + dataSources = + DataSources( + alertsIndex = docCustomAlertsIndex, + findingsIndex = docCustomFindingsIndex, + findingsIndexPattern = docCustomFindingsIndexPattern, + ), ) - ) val docLevelMonitorResponse = createMonitor(docLevelMonitor)!! // 1. bucketMonitor (chainedFinding = null) 2. docMonitor (chainedFinding = bucketMonitor) - var workflow = randomWorkflow( - monitorIds = listOf(bucketLevelMonitorResponse.id, docLevelMonitorResponse.id), auditDelegateMonitorAlerts = false - ) + var workflow = + randomWorkflow( + monitorIds = listOf(bucketLevelMonitorResponse.id, docLevelMonitorResponse.id), + auditDelegateMonitorAlerts = false, + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -3489,8 +3864,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "test_value_1", // adding duplicate to verify aggregation "test_value_2", "test_value_2", - "test_value_3" - ) + "test_value_3", + ), ) val workflowId = workflowResponse.id @@ -3504,8 +3879,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val searchResult = monitorRunResults.inputResults.results.first() @Suppress("UNCHECKED_CAST") - val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg") - ?.get("buckets") as List> + val buckets = + searchResult + .stringMap("aggregations") + ?.stringMap("composite_agg") + ?.get("buckets") as List> assertEquals("Incorrect search result", 3, buckets.size) val getAlertsResponse = assertAlerts(bucketLevelMonitorResponse.id, bucketCustomAlertsIndex, alertSize = 2, workflowId) @@ -3532,68 +3910,78 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertIndex = "custom-alert-index" val customAlertHistoryIndex = "custom-alert-history-index" val customAlertHistoryIndexPattern = "" - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field_1").field("test_field_1") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field_1").field("test_field_1"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) // Bucket level monitor will reduce the size of matched doc ids on those that belong to a bucket that contains more than 1 document after 
term grouping - val triggerScript = """ + val triggerScript = + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null, + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) - val bucketLevelMonitorResponse = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources( - alertsIndex = customAlertIndex, - alertsHistoryIndexPattern = customAlertHistoryIndexPattern, - alertsHistoryIndex = customAlertHistoryIndex - - ) - ) - )!! - - val bucketLevelMonitorResponse2 = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources( - alertsIndex = customAlertIndex, - alertsHistoryIndexPattern = customAlertHistoryIndexPattern, - alertsHistoryIndex = customAlertHistoryIndex + val bucketLevelMonitorResponse = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = + DataSources( + alertsIndex = customAlertIndex, + alertsHistoryIndexPattern = customAlertHistoryIndexPattern, + alertsHistoryIndex = customAlertHistoryIndex, + ), + ), + )!! 
+ + val bucketLevelMonitorResponse2 = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = + DataSources( + alertsIndex = customAlertIndex, + alertsHistoryIndexPattern = customAlertHistoryIndexPattern, + alertsHistoryIndex = customAlertHistoryIndex, + ), + ), + )!! - ) + val andTrigger = + randomChainedAlertTrigger( + name = "1And2", + condition = Script("monitor[id=${bucketLevelMonitorResponse.id}] && monitor[id=${bucketLevelMonitorResponse2.id}]"), ) - )!! - - val andTrigger = randomChainedAlertTrigger( - name = "1And2", - condition = Script("monitor[id=${bucketLevelMonitorResponse.id}] && monitor[id=${bucketLevelMonitorResponse2.id}]") - ) // 1. bucketMonitor (chainedFinding = null) 2. docMonitor (chainedFinding = bucketMonitor) - var workflow = randomWorkflow( - monitorIds = listOf(bucketLevelMonitorResponse.id, bucketLevelMonitorResponse2.id), - triggers = listOf(andTrigger) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(bucketLevelMonitorResponse.id, bucketLevelMonitorResponse2.id), + triggers = listOf(andTrigger), + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -3606,8 +3994,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "test_value_1", // adding duplicate to verify aggregation "test_value_2", "test_value_2", - "test_value_3" - ) + "test_value_3", + ), ) val workflowId = workflowResponse.id @@ -3620,72 +4008,84 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertTrue(executeWorkflowResponse.workflowRunResult.triggerResults.containsKey(andTrigger.id)) Assert.assertTrue(executeWorkflowResponse.workflowRunResult.triggerResults[andTrigger.id]!!.triggered) - val auditStateAlerts = getAuditStateAlerts( - alertsIndex = customAlertHistoryIndex, - monitorId = bucketLevelMonitorResponse.id, - executionId = executeWorkflowResponse.workflowRunResult.executionId - ) + val auditStateAlerts = + getAuditStateAlerts( + alertsIndex = customAlertHistoryIndex, + monitorId = bucketLevelMonitorResponse.id, + executionId = executeWorkflowResponse.workflowRunResult.executionId, + ) Assert.assertEquals(auditStateAlerts.size, 2) - val auditStateAlerts2 = getAuditStateAlerts( - alertsIndex = customAlertHistoryIndex, - monitorId = bucketLevelMonitorResponse2.id, - executionId = executeWorkflowResponse.workflowRunResult.executionId - ) + val auditStateAlerts2 = + getAuditStateAlerts( + alertsIndex = customAlertHistoryIndex, + monitorId = bucketLevelMonitorResponse2.id, + executionId = executeWorkflowResponse.workflowRunResult.executionId, + ) Assert.assertEquals(auditStateAlerts2.size, 2) } fun `test chained alerts for bucket level monitors generating audit alerts`() { - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field_1").field("test_field_1") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + 
.lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field_1").field("test_field_1"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) // Bucket level monitor will reduce the size of matched doc ids on those that belong to a bucket that contains more than 1 document after term grouping - val triggerScript = """ + val triggerScript = + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null, + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) - val bucketLevelMonitorResponse = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger) - ) - )!! + val bucketLevelMonitorResponse = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + ), + )!! + + val bucketLevelMonitorResponse2 = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + ), + )!! 
- val bucketLevelMonitorResponse2 = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger) + val andTrigger = + randomChainedAlertTrigger( + name = "1And2", + condition = Script("monitor[id=${bucketLevelMonitorResponse.id}] && monitor[id=${bucketLevelMonitorResponse2.id}]"), ) - )!! - - val andTrigger = randomChainedAlertTrigger( - name = "1And2", - condition = Script("monitor[id=${bucketLevelMonitorResponse.id}] && monitor[id=${bucketLevelMonitorResponse2.id}]") - ) // 1. bucketMonitor (chainedFinding = null) 2. docMonitor (chainedFinding = bucketMonitor) - var workflow = randomWorkflow( - monitorIds = listOf(bucketLevelMonitorResponse.id, bucketLevelMonitorResponse2.id), - triggers = listOf(andTrigger) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(bucketLevelMonitorResponse.id, bucketLevelMonitorResponse2.id), + triggers = listOf(andTrigger), + ) val workflowResponse = upsertWorkflow(workflow)!! val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -3698,8 +4098,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "test_value_1", // adding duplicate to verify aggregation "test_value_2", "test_value_2", - "test_value_3" - ) + "test_value_3", + ), ) val workflowId = workflowResponse.id @@ -3712,18 +4112,20 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertTrue(executeWorkflowResponse.workflowRunResult.triggerResults.containsKey(andTrigger.id)) Assert.assertTrue(executeWorkflowResponse.workflowRunResult.triggerResults[andTrigger.id]!!.triggered) - val auditStateAlerts = getAuditStateAlerts( - alertsIndex = bucketLevelMonitorResponse.monitor.dataSources.alertsHistoryIndex, - monitorId = bucketLevelMonitorResponse.id, - executionId = executeWorkflowResponse.workflowRunResult.executionId - ) + val auditStateAlerts = + getAuditStateAlerts( + alertsIndex = bucketLevelMonitorResponse.monitor.dataSources.alertsHistoryIndex, + 
monitorId = bucketLevelMonitorResponse.id, + executionId = executeWorkflowResponse.workflowRunResult.executionId, + ) Assert.assertEquals(auditStateAlerts.size, 2) - val auditStateAlerts2 = getAuditStateAlerts( - alertsIndex = bucketLevelMonitorResponse.monitor.dataSources.alertsHistoryIndex, - monitorId = bucketLevelMonitorResponse2.id, - executionId = executeWorkflowResponse.workflowRunResult.executionId - ) + val auditStateAlerts2 = + getAuditStateAlerts( + alertsIndex = bucketLevelMonitorResponse.monitor.dataSources.alertsHistoryIndex, + monitorId = bucketLevelMonitorResponse2.id, + executionId = executeWorkflowResponse.workflowRunResult.executionId, + ) Assert.assertEquals(auditStateAlerts2.size, 2) } @@ -3731,99 +4133,116 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery1 = DocLevelQuery(query = "test_field_1:\"test_value_2\"", name = "1", fields = listOf()) val docQuery2 = DocLevelQuery(query = "test_field_1:\"test_value_3\"", name = "2", fields = listOf()) - var docLevelMonitor = randomDocumentLevelMonitor( - inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery1, docQuery2))), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), - dataSources = DataSources( - alertsIndex = "custom_alerts_index", - findingsIndex = "custom_findings_index", - findingsIndexPattern = "custom_findings_index-1" + var docLevelMonitor = + randomDocumentLevelMonitor( + inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery1, docQuery2))), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), + dataSources = + DataSources( + alertsIndex = "custom_alerts_index", + findingsIndex = "custom_findings_index", + findingsIndexPattern = "custom_findings_index-1", + ), ) - ) val docLevelMonitorResponse = createMonitor(docLevelMonitor)!! 
- val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field_1").field("test_field_1") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field_1").field("test_field_1"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) // Bucket level monitor will reduce the size of matched doc ids on those that belong to a bucket that contains more than 1 document after term grouping - val triggerScript = """ + val triggerScript = + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null, - ) - ) - - val bucketLevelMonitorResponse = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources( - findingsEnabled = true, - alertsIndex = "custom_alerts_index", - findingsIndex = "custom_findings_index", - findingsIndexPattern = "custom_findings_index-1" - ) + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - )!! 
- var docLevelMonitor1 = randomDocumentLevelMonitor( - // Match the documents with test_field_1: test_value_3 - inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery2))), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), - dataSources = DataSources( - findingsEnabled = true, - alertsIndex = "custom_alerts_index_1", - findingsIndex = "custom_findings_index_1", - findingsIndexPattern = "custom_findings_index_1-1" + val bucketLevelMonitorResponse = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = + DataSources( + findingsEnabled = true, + alertsIndex = "custom_alerts_index", + findingsIndex = "custom_findings_index", + findingsIndexPattern = "custom_findings_index-1", + ), + ), + )!! + + var docLevelMonitor1 = + randomDocumentLevelMonitor( + // Match the documents with test_field_1: test_value_3 + inputs = listOf(DocLevelMonitorInput("description", listOf(index), listOf(docQuery2))), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), + dataSources = + DataSources( + findingsEnabled = true, + alertsIndex = "custom_alerts_index_1", + findingsIndex = "custom_findings_index_1", + findingsIndexPattern = "custom_findings_index_1-1", + ), ) - ) val docLevelMonitorResponse1 = createMonitor(docLevelMonitor1)!! 
- val queryMonitorInput = SearchInput( - indices = listOf(index), - query = SearchSourceBuilder().query( - QueryBuilders - .rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") + val queryMonitorInput = + SearchInput( + indices = listOf(index), + query = + SearchSourceBuilder().query( + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis"), + ), ) - ) - val queryTriggerScript = """ + val queryTriggerScript = + """ return ctx.results[0].hits.hits.size() > 0 - """.trimIndent() + """.trimIndent() val queryLevelTrigger = randomQueryLevelTrigger(condition = Script(queryTriggerScript)) val queryMonitorResponse = createMonitor(randomQueryLevelMonitor(inputs = listOf(queryMonitorInput), triggers = listOf(queryLevelTrigger)))!! // 1. docMonitor (chainedFinding = null) 2. bucketMonitor (chainedFinding = docMonitor) 3. docMonitor (chainedFinding = bucketMonitor) 4. queryMonitor (chainedFinding = docMonitor 3) - var workflow = randomWorkflow( - monitorIds = listOf( - docLevelMonitorResponse.id, - bucketLevelMonitorResponse.id, - docLevelMonitorResponse1.id, - queryMonitorResponse.id - ), - auditDelegateMonitorAlerts = false - ) + var workflow = + randomWorkflow( + monitorIds = + listOf( + docLevelMonitorResponse.id, + bucketLevelMonitorResponse.id, + docLevelMonitorResponse1.id, + queryMonitorResponse.id, + ), + auditDelegateMonitorAlerts = false, + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -3837,8 +4256,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "test_value_2", "test_value_2", "test_value_3", - "test_value_3" - ) + "test_value_3", + ), ) val workflowId = workflowResponse.id @@ -3865,7 +4284,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { docLevelMonitorResponse.id, docLevelMonitorResponse.monitor.dataSources.alertsIndex, alertSize = 4, - workflowId = workflowId + workflowId = workflowId, ) assertAcknowledges(getAlertsResponse.alerts, docLevelMonitorResponse.id, 4) assertFindings( @@ -3873,9 +4292,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { docLevelMonitorResponse.monitor.dataSources.findingsIndex, 4, 4, - listOf("3", "4", "5", "6") + listOf("3", "4", "5", "6"), ) } + // Verify second bucket level monitor execution, alerts and findings bucketLevelMonitorResponse.monitor.name -> { val searchResult = monitorRunResults.inputResults.results.first() @@ -3883,7 +4303,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { @Suppress("UNCHECKED_CAST") val buckets = searchResult - .stringMap("aggregations")?.stringMap("composite_agg") + .stringMap("aggregations") + ?.stringMap("composite_agg") ?.get("buckets") as List> assertEquals("Incorrect search result", 2, buckets.size) @@ -3892,7 +4313,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { bucketLevelMonitorResponse.id, bucketLevelMonitorResponse.monitor.dataSources.alertsIndex, alertSize = 2, - workflowId + workflowId, ) assertAcknowledges(getAlertsResponse.alerts, bucketLevelMonitorResponse.id, 2) assertFindings( @@ -3900,9 +4321,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { bucketLevelMonitorResponse.monitor.dataSources.findingsIndex, 1, 4, - listOf("3", "4", "5", "6") + listOf("3", "4", "5", "6"), ) } + // Verify third doc level monitor execution, alerts and findings docLevelMonitorResponse1.monitor.name -> { 
assertEquals(1, monitorRunResults.inputResults.results.size) @@ -3919,7 +4341,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { docLevelMonitorResponse1.id, docLevelMonitorResponse1.monitor.dataSources.alertsIndex, alertSize = 2, - workflowId + workflowId, ) assertAcknowledges(getAlertsResponse.alerts, docLevelMonitorResponse1.id, 2) assertFindings( @@ -3927,9 +4349,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { docLevelMonitorResponse1.monitor.dataSources.findingsIndex, 2, 2, - listOf("5", "6") + listOf("5", "6"), ) } + // Verify fourth query level monitor execution queryMonitorResponse.monitor.name -> { assertEquals(1, monitorRunResults.inputResults.results.size) @@ -3941,8 +4364,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { ( monitorRunResults.inputResults.results[0]["hits"] as kotlin.collections.Map - )["total"] as kotlin.collections.Map - )["value"] + )["total"] as kotlin.collections.Map + )["value"] assertEquals(2, totalHits) @Suppress("UNCHECKED_CAST") val docIds = @@ -3950,9 +4373,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { ( monitorRunResults.inputResults.results[0]["hits"] as kotlin.collections.Map - )["hits"] as List> - ) - .map { it["_id"]!! } + )["hits"] as List> + ).map { it["_id"]!! 
} assertEquals(listOf("5", "6"), docIds.sorted()) } } @@ -3960,20 +4382,26 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } fun `test execute workflow input error`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id), auditDelegateMonitorAlerts = false - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + auditDelegateMonitorAlerts = false, + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -3989,21 +4417,26 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } fun `test execute workflow wrong workflow id`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + val workflowRequest = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflowRequest)!! 
val workflowId = workflowResponse.id val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) @@ -4048,15 +4481,19 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val searchRequest = SearchRequest(alertsIndex) val boolQueryBuilder = QueryBuilders.boolQuery() boolQueryBuilder.must(TermQueryBuilder("monitor_id", monitorId)) - if (executionId.isNullOrEmpty() == false) + if (executionId.isNullOrEmpty() == false) { boolQueryBuilder.must(TermQueryBuilder("execution_id", executionId)) + } searchRequest.source().query(boolQueryBuilder) val searchResponse = client().search(searchRequest).get() return searchResponse.hits.map { hit -> - val xcp = XContentHelper.createParser( - xContentRegistry(), LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + xContentRegistry(), + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) val alert = Alert.parse(xcp, hit.id, hit.version) alert @@ -4073,20 +4510,20 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val alerts = searchAlerts(monitorId, alertsIndex!!, executionId = executionId) assertEquals("Alert saved for test monitor", alertSize, alerts.size) val table = Table("asc", "id", null, alertSize, 0, "") - var getAlertsResponse = client() - .execute( - AlertingActions.GET_ALERTS_ACTION_TYPE, - GetAlertsRequest(table, "ALL", "ALL", null, alertsIndex) - ) - .get() + var getAlertsResponse = + client() + .execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + GetAlertsRequest(table, "ALL", "ALL", null, alertsIndex), + ).get() assertTrue(getAlertsResponse != null) assertTrue(getAlertsResponse.alerts.size == alertSize) - getAlertsResponse = client() - .execute( - AlertingActions.GET_ALERTS_ACTION_TYPE, - GetAlertsRequest(table, "ALL", "ALL", monitorId, null, workflowIds = listOf(workflowId)) - ) - .get() + 
getAlertsResponse = + client() + .execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + GetAlertsRequest(table, "ALL", "ALL", monitorId, null, workflowIds = listOf(workflowId)), + ).get() assertTrue(getAlertsResponse != null) assertTrue(getAlertsResponse.alerts.size == alertSize) @@ -4099,10 +4536,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { alertSize: Int, ) { val alertIds = alerts.map { it.id } - val acknowledgeAlertResponse = client().execute( - AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_TYPE, - AcknowledgeAlertRequest(monitorId, alertIds, WriteRequest.RefreshPolicy.IMMEDIATE) - ).get() + val acknowledgeAlertResponse = + client() + .execute( + AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_TYPE, + AcknowledgeAlertRequest(monitorId, alertIds, WriteRequest.RefreshPolicy.IMMEDIATE), + ).get() assertEquals(alertSize, acknowledgeAlertResponse.acknowledged.size) } @@ -4114,23 +4553,36 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { ) { val alertIds = alerts.map { it.id }.toMutableList() val acknowledgeAlertResponse = ackChainedAlerts(alertIds, workflowId) - assertTrue(acknowledgeAlertResponse.acknowledged.stream().map { it.id }.collect(Collectors.toList()).containsAll(alertIds)) + assertTrue( + acknowledgeAlertResponse.acknowledged + .stream() + .map { it.id } + .collect(Collectors.toList()) + .containsAll(alertIds), + ) assertEquals(alertSize, acknowledgeAlertResponse.acknowledged.size) alertIds.add("dummy") val redundantAck = ackChainedAlerts(alertIds, workflowId) Assert.assertTrue(redundantAck.acknowledged.isEmpty()) Assert.assertTrue(redundantAck.missing.contains("dummy")) alertIds.remove("dummy") - Assert.assertTrue(redundantAck.failed.map { it.id }.toList().containsAll(alertIds)) + Assert.assertTrue( + redundantAck.failed + .map { it.id } + .toList() + .containsAll(alertIds), + ) } - private fun ackChainedAlerts(alertIds: List, workflowId: String): AcknowledgeAlertResponse { - - return client().execute( - 
AlertingActions.ACKNOWLEDGE_CHAINED_ALERTS_ACTION_TYPE, - AcknowledgeChainedAlertRequest(workflowId, alertIds) - ).get() - } + private fun ackChainedAlerts( + alertIds: List, + workflowId: String, + ): AcknowledgeAlertResponse = + client() + .execute( + AlertingActions.ACKNOWLEDGE_CHAINED_ALERTS_ACTION_TYPE, + AcknowledgeChainedAlertRequest(workflowId, alertIds), + ).get() private fun assertAuditStateAlerts( monitorId: String, @@ -4138,10 +4590,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { ) { alerts.forEach { Assert.assertEquals(it.state, Alert.State.AUDIT) } val alertIds = alerts.stream().map { it.id }.collect(Collectors.toList()) - val ack = client().execute( - AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_TYPE, - AcknowledgeAlertRequest(monitorId, alertIds, WriteRequest.RefreshPolicy.IMMEDIATE) - ).get() + val ack = + client() + .execute( + AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_TYPE, + AcknowledgeAlertRequest(monitorId, alertIds, WriteRequest.RefreshPolicy.IMMEDIATE), + ).get() Assert.assertTrue(ack.acknowledged.isEmpty()) Assert.assertTrue(ack.missing.containsAll(alertIds)) Assert.assertTrue(ack.failed.isEmpty()) @@ -4150,43 +4604,50 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { fun `test execute workflow with bucket-level and doc-level chained monitors`() { createTestIndex(TEST_HR_INDEX) - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field_1").field("test_field_1") - ) + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field_1").field("test_field_1"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) - val input = SearchInput( - indices = listOf(TEST_HR_INDEX), - query = SearchSourceBuilder().size(0).query(QueryBuilders.matchAllQuery()).aggregation(compositeAgg) - ) - val triggerScript = """ + val input = + SearchInput( + indices = listOf(TEST_HR_INDEX), + query = 
SearchSourceBuilder().size(0).query(QueryBuilders.matchAllQuery()).aggregation(compositeAgg), + ) + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null - ), - actions = listOf() - ) - val bucketMonitor = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger) + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), + actions = listOf(), + ) + val bucketMonitor = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + ), ) - ) assertNotNull("The bucket monitor was not created", bucketMonitor) val docQuery1 = DocLevelQuery(query = "test_field_1:\"a\"", name = "3", fields = listOf()) - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(DocLevelMonitorInput("description", listOf(TEST_HR_INDEX), listOf(docQuery1))), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)) - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(DocLevelMonitorInput("description", listOf(TEST_HR_INDEX), listOf(docQuery1))), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), + ) val docMonitor = createMonitor(monitor1)!! 
assertNotNull("The doc level monitor was not created", docMonitor) @@ -4203,7 +4664,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "test_field_1": "a", "accessible": true } - """.trimIndent() + """.trimIndent(), ) // Add a second doc that is not accessible to the user @@ -4215,7 +4676,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "test_field_1": "b", "accessible": false } - """.trimIndent() + """.trimIndent(), ) indexDoc( @@ -4226,7 +4687,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "test_field_1": "c", "accessible": true } - """.trimIndent() + """.trimIndent(), ) val executeResult = executeWorkflow(id = workflowResponse!!.id) @@ -4243,17 +4704,19 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val customAlertsIndex = "custom_alerts_index" val customAlertsHistoryIndex = "custom_alerts_history_index" val customAlertsHistoryIndexPattern = "" - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1), - dataSources = DataSources( - findingsIndex = customFindingsIndex1, - findingsIndexPattern = customFindingsIndexPattern1, - alertsIndex = customAlertsIndex, - alertsHistoryIndex = customAlertsHistoryIndex, - alertsHistoryIndexPattern = customAlertsHistoryIndexPattern + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + dataSources = + DataSources( + findingsIndex = customFindingsIndex1, + findingsIndexPattern = customFindingsIndexPattern1, + alertsIndex = customAlertsIndex, + alertsHistoryIndex = customAlertsHistoryIndex, + alertsHistoryIndexPattern = customAlertsHistoryIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor1)!! 
val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4", fields = listOf()) @@ -4261,31 +4724,36 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex2 = "custom_findings_index_2" val customFindingsIndexPattern2 = "custom_findings_index-2" - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput2), - triggers = listOf(trigger2), - dataSources = DataSources( - findingsIndex = customFindingsIndex2, - findingsIndexPattern = customFindingsIndexPattern2, - alertsIndex = customAlertsIndex, - alertsHistoryIndex = customAlertsHistoryIndex, - alertsHistoryIndexPattern = customAlertsHistoryIndexPattern + var monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + dataSources = + DataSources( + findingsIndex = customFindingsIndex2, + findingsIndexPattern = customFindingsIndexPattern2, + alertsIndex = customAlertsIndex, + alertsHistoryIndex = customAlertsHistoryIndex, + alertsHistoryIndexPattern = customAlertsHistoryIndexPattern, + ), ) - ) val monitorResponse2 = createMonitor(monitor2)!! 
- val andTrigger = randomChainedAlertTrigger( - name = "1And2", - condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]") - ) - val notTrigger = randomChainedAlertTrigger( - name = "Not1OrNot2", - condition = Script("!monitor[id=${monitorResponse.id}] || !monitor[id=${monitorResponse2.id}]") - ) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id), - triggers = listOf(andTrigger, notTrigger) - ) + val andTrigger = + randomChainedAlertTrigger( + name = "1And2", + condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]"), + ) + val notTrigger = + randomChainedAlertTrigger( + name = "Not1OrNot2", + condition = Script("!monitor[id=${monitorResponse.id}] || !monitor[id=${monitorResponse2.id}]"), + ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + triggers = listOf(andTrigger, notTrigger), + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -4366,20 +4834,24 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals( monitor1.dataSources.alertsHistoryIndex, - CompositeWorkflowRunner.getDelegateMonitorAlertIndex(dataSources = monitor1.dataSources, workflow, true) - ) - val alerts = getAuditStateAlerts( - monitorId = monitorResponse.id, executionId = executeWorkflowResponse.workflowRunResult.executionId, - alertsIndex = monitor1.dataSources.alertsHistoryIndex, + CompositeWorkflowRunner.getDelegateMonitorAlertIndex(dataSources = monitor1.dataSources, workflow, true), ) + val alerts = + getAuditStateAlerts( + monitorId = monitorResponse.id, + executionId = executeWorkflowResponse.workflowRunResult.executionId, + alertsIndex = monitor1.dataSources.alertsHistoryIndex, + ) assertAuditStateAlerts(monitorResponse.id, alerts) assertFindings(monitorResponse.id, customFindingsIndex1, 2, 2, listOf("1", "2")) val associatedAlertIds = res.associatedAlerts.map { it.id }.toList() associatedAlertIds.containsAll(alerts.map { it.id }.toList()) - val alerts1 = getAuditStateAlerts( - alertsIndex = monitor2.dataSources.alertsHistoryIndex, monitorId = monitorResponse2.id, - executionId = executeWorkflowResponse.workflowRunResult.executionId, - ) + val alerts1 = + getAuditStateAlerts( + alertsIndex = monitor2.dataSources.alertsHistoryIndex, + monitorId = monitorResponse2.id, + executionId = executeWorkflowResponse.workflowRunResult.executionId, + ) assertAuditStateAlerts(monitorResponse2.id, alerts1) assertFindings(monitorResponse2.id, customFindingsIndex2, 1, 1, listOf("2")) associatedAlertIds.containsAll(alerts1.map { it.id }.toList()) @@ -4392,14 +4864,16 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex1 = "custom_findings_index" val customFindingsIndexPattern1 = "custom_findings_index-1" - var monitor1 = 
randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1), - dataSources = DataSources( - findingsIndex = customFindingsIndex1, - findingsIndexPattern = customFindingsIndexPattern1 + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + dataSources = + DataSources( + findingsIndex = customFindingsIndex1, + findingsIndexPattern = customFindingsIndexPattern1, + ), ) - ) val monitorResponse = createMonitor(monitor1)!! val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4", fields = listOf()) @@ -4407,28 +4881,33 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex2 = "custom_findings_index_2" val customFindingsIndexPattern2 = "custom_findings_index-2" - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput2), - triggers = listOf(trigger2), - dataSources = DataSources( - findingsIndex = customFindingsIndex2, - findingsIndexPattern = customFindingsIndexPattern2 + var monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + dataSources = + DataSources( + findingsIndex = customFindingsIndex2, + findingsIndexPattern = customFindingsIndexPattern2, + ), ) - ) val monitorResponse2 = createMonitor(monitor2)!! 
- val andTrigger = randomChainedAlertTrigger( - name = "1And2", - condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]") - ) - val notTrigger = randomChainedAlertTrigger( - name = "Not1OrNot2", - condition = Script("!monitor[id=${monitorResponse.id}] || !monitor[id=${monitorResponse2.id}]") - ) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id), - triggers = listOf(andTrigger, notTrigger) - ) + val andTrigger = + randomChainedAlertTrigger( + name = "1And2", + condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]"), + ) + val notTrigger = + randomChainedAlertTrigger( + name = "Not1OrNot2", + condition = Script("!monitor[id=${monitorResponse.id}] || !monitor[id=${monitorResponse2.id}]"), + ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + triggers = listOf(andTrigger, notTrigger), + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -4443,27 +4922,30 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { var notTriggerResult = triggerResults[notTrigger.id] Assert.assertTrue(notTriggerResult!!.triggered) Assert.assertFalse(andTriggerResult!!.triggered) - var res = getWorkflowAlerts( - workflowId, - ) + var res = + getWorkflowAlerts( + workflowId, + ) var chainedAlerts = res.alerts Assert.assertTrue(chainedAlerts.size == 1) // verify get alerts api with defaults set in query params returns only chained alerts and not audit alerts val table = Table("asc", "id", null, 1, 0, "") - val getAlertsDefaultParamsResponse = client().execute( - AlertingActions.GET_ALERTS_ACTION_TYPE, - GetAlertsRequest( - table = table, - severityLevel = "ALL", - alertState = "ALL", - monitorId = null, - alertIndex = null, - monitorIds = null, - workflowIds = null, - alertIds = null - ) - ).get() + val getAlertsDefaultParamsResponse = + client() + .execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + GetAlertsRequest( + table = table, + severityLevel = "ALL", + alertState = "ALL", + monitorId = null, + alertIndex = null, + monitorIds = null, + workflowIds = null, + alertIds = null, + ), + ).get() Assert.assertEquals(getAlertsDefaultParamsResponse.alerts.size, 1) Assert.assertTrue(res.associatedAlerts.isEmpty()) verifyAcknowledgeChainedAlerts(chainedAlerts, workflowId, 1) @@ -4508,19 +4990,21 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { notTriggerResult = triggerResults[notTrigger.id] Assert.assertFalse(notTriggerResult!!.triggered) Assert.assertTrue(andTriggerResult!!.triggered) - val getAuditAlertsForMonitor1 = client().execute( - AlertingActions.GET_ALERTS_ACTION_TYPE, - GetAlertsRequest( - table = table, - severityLevel = "ALL", - alertState = "AUDIT", - monitorId = monitorResponse.id, - alertIndex = null, - monitorIds = null, - workflowIds = listOf(workflowId), - alertIds = null - ) - ).get() + val 
getAuditAlertsForMonitor1 = + client() + .execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + GetAlertsRequest( + table = table, + severityLevel = "ALL", + alertState = "AUDIT", + monitorId = monitorResponse.id, + alertIndex = null, + monitorIds = null, + workflowIds = listOf(workflowId), + alertIds = null, + ), + ).get() Assert.assertEquals(getAuditAlertsForMonitor1.alerts.size, 1) res = getWorkflowAlerts(workflowId) chainedAlerts = res.alerts @@ -4540,21 +5024,25 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertEquals( monitor1.dataSources.alertsHistoryIndex, - CompositeWorkflowRunner.getDelegateMonitorAlertIndex(dataSources = monitor1.dataSources, workflow, true) - ) - val alerts = getAuditStateAlerts( - alertsIndex = monitor1.dataSources.alertsHistoryIndex, monitorId = monitorResponse.id, - executionId = executeWorkflowResponse.workflowRunResult.executionId + CompositeWorkflowRunner.getDelegateMonitorAlertIndex(dataSources = monitor1.dataSources, workflow, true), ) + val alerts = + getAuditStateAlerts( + alertsIndex = monitor1.dataSources.alertsHistoryIndex, + monitorId = monitorResponse.id, + executionId = executeWorkflowResponse.workflowRunResult.executionId, + ) val associatedAlertIds = res.associatedAlerts.map { it.id }.toList() associatedAlertIds.containsAll(alerts.map { it.id }.toList()) assertAuditStateAlerts(monitorResponse.id, alerts) assertFindings(monitorResponse.id, customFindingsIndex1, 2, 2, listOf("1", "2")) - val alerts1 = getAuditStateAlerts( - alertsIndex = monitor2.dataSources.alertsHistoryIndex, monitorId = monitorResponse2.id, - executionId = executeWorkflowResponse.workflowRunResult.executionId - ) + val alerts1 = + getAuditStateAlerts( + alertsIndex = monitor2.dataSources.alertsHistoryIndex, + monitorId = monitorResponse2.id, + executionId = executeWorkflowResponse.workflowRunResult.executionId, + ) associatedAlertIds.containsAll(alerts1.map { it.id }.toList()) assertAuditStateAlerts(monitorResponse2.id, 
alerts1) assertFindings(monitorResponse2.id, customFindingsIndex2, 1, 1, listOf("2")) @@ -4572,39 +5060,47 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { fun `test create workflow success`() { val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - val monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) - val monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! 
- val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id), + ) val workflowResponse = upsertWorkflow(workflow)!! assertNotNull("Workflow creation failed", workflowResponse) @@ -4637,45 +5133,55 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals("Delegate2 order not correct", 2, delegate2.order) assertEquals("Delegate2 id not correct", monitorResponse2.id, delegate2.monitorId) assertEquals( - "Delegate2 Chained finding not correct", monitorResponse1.id, delegate2.chainedMonitorFindings!!.monitorId + "Delegate2 Chained finding not correct", + monitorResponse1.id, + delegate2.chainedMonitorFindings!!.monitorId, ) } fun `test update workflow add monitor success`() { val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - val monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) - val monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = 
listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id), + ) val workflowResponse = upsertWorkflow(workflow)!! assertNotNull("Workflow creation failed", workflowResponse) @@ -4686,24 +5192,27 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { var workflowById = searchWorkflow(workflowResponse.id)!! assertNotNull(workflowById) - val monitor3 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor3 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse3 = createMonitor(monitor3)!! - val updatedWorkflowResponse = upsertWorkflow( - randomWorkflow( - monitorIds = listOf(monitorResponse1.id, monitorResponse2.id, monitorResponse3.id) - ), - workflowResponse.id, - RestRequest.Method.PUT - )!! 
+ val updatedWorkflowResponse = + upsertWorkflow( + randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id, monitorResponse3.id), + ), + workflowResponse.id, + RestRequest.Method.PUT, + )!! assertNotNull("Workflow creation failed", updatedWorkflowResponse) assertNotNull(updatedWorkflowResponse.workflow) @@ -4734,7 +5243,9 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals("Delegate2 order not correct", 2, delegate2.order) assertEquals("Delegate2 id not correct", monitorResponse2.id, delegate2.monitorId) assertEquals( - "Delegate2 Chained finding not correct", monitorResponse1.id, delegate2.chainedMonitorFindings!!.monitorId + "Delegate2 Chained finding not correct", + monitorResponse1.id, + delegate2.chainedMonitorFindings!!.monitorId, ) val delegate3 = delegates[2] @@ -4742,45 +5253,55 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals("Delegate3 order not correct", 3, delegate3.order) assertEquals("Delegate3 id not correct", monitorResponse3.id, delegate3.monitorId) assertEquals( - "Delegate3 Chained finding not correct", monitorResponse2.id, delegate3.chainedMonitorFindings!!.monitorId + "Delegate3 Chained finding not correct", + monitorResponse2.id, + delegate3.chainedMonitorFindings!!.monitorId, ) } fun `test update workflow change order of delegate monitors`() { val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - val monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - 
dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) - val monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id), + ) val workflowResponse = upsertWorkflow(workflow)!! assertNotNull("Workflow creation failed", workflowResponse) @@ -4791,13 +5312,14 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { var workflowById = searchWorkflow(workflowResponse.id)!! assertNotNull(workflowById) - val updatedWorkflowResponse = upsertWorkflow( - randomWorkflow( - monitorIds = listOf(monitorResponse2.id, monitorResponse1.id) - ), - workflowResponse.id, - RestRequest.Method.PUT - )!! + val updatedWorkflowResponse = + upsertWorkflow( + randomWorkflow( + monitorIds = listOf(monitorResponse2.id, monitorResponse1.id), + ), + workflowResponse.id, + RestRequest.Method.PUT, + )!! 
assertNotNull("Workflow creation failed", updatedWorkflowResponse) assertNotNull(updatedWorkflowResponse.workflow) @@ -4828,45 +5350,55 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals("Delegate2 order not correct", 2, delegate2.order) assertEquals("Delegate2 id not correct", monitorResponse1.id, delegate2.monitorId) assertEquals( - "Delegate2 Chained finding not correct", monitorResponse2.id, delegate2.chainedMonitorFindings!!.monitorId + "Delegate2 Chained finding not correct", + monitorResponse2.id, + delegate2.chainedMonitorFindings!!.monitorId, ) } fun `test update workflow remove monitor success`() { val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - val monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) - val monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor2 = + 
randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id), + ) val workflowResponse = upsertWorkflow(workflow)!! assertNotNull("Workflow creation failed", workflowResponse) @@ -4877,13 +5409,14 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { var workflowById = searchWorkflow(workflowResponse.id)!! assertNotNull(workflowById) - val updatedWorkflowResponse = upsertWorkflow( - randomWorkflow( - monitorIds = listOf(monitorResponse1.id) - ), - workflowResponse.id, - RestRequest.Method.PUT - )!! + val updatedWorkflowResponse = + upsertWorkflow( + randomWorkflow( + monitorIds = listOf(monitorResponse1.id), + ), + workflowResponse.id, + RestRequest.Method.PUT, + )!! 
assertNotNull("Workflow creation failed", updatedWorkflowResponse) assertNotNull(updatedWorkflowResponse.workflow) @@ -4912,28 +5445,34 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { fun `test update workflow doesn't exist failure`() { val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - val monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse1 = createMonitor(monitor1)!! - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse1.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse1.id), + ) val workflowResponse = upsertWorkflow(workflow)!! 
assertNotNull("Workflow creation failed", workflowResponse) @@ -4943,27 +5482,32 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("Workflow with testId is not found") + it.contains("Workflow with testId is not found"), ) } } } fun `test get workflow`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + val workflowRequest = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflowRequest)!! 
assertNotNull("Workflow creation failed", workflowResponse) @@ -5001,21 +5545,25 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("Workflow not found") + it.contains("Workflow not found"), ) } } } fun `test get workflow for invalid id monitor index exists`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) createMonitor(monitor) // Get workflow for non existing workflow id try { @@ -5024,28 +5572,33 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("Workflow not found") + it.contains("Workflow not found"), ) } } } fun `test delete workflow keeping delegate monitor`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = 
listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + val workflowRequest = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflowRequest)!! val workflowId = workflowResponse.id val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) @@ -5064,7 +5617,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("Workflow not found.") + it.contains("Workflow not found."), ) } } @@ -5080,28 +5633,33 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Expected 0 hits for searchMonitorMetadata, got non-0 results.", - it.contains("List is empty") + it.contains("List is empty"), ) } } } fun `test delete workflow delegate monitor deleted`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + val workflowRequest = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflowRequest)!! 
val workflowId = workflowResponse.id val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) @@ -5120,7 +5678,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("Workflow not found.") + it.contains("Workflow not found."), ) } } @@ -5131,7 +5689,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetMonitor Action error ", - it.contains("Monitor not found") + it.contains("Monitor not found"), ) } } @@ -5143,7 +5701,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Expected 0 hits for searchMonitorMetadata, got non-0 results.", - it.contains("List is empty") + it.contains("List is empty"), ) } } @@ -5153,25 +5711,28 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + ) val monitorResponse = createMonitor(monitor1)!! val docQuery2 = DocLevelQuery(query = "source.ip.v6.v2:16645", name = "4", fields = listOf()) val docLevelInput2 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery2)) val trigger2 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput2), - triggers = listOf(trigger2), - ) + var monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput2), + triggers = listOf(trigger2), + ) val monitorResponse2 = createMonitor(monitor2)!! 
- var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + ) val workflowResponse = upsertWorkflow(workflow)!! val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -5212,7 +5773,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("Workflow not found.") + it.contains("Workflow not found."), ) } } @@ -5224,7 +5785,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetMonitor Action error ", - it.contains("List is empty") + it.contains("List is empty"), ) } } @@ -5236,7 +5797,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetMonitor Action error ", - it.contains("List is empty") + it.contains("List is empty"), ) } } @@ -5248,28 +5809,33 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetMonitor Action error ", - it.contains("List is empty") + it.contains("List is empty"), ) } } } fun `test delete workflow delegate monitor part of another workflow not deleted`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val 
monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + val workflowRequest = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflowRequest)!! val workflowId = workflowResponse.id val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) @@ -5277,9 +5843,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertNotNull(getWorkflowResponse) assertEquals(workflowId, getWorkflowResponse.id) - val workflowRequest2 = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + val workflowRequest2 = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse2 = upsertWorkflow(workflowRequest2)!! val workflowId2 = workflowResponse2.id val getWorkflowResponse2 = getWorkflowById(id = workflowResponse2.id) @@ -5293,7 +5860,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("[Not allowed to delete ${monitorResponse.id} monitors") + it.contains("[Not allowed to delete ${monitorResponse.id} monitors"), ) } } @@ -5302,21 +5869,26 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { } fun `test trying to delete monitor that is part of workflow sequence`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val 
monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + val workflowRequest = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflowRequest)!! val workflowId = workflowResponse.id @@ -5332,7 +5904,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning DeleteMonitor Action error ", - it.contains("Monitor can't be deleted because it is a part of workflow(s)") + it.contains("Monitor can't be deleted because it is a part of workflow(s)"), ) } } @@ -5346,22 +5918,26 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning DeleteWorkflow Action error ", - it.contains("Workflow not found.") + it.contains("Workflow not found."), ) } } } fun `test delete workflow for invalid id monitor index exists`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) createMonitor(monitor) // Try deleting non-existing workflow try { @@ -5370,23 +5946,24 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning DeleteWorkflow Action error ", - it.contains("Workflow not found.") + it.contains("Workflow not found."), ) } } } fun `test create workflow without delegate failure`() { - val workflow = randomWorkflow( - 
monitorIds = Collections.emptyList() - ) + val workflow = + randomWorkflow( + monitorIds = Collections.emptyList(), + ) try { upsertWorkflow(workflow) } catch (e: Exception) { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Delegates list can not be empty.") + it.contains("Delegates list can not be empty."), ) } } @@ -5397,162 +5974,188 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { for (i in 0..25) { monitorsIds.add(UUID.randomUUID().toString()) } - val workflow = randomWorkflow( - monitorIds = monitorsIds - ) + val workflow = + randomWorkflow( + monitorIds = monitorsIds, + ) try { upsertWorkflow(workflow) } catch (e: Exception) { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Delegates list can not be larger then 25.") + it.contains("Delegates list can not be larger then 25."), ) } } } fun `test update workflow without delegate failure`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) - val monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - ) + val monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! 
- var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id), + ) val workflowResponse = upsertWorkflow(workflow)!! assertNotNull("Workflow creation failed", workflowResponse) - workflow = randomWorkflow( - id = workflowResponse.id, - monitorIds = Collections.emptyList() - ) + workflow = + randomWorkflow( + id = workflowResponse.id, + monitorIds = Collections.emptyList(), + ) try { upsertWorkflow(workflow) } catch (e: Exception) { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Delegates list can not be empty.") + it.contains("Delegates list can not be empty."), ) } } } fun `test create workflow duplicate delegate failure`() { - val workflow = randomWorkflow( - monitorIds = listOf("1", "1", "2") - ) + val workflow = + randomWorkflow( + monitorIds = listOf("1", "1", "2"), + ) try { upsertWorkflow(workflow) } catch (e: Exception) { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Duplicate delegates not allowed") + it.contains("Duplicate delegates not allowed"), ) } } } fun `test update workflow duplicate delegate failure`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! 
- var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflow)!! assertNotNull("Workflow creation failed", workflowResponse) - workflow = randomWorkflow( - id = workflowResponse.id, - monitorIds = listOf("1", "1", "2") - ) + workflow = + randomWorkflow( + id = workflowResponse.id, + monitorIds = listOf("1", "1", "2"), + ) try { upsertWorkflow(workflow) } catch (e: Exception) { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Duplicate delegates not allowed") + it.contains("Duplicate delegates not allowed"), ) } } } fun `test create workflow delegate monitor doesn't exist failure`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! 
- val workflow = randomWorkflow( - monitorIds = listOf("-1", monitorResponse.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf("-1", monitorResponse.id), + ) try { upsertWorkflow(workflow) } catch (e: Exception) { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("are not valid monitor ids") + it.contains("are not valid monitor ids"), ) } } } fun `test update workflow delegate monitor doesn't exist failure`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflow)!! 
assertNotNull("Workflow creation failed", workflowResponse) - workflow = randomWorkflow( - id = workflowResponse.id, - monitorIds = listOf("-1", monitorResponse.id) - ) + workflow = + randomWorkflow( + id = workflowResponse.id, + monitorIds = listOf("-1", monitorResponse.id), + ) try { upsertWorkflow(workflow) @@ -5560,81 +6163,92 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("are not valid monitor ids") + it.contains("are not valid monitor ids"), ) } } } fun `test create workflow sequence order not correct failure`() { - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(1, "monitor-2"), - Delegate(2, "monitor-3") - ) - val workflow = randomWorkflowWithDelegates( - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(1, "monitor-2"), + Delegate(2, "monitor-3"), + ) + val workflow = + randomWorkflowWithDelegates( + delegates = delegates, + ) try { upsertWorkflow(workflow) } catch (e: Exception) { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values") + it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values"), ) } } } fun `test update workflow sequence order not correct failure`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = 
listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflow)!! assertNotNull("Workflow creation failed", workflowResponse) - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(1, "monitor-2"), - Delegate(2, "monitor-3") - ) - workflow = randomWorkflowWithDelegates( - id = workflowResponse.id, - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(1, "monitor-2"), + Delegate(2, "monitor-3"), + ) + workflow = + randomWorkflowWithDelegates( + id = workflowResponse.id, + delegates = delegates, + ) try { upsertWorkflow(workflow) } catch (e: Exception) { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values") + it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values"), ) } } } fun `test create workflow chained findings monitor not in sequence failure`() { - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(2, "monitor-2", ChainedMonitorFindings("monitor-1")), - Delegate(3, "monitor-3", ChainedMonitorFindings("monitor-x")) - ) - val workflow = randomWorkflowWithDelegates( - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(2, "monitor-2", ChainedMonitorFindings("monitor-1")), + Delegate(3, "monitor-3", ChainedMonitorFindings("monitor-x")), + ) + val workflow = + randomWorkflowWithDelegates( + delegates = delegates, + ) try { upsertWorkflow(workflow) @@ -5642,89 +6256,105 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Chained Findings Monitor 
monitor-x doesn't exist in sequence") + it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence"), ) } } } fun `test create workflow query monitor chained findings monitor failure`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val docMonitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val docMonitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val docMonitorResponse = createMonitor(docMonitor)!! val queryMonitor = randomQueryLevelMonitor() val queryMonitorResponse = createMonitor(queryMonitor)!! - val workflow = randomWorkflow( - monitorIds = listOf(queryMonitorResponse.id, docMonitorResponse.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(queryMonitorResponse.id, docMonitorResponse.id), + ) try { upsertWorkflow(workflow) } catch (e: Exception) { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Query level monitor can't be part of chained findings") + it.contains("Query level monitor can't be part of chained findings"), ) } } } fun `test create workflow delegate and chained finding monitor different indices failure`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) 
- val docMonitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val docMonitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val docMonitorResponse = createMonitor(docMonitor)!! val index1 = "$index-1" createTestIndex(index1) - val docLevelInput1 = DocLevelMonitorInput( - "description", listOf(index1), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput1 = + DocLevelMonitorInput( + "description", + listOf(index1), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) - val docMonitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger) - ) + val docMonitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger), + ) val docMonitorResponse1 = createMonitor(docMonitor1)!! - val workflow = randomWorkflow( - monitorIds = listOf(docMonitorResponse1.id, docMonitorResponse.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(docMonitorResponse1.id, docMonitorResponse.id), + ) try { upsertWorkflow(workflow) } catch (e: Exception) { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("doesn't query all of chained findings monitor's indices") + it.contains("doesn't query all of chained findings monitor's indices"), ) } } } fun `test create workflow when monitor index not initialized failure`() { - val delegates = listOf( - Delegate(1, "monitor-1") - ) - val workflow = randomWorkflowWithDelegates( - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + ) + val workflow = + randomWorkflowWithDelegates( + delegates = delegates, + ) try { upsertWorkflow(workflow) @@ -5732,39 +6362,46 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow 
Action error ", - it.contains("Monitors not found") + it.contains("Monitors not found"), ) } } } fun `test update workflow chained findings monitor not in sequence failure`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflow)!! 
assertNotNull("Workflow creation failed", workflowResponse) - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(2, "monitor-2", ChainedMonitorFindings("monitor-1")), - Delegate(3, "monitor-3", ChainedMonitorFindings("monitor-x")) - ) - workflow = randomWorkflowWithDelegates( - id = workflowResponse.id, - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(2, "monitor-2", ChainedMonitorFindings("monitor-1")), + Delegate(3, "monitor-3", ChainedMonitorFindings("monitor-x")), + ) + workflow = + randomWorkflowWithDelegates( + id = workflowResponse.id, + delegates = delegates, + ) try { upsertWorkflow(workflow) @@ -5772,21 +6409,23 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence") + it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence"), ) } } } fun `test create workflow chained findings order not correct failure`() { - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(3, "monitor-2", ChainedMonitorFindings("monitor-1")), - Delegate(2, "monitor-3", ChainedMonitorFindings("monitor-2")) - ) - val workflow = randomWorkflowWithDelegates( - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(3, "monitor-2", ChainedMonitorFindings("monitor-1")), + Delegate(2, "monitor-3", ChainedMonitorFindings("monitor-2")), + ) + val workflow = + randomWorkflowWithDelegates( + delegates = delegates, + ) try { upsertWorkflow(workflow) @@ -5794,38 +6433,45 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3") + it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3"), ) } } 
} fun `test update workflow chained findings order not correct failure`() { - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor)!! - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflow)!! assertNotNull("Workflow creation failed", workflowResponse) - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(3, "monitor-2", ChainedMonitorFindings("monitor-1")), - Delegate(2, "monitor-3", ChainedMonitorFindings("monitor-2")) - ) - workflow = randomWorkflowWithDelegates( - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(3, "monitor-2", ChainedMonitorFindings("monitor-1")), + Delegate(2, "monitor-3", ChainedMonitorFindings("monitor-2")), + ) + workflow = + randomWorkflowWithDelegates( + delegates = delegates, + ) try { upsertWorkflow(workflow) @@ -5833,7 +6479,7 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3") + it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3"), ) } } @@ -5841,49 +6487,60 @@ class MonitorDataSourcesIT : 
AlertingSingleNodeTestCase() { fun `test create workflow with chained alert triggers`() { val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - val monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) - val monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + val monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse1 = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! 
- val chainedAlertTrigger1 = randomChainedAlertTrigger( - condition = Script("monitor[id=${monitorResponse1.id}] && monitor[id=${monitorResponse2.id}") - ) - val chainedAlertTrigger2 = randomChainedAlertTrigger( - condition = Script("monitor[id=${monitorResponse1.id}] || monitor[id=${monitorResponse2.id}]") - ) - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse1.id, monitorResponse2.id), - triggers = listOf( - chainedAlertTrigger1, - chainedAlertTrigger2 + val chainedAlertTrigger1 = + randomChainedAlertTrigger( + condition = Script("monitor[id=${monitorResponse1.id}] && monitor[id=${monitorResponse2.id}"), + ) + val chainedAlertTrigger2 = + randomChainedAlertTrigger( + condition = Script("monitor[id=${monitorResponse1.id}] || monitor[id=${monitorResponse2.id}]"), + ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id), + triggers = + listOf( + chainedAlertTrigger1, + chainedAlertTrigger2, + ), ) - ) val workflowResponse = upsertWorkflow(workflow)!! val workflowById = searchWorkflow(workflowResponse.id)!! 
@@ -5895,12 +6552,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals( "Workflow input not correct", (workflowById.triggers.get(0) as ChainedAlertTrigger).condition.idOrCode, - chainedAlertTrigger1.condition.idOrCode + chainedAlertTrigger1.condition.idOrCode, ) assertEquals( "Workflow input not correct", (workflowById.triggers.get(1) as ChainedAlertTrigger).condition.idOrCode, - chainedAlertTrigger2.condition.idOrCode + chainedAlertTrigger2.condition.idOrCode, ) } @@ -5908,29 +6565,34 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + ) + var monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + ) val monitorResponse = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! 
- val andTrigger = randomChainedAlertTrigger( - name = "1And2", - condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]") - ) - val notTrigger = randomChainedAlertTrigger( - name = "Not1OrNot2", - condition = Script("!monitor[id=${monitorResponse.id}] || !monitor[id=${monitorResponse2.id}]") - ) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id), - triggers = listOf(andTrigger) - ) + val andTrigger = + randomChainedAlertTrigger( + name = "1And2", + condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]"), + ) + val notTrigger = + randomChainedAlertTrigger( + name = "Not1OrNot2", + condition = Script("!monitor[id=${monitorResponse.id}] || !monitor[id=${monitorResponse2.id}]"), + ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + triggers = listOf(andTrigger), + ) val workflowResponse = upsertWorkflow(workflow)!! val workflowById = searchWorkflow(workflowResponse.id) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) @@ -5943,16 +6605,18 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { indexDoc(index, "1", testDoc1) val workflowId = workflowById!!.id var executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! - var res = getWorkflowAlerts( - workflowId, - ) + var res = + getWorkflowAlerts( + workflowId, + ) var chainedAlerts = res.alerts Assert.assertTrue(chainedAlerts.size == 1) - val updatedWorkflowResponse = upsertWorkflow( - workflowById.copy(triggers = listOf(notTrigger)), - workflowResponse.id, - RestRequest.Method.PUT - )!! + val updatedWorkflowResponse = + upsertWorkflow( + workflowById.copy(triggers = listOf(notTrigger)), + workflowResponse.id, + RestRequest.Method.PUT, + )!! 
val updatedWorkflow = searchWorkflow(workflowResponse.id) Assert.assertTrue(updatedWorkflow!!.triggers.size == 1) Assert.assertTrue(updatedWorkflow.triggers[0].id == notTrigger.id) @@ -5964,17 +6628,19 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val searchRequest = SearchRequest(AlertIndices.ALERT_HISTORY_ALL) val sr = client().search(searchRequest).get() Assert.assertTrue(sr.hits.hits.size == 3) - val alerts = sr.hits.map { hit -> - val xcp = XContentHelper.createParser( - xContentRegistry(), - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val alert = Alert.parse(xcp, hit.id, hit.version) - alert - } + val alerts = + sr.hits.map { hit -> + val xcp = + XContentHelper.createParser( + xContentRegistry(), + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val alert = Alert.parse(xcp, hit.id, hit.version) + alert + } Assert.assertTrue(alerts.stream().anyMatch { it.state == Alert.State.DELETED && chainedAlerts[0].id == it.id }) } @@ -5982,29 +6648,34 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + ) + var monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = 
listOf(trigger1), + ) val monitorResponse = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! - val andTrigger = randomChainedAlertTrigger( - name = "1And2", - condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]") - ) - val notTrigger = randomChainedAlertTrigger( - name = "Not1OrNot2", - condition = Script("!monitor[id=${monitorResponse.id}] || !monitor[id=${monitorResponse2.id}]") - ) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id), - triggers = listOf(andTrigger) - ) + val andTrigger = + randomChainedAlertTrigger( + name = "1And2", + condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]"), + ) + val notTrigger = + randomChainedAlertTrigger( + name = "Not1OrNot2", + condition = Script("!monitor[id=${monitorResponse.id}] || !monitor[id=${monitorResponse2.id}]"), + ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + triggers = listOf(andTrigger), + ) val workflowResponse = upsertWorkflow(workflow)!! val workflowById = searchWorkflow(workflowResponse.id) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) @@ -6017,9 +6688,10 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { indexDoc(index, "1", testDoc1) val workflowId = workflowById!!.id var executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! 
- var res = getWorkflowAlerts( - workflowId, - ) + var res = + getWorkflowAlerts( + workflowId, + ) var chainedAlerts = res.alerts Assert.assertTrue(chainedAlerts.size == 1) val deleteRes = deleteWorkflow(workflowId, false) @@ -6032,17 +6704,19 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val searchRequest = SearchRequest(AlertIndices.ALERT_HISTORY_ALL) val sr = client().search(searchRequest).get() Assert.assertTrue(sr.hits.hits.size == 3) - val alerts = sr.hits.map { hit -> - val xcp = XContentHelper.createParser( - xContentRegistry(), - LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, - XContentType.JSON - ) - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) - val alert = Alert.parse(xcp, hit.id, hit.version) - alert - } + val alerts = + sr.hits.map { hit -> + val xcp = + XContentHelper.createParser( + xContentRegistry(), + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val alert = Alert.parse(xcp, hit.id, hit.version) + alert + } Assert.assertTrue(alerts.stream().anyMatch { it.state == Alert.State.DELETED && chainedAlerts[0].id == it.id }) } @@ -6050,25 +6724,29 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + ) + var monitor2 = + randomDocumentLevelMonitor( + 
inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + ) val monitorResponse = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! - val andTrigger = randomChainedAlertTrigger( - name = "1And2", - condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]") - ) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id), - triggers = listOf(andTrigger) - ) + val andTrigger = + randomChainedAlertTrigger( + name = "1And2", + condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]"), + ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + triggers = listOf(andTrigger), + ) val workflowResponse = upsertWorkflow(workflow)!! val workflowById = searchWorkflow(workflowResponse.id) val workflowId = workflowById!!.id @@ -6090,35 +6768,51 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { fail("Bulk request to index to test index has failed") } var executeWorkflowResponse = executeWorkflow(workflowById, workflowId, false)!! 
- var res = getWorkflowAlerts( - workflowId = workflowId - ) + var res = + getWorkflowAlerts( + workflowId = workflowId, + ) Assert.assertTrue(executeWorkflowResponse.workflowRunResult.triggerResults[andTrigger.id]!!.triggered) var chainedAlerts = res.alerts Assert.assertTrue(chainedAlerts.size == 1) Assert.assertEquals(res.associatedAlerts.size, 10) - var res100to200 = getWorkflowAlerts( - workflowId = workflowId, - alertIds = listOf(res.alerts[0].id), - table = Table("asc", "monitor_id", null, 100, 100, null) - ) + var res100to200 = + getWorkflowAlerts( + workflowId = workflowId, + alertIds = listOf(res.alerts[0].id), + table = Table("asc", "monitor_id", null, 100, 100, null), + ) Assert.assertEquals(res100to200.associatedAlerts.size, 100) - var res200to300 = getWorkflowAlerts( - workflowId = workflowId, - alertIds = listOf(res.alerts[0].id), - table = Table("asc", "monitor_id", null, 100, 201, null) - ) + var res200to300 = + getWorkflowAlerts( + workflowId = workflowId, + alertIds = listOf(res.alerts[0].id), + table = Table("asc", "monitor_id", null, 100, 201, null), + ) Assert.assertEquals(res200to300.associatedAlerts.size, 100) - var res0to99 = getWorkflowAlerts( - workflowId = workflowId, - alertIds = listOf(res.alerts[0].id), - table = Table("asc", "monitor_id", null, 100, 0, null) - ) + var res0to99 = + getWorkflowAlerts( + workflowId = workflowId, + alertIds = listOf(res.alerts[0].id), + table = Table("asc", "monitor_id", null, 100, 0, null), + ) Assert.assertEquals(res0to99.associatedAlerts.size, 100) - val ids100to200 = res100to200.associatedAlerts.stream().map { it.id }.collect(Collectors.toSet()) - val idsSet0to99 = res0to99.associatedAlerts.stream().map { it.id }.collect(Collectors.toSet()) - val idsSet200to300 = res200to300.associatedAlerts.stream().map { it.id }.collect(Collectors.toSet()) + val ids100to200 = + res100to200.associatedAlerts + .stream() + .map { it.id } + .collect(Collectors.toSet()) + val idsSet0to99 = + res0to99.associatedAlerts + 
.stream() + .map { it.id } + .collect(Collectors.toSet()) + val idsSet200to300 = + res200to300.associatedAlerts + .stream() + .map { it.id } + .collect(Collectors.toSet()) Assert.assertTrue(idsSet0to99.all { it !in ids100to200 }) Assert.assertTrue(idsSet0to99.all { it !in idsSet200to300 }) @@ -6129,24 +6823,28 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery1 = DocLevelQuery(query = "test_field_1:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) - var monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1) - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + ) + var monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + ) val monitorResponse = createMonitor(monitor1)!! val monitorResponse2 = createMonitor(monitor2)!! - val notTrigger = randomChainedAlertTrigger( - name = "Not1OrNot2", - condition = Script("!monitor[id=${monitorResponse.id}] || !monitor[id=${monitorResponse2.id}]") - ) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id), - triggers = listOf(notTrigger) - ) + val notTrigger = + randomChainedAlertTrigger( + name = "Not1OrNot2", + condition = Script("!monitor[id=${monitorResponse.id}] || !monitor[id=${monitorResponse2.id}]"), + ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + triggers = listOf(notTrigger), + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) val workflowId = workflowById!!.id @@ -6166,7 +6864,14 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { Assert.assertTrue(udpdatedActiveAlerts.alerts[0].lastNotificationTime!! > workflowAlerts.alerts[0].lastNotificationTime!!) /** Acknowledge ACTIVE alert*/ - val ackChainedAlerts = ackChainedAlerts(udpdatedActiveAlerts.alerts.stream().map { it.id }.collect(Collectors.toList()), workflowId) + val ackChainedAlerts = + ackChainedAlerts( + udpdatedActiveAlerts.alerts + .stream() + .map { it.id } + .collect(Collectors.toList()), + workflowId, + ) Assert.assertTrue(ackChainedAlerts.acknowledged.size == 1) Assert.assertTrue(ackChainedAlerts.missing.size == 0) Assert.assertTrue(ackChainedAlerts.failed.size == 0) @@ -6212,11 +6917,12 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources(queryIndex = ".opensearch-alerting-custom-queries") - ) + var monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = DataSources(queryIndex = ".opensearch-alerting-custom-queries"), + ) val monitorResponse = createMonitor(monitor) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(MILLIS)) val testDoc = """{ @@ -6235,15 +6941,15 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "1", response!!.getSetting( ".opensearch-alerting-custom-queries-000001", - IndexMetadata.SETTING_NUMBER_OF_SHARDS - ) + IndexMetadata.SETTING_NUMBER_OF_SHARDS, + ), ) assertEquals( "0", response.getSetting( 
".opensearch-alerting-custom-queries-000001", - IndexMetadata.SETTING_NUMBER_OF_REPLICAS - ) + IndexMetadata.SETTING_NUMBER_OF_REPLICAS, + ), ) executeMonitor(monitor, id, false) @@ -6253,61 +6959,69 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "1", response!!.getSetting( ".opensearch-alerting-custom-queries-000001", - IndexMetadata.SETTING_NUMBER_OF_SHARDS - ) + IndexMetadata.SETTING_NUMBER_OF_SHARDS, + ), ) assertEquals( "0", response.getSetting( ".opensearch-alerting-custom-queries-000001", - IndexMetadata.SETTING_NUMBER_OF_REPLICAS - ) + IndexMetadata.SETTING_NUMBER_OF_REPLICAS, + ), ) } fun `test execute workflow when bucket monitor is used in chained finding of ignored doc monitor`() { - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field_1").field("test_field_1") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field_1").field("test_field_1"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(index), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) // Bucket level monitor will reduce the size of matched doc ids on those that belong // to a bucket that contains more than 1 document after term grouping - val triggerScript = """ + val triggerScript = + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null, + trigger = + 
trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val bucketCustomAlertsIndex = "custom_alerts_index" val bucketCustomFindingsIndex = "custom_findings_index" val bucketCustomFindingsIndexPattern = "custom_findings_index-1" - val bucketLevelMonitorResponse = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources( - findingsEnabled = true, - alertsIndex = bucketCustomAlertsIndex, - findingsIndex = bucketCustomFindingsIndex, - findingsIndexPattern = bucketCustomFindingsIndexPattern - ) - ) - )!! + val bucketLevelMonitorResponse = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = + DataSources( + findingsEnabled = true, + alertsIndex = bucketCustomAlertsIndex, + findingsIndex = bucketCustomFindingsIndex, + findingsIndexPattern = bucketCustomFindingsIndexPattern, + ), + ), + )!! 
val docQuery1 = DocLevelQuery(query = "test_field_1:\"test_value_2\"", name = "1", fields = listOf()) val docQuery2 = DocLevelQuery(query = "test_field_1:\"test_value_1\"", name = "2", fields = listOf()) @@ -6317,24 +7031,27 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val docCustomAlertsIndex = "custom_alerts_index" val docCustomFindingsIndex = "custom_findings_index" val docCustomFindingsIndexPattern = "custom_findings_index-1" - var docLevelMonitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(docTrigger), - dataSources = DataSources( - alertsIndex = docCustomAlertsIndex, - findingsIndex = docCustomFindingsIndex, - findingsIndexPattern = docCustomFindingsIndexPattern - ), - ignoreFindingsAndAlerts = true - ) + var docLevelMonitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(docTrigger), + dataSources = + DataSources( + alertsIndex = docCustomAlertsIndex, + findingsIndex = docCustomFindingsIndex, + findingsIndexPattern = docCustomFindingsIndexPattern, + ), + ignoreFindingsAndAlerts = true, + ) val docLevelMonitorResponse = createMonitor(docLevelMonitor)!! // 1. bucketMonitor (chainedFinding = null) 2. docMonitor (chainedFinding = bucketMonitor) - var workflow = randomWorkflow( - monitorIds = listOf(bucketLevelMonitorResponse.id, docLevelMonitorResponse.id), - enabled = false, - auditDelegateMonitorAlerts = false - ) + var workflow = + randomWorkflow( + monitorIds = listOf(bucketLevelMonitorResponse.id, docLevelMonitorResponse.id), + enabled = false, + auditDelegateMonitorAlerts = false, + ) val workflowResponse = upsertWorkflow(workflow)!! 
val workflowById = searchWorkflow(workflowResponse.id) assertNotNull(workflowById) @@ -6347,8 +7064,8 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { "test_value_1", // adding duplicate to verify aggregation "test_value_2", "test_value_2", - "test_value_3" - ) + "test_value_3", + ), ) val workflowId = workflowResponse.id @@ -6362,8 +7079,11 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { val searchResult = monitorRunResults.inputResults.results.first() @Suppress("UNCHECKED_CAST") - val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg") - ?.get("buckets") as List> + val buckets = + searchResult + .stringMap("aggregations") + ?.stringMap("composite_agg") + ?.get("buckets") as List> assertEquals("Incorrect search result", 3, buckets.size) val getAlertsResponse = assertAlerts(bucketLevelMonitorResponse.id, bucketCustomAlertsIndex, 2, workflowId) @@ -6388,27 +7108,31 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { createIndex(index1, Settings.EMPTY) val q1 = DocLevelQuery(query = "properties:\"abcd\"", name = "1", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", - listOf(index1), - listOf(q1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index1), + listOf(q1), + ) val customQueryIndex = "custom_alerts_index" - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - dataSources = DataSources( - queryIndex = customQueryIndex + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + dataSources = + DataSources( + queryIndex = customQueryIndex, + ), ) - ) val monitorResponse = createMonitor(monitor)!! - val workflowRequest = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + val workflowRequest = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = upsertWorkflow(workflowRequest)!! 
val workflowId = workflowResponse.id val getWorkflowResponse = getWorkflowById(id = workflowResponse.id) @@ -6438,22 +7162,24 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals(2, lastRunContextBeforeDisable?.get("0")) // Disable workflow - val disabledWorkflowRequest = randomWorkflow( - monitorIds = listOf(monitorResponse.id), - id = workflowId, - enabled = false - ) + val disabledWorkflowRequest = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + id = workflowId, + enabled = false, + ) upsertWorkflow(disabledWorkflowRequest, method = RestRequest.Method.PUT, id = workflowId) // Index doc. Since workflow is disabled, monitor workflow metadata shouldn't be updated indexDoc(index1, "4", testDoc1) // re-enable workflow - val enabledWorkflowRequest = randomWorkflow( - monitorIds = listOf(monitorResponse.id), - id = workflowId, - enabled = true - ) + val enabledWorkflowRequest = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + id = workflowId, + enabled = true, + ) upsertWorkflow(enabledWorkflowRequest, method = RestRequest.Method.PUT, id = workflowId) // Assert no new findings generated after workflow is re-enabled @@ -6474,21 +7200,26 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { // Setup doc level monitor val docQuery = DocLevelQuery(query = "eventType:\"login\"", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery), + ) val customFindingsIndex = "custom_findings_index" val customFindingsIndexPattern = "custom_findings_index-1" val customQueryIndex = "custom_alerts_index" - var monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(), - dataSources = DataSources( - queryIndex = customQueryIndex, - findingsIndex = customFindingsIndex, - findingsIndexPattern = customFindingsIndexPattern + var 
monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(), + dataSources = + DataSources( + queryIndex = customQueryIndex, + findingsIndex = customFindingsIndex, + findingsIndexPattern = customFindingsIndexPattern, + ), ) - ) val monitorResponse = createMonitor(monitor) assertFalse(monitorResponse?.id.isNullOrEmpty()) @@ -6513,17 +7244,19 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals(0, lastRunContextBeforeDisable?.get("0")) // Disable monitor - var updateMonitorResponse = updateMonitor( - monitor.copy( - id = monitorResponse.id, - dataSources = DataSources( - queryIndex = customQueryIndex, + var updateMonitorResponse = + updateMonitor( + monitor.copy( + id = monitorResponse.id, + dataSources = + DataSources( + queryIndex = customQueryIndex, + ), + enabled = false, + enabledTime = null, ), - enabled = false, - enabledTime = null - ), - monitorResponse.id - ) + monitorResponse.id, + ) Assert.assertNotNull(updateMonitorResponse) // Index doc. 
Since monitor is disabled, monitor workflow metadata shouldn't be updated @@ -6539,17 +7272,19 @@ class MonitorDataSourcesIT : AlertingSingleNodeTestCase() { assertEquals(1, findings.size) // re-enable monitor - updateMonitorResponse = updateMonitor( - monitor.copy( - id = monitorResponse.id, - dataSources = DataSources( - queryIndex = customQueryIndex, + updateMonitorResponse = + updateMonitor( + monitor.copy( + id = monitorResponse.id, + dataSources = + DataSources( + queryIndex = customQueryIndex, + ), + enabled = true, + enabledTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), ), - enabled = true, - enabledTime = Instant.now().truncatedTo(ChronoUnit.MILLIS) - ), - monitorResponse.id - ) + monitorResponse.id, + ) Assert.assertNotNull(updateMonitorResponse) executeMonitorResponse = executeMonitor(monitor, id, false) Assert.assertNotNull(executeMonitorResponse) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorFanOutUtilsTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorFanOutUtilsTests.kt index 91afb23b9..1988c875e 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorFanOutUtilsTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorFanOutUtilsTests.kt @@ -7,31 +7,36 @@ package org.opensearch.alerting import org.opensearch.core.index.Index import org.opensearch.core.index.shard.ShardId import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class MonitorFanOutUtilsTests : OpenSearchTestCase() { + @Test fun `test distribute few shards many nodes`() { - val result = distributeShards( - 1000, - listOf("nodeA", "nodeB", "nodeC", "nodeD", "nodeE"), - listOf("0", "1"), - Index("index1", "id1") - ) + val result = + distributeShards( + 1000, + listOf("nodeA", "nodeB", "nodeC", "nodeD", "nodeE"), + listOf("0", "1"), + Index("index1", "id1"), + ) validateDistribution(result, 2, listOf(1), 2) } + @Test fun `test distribute randomizes the assigned node`() { val nodes = mutableSetOf() // 
Picking a node to distribute to is random. To reduce test flakiness, we run this 100 times to give a (1/5)^99 chance // that the same node is picked every time repeat(100) { - val result = distributeShards( - 1000, - listOf("nodeA", "nodeB", "nodeC", "nodeD", "nodeE"), - listOf("0"), - Index("index1", "id1") - ) + val result = + distributeShards( + 1000, + listOf("nodeA", "nodeB", "nodeC", "nodeD", "nodeE"), + listOf("0"), + Index("index1", "id1"), + ) validateDistribution(result, 1, listOf(1), 1) nodes.addAll(result.keys) @@ -40,79 +45,93 @@ class MonitorFanOutUtilsTests : OpenSearchTestCase() { assertTrue(nodes.size > 1) } + @Test fun `test distribute many shards few nodes`() { - val result = distributeShards( - 1000, - listOf("nodeA", "nodeB", "nodeC"), - listOf("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"), - Index("index1", "id1") - ) + val result = + distributeShards( + 1000, + listOf("nodeA", "nodeB", "nodeC"), + listOf("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"), + Index("index1", "id1"), + ) validateDistribution(result, 3, listOf(3, 4), 10) } + @Test fun `test distribute max nodes limits`() { - val result = distributeShards( - 2, - listOf("nodeA", "nodeB", "nodeC"), - listOf("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"), - Index("index1", "id1") - ) + val result = + distributeShards( + 2, + listOf("nodeA", "nodeB", "nodeC"), + listOf("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"), + Index("index1", "id1"), + ) validateDistribution(result, 2, listOf(5), 10) } + @Test fun `test distribute edge case 1 shard`() { - val result = distributeShards( - 1000, - listOf("nodeA", "nodeB", "nodeC"), - listOf("0"), - Index("index1", "id1") - ) + val result = + distributeShards( + 1000, + listOf("nodeA", "nodeB", "nodeC"), + listOf("0"), + Index("index1", "id1"), + ) validateDistribution(result, 1, listOf(1), 1) } + @Test fun `test distribute edge case 1 node`() { - val result = distributeShards( - 1000, - listOf("nodeA"), - listOf("0", "1", "2"), - 
Index("index1", "id1") - ) + val result = + distributeShards( + 1000, + listOf("nodeA"), + listOf("0", "1", "2"), + Index("index1", "id1"), + ) validateDistribution(result, 1, listOf(3), 3) } + @Test fun `test distribute edge case 1 shard 1 node`() { - val result = distributeShards( - 1000, - listOf("nodeA"), - listOf("0"), - Index("index1", "id1") - ) + val result = + distributeShards( + 1000, + listOf("nodeA"), + listOf("0"), + Index("index1", "id1"), + ) validateDistribution(result, 1, listOf(1), 1) } + @Test fun `test distribute edge case no nodes does not throw`() { - val result = distributeShards( - 1000, - listOf(), - listOf("0"), - Index("index1", "id1") - ) + val result = + distributeShards( + 1000, + listOf(), + listOf("0"), + Index("index1", "id1"), + ) validateDistribution(result, 0, listOf(), 0) } + @Test fun `test distribute edge case no shards does not throw`() { - val result = distributeShards( - 1000, - listOf("nodeA"), - listOf(), - Index("index1", "id1") - ) + val result = + distributeShards( + 1000, + listOf("nodeA"), + listOf(), + Index("index1", "id1"), + ) validateDistribution(result, 0, listOf(), 0) } @@ -121,7 +140,7 @@ class MonitorFanOutUtilsTests : OpenSearchTestCase() { result: Map>, expectedNodeCount: Int, expectedShardsPerNode: List, - expectedTotalShardCount: Int + expectedTotalShardCount: Int, ) { assertEquals(expectedNodeCount, result.keys.size) var shardCount = 0 diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt index 5c3252f56..54902f344 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorRunnerServiceIT.kt @@ -60,12 +60,12 @@ import java.time.temporal.ChronoUnit.MINUTES import java.util.concurrent.TimeUnit class MonitorRunnerServiceIT : AlertingRestTestCase() { - fun `test execute monitor with dryrun`() { val action = 
randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val monitor = randomQueryLevelMonitor( - triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) - ) + val monitor = + randomQueryLevelMonitor( + triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ) val response = executeMonitor(monitor, params = DRYRUN_MONITOR) @@ -73,7 +73,8 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) for (triggerResult in output.objectMap("trigger_results").values) { for (actionResult in triggerResult.objectMap("action_results").values) { - @Suppress("UNCHECKED_CAST") val actionOutput = actionResult["output"] as Map + @Suppress("UNCHECKED_CAST") + val actionOutput = actionResult["output"] as Map assertEquals("Hello ${monitor.name}", actionOutput["subject"]) assertEquals("Hello ${monitor.name}", actionOutput["message"]) } @@ -90,15 +91,18 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val testDoc = """{ "test_strict_date_time" : "$testTime" }""" indexDoc(testIndex, "1", testDoc) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().query(query)) - val triggerScript = """ + val triggerScript = + """ // make sure there is exactly one hit return ctx.results[0].hits.hits.size() == 1 - """.trimIndent() + """.trimIndent() val trigger = randomQueryLevelTrigger(condition = Script(triggerScript)) val monitor = randomQueryLevelMonitor(inputs = listOf(input), triggers = listOf(trigger)) @@ -109,6 +113,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { 
assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val total = searchResult.stringMap("hits")?.get("total") as Map assertEquals("Incorrect search result", 1, total["value"]) @@ -130,11 +135,12 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { } fun `test active alert is updated on each run`() { - val monitor = createMonitor( - randomQueryLevelMonitor( - triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, destinationId = createDestination().id)) + val monitor = + createMonitor( + randomQueryLevelMonitor( + triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, destinationId = createDestination().id)), + ), ) - ) executeMonitor(monitor.id) val firstRunAlert = searchAlerts(monitor).single() @@ -152,7 +158,8 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { assertEquals("Start time shouldn't change", firstRunAlert.startTime, secondRunAlert.startTime) assertNotEquals( "Last notification should be different.", - firstRunAlert.lastNotificationTime, secondRunAlert.lastNotificationTime + firstRunAlert.lastNotificationTime, + secondRunAlert.lastNotificationTime, ) } @@ -160,12 +167,13 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { // use a non-existent index to trigger an input error createIndex("foo", Settings.EMPTY) val input = SearchInput(indices = listOf("foo"), query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery())) - val monitor = createMonitor( - randomQueryLevelMonitor( - inputs = listOf(input), - triggers = listOf(randomQueryLevelTrigger(condition = NEVER_RUN)) + val monitor = + createMonitor( + randomQueryLevelMonitor( + inputs = listOf(input), + triggers = listOf(randomQueryLevelTrigger(condition = NEVER_RUN)), + ), ) - ) deleteIndex("foo") val response = executeMonitor(monitor.id) @@ -185,12 +193,13 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { 
// use a non-existent monitoid to trigger a 404. createIndex("foo", Settings.EMPTY) val input = SearchInput(indices = listOf("foo"), query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery())) - val monitor = createMonitor( - randomQueryLevelMonitor( - inputs = listOf(input), - triggers = listOf(randomQueryLevelTrigger(condition = NEVER_RUN)) + val monitor = + createMonitor( + randomQueryLevelMonitor( + inputs = listOf(input), + triggers = listOf(randomQueryLevelTrigger(condition = NEVER_RUN)), + ), ) - ) var exception: ResponseException? = null try { @@ -207,15 +216,17 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { createIndex(index, Settings.EMPTY) val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "1", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(index), listOf(docQuery)) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf() - ) - ) - val doc = """ + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(), + ), + ) + val doc = + """ { "test_field": "us-west-2" } - """.trimIndent() + """.trimIndent() indexDoc(index, "1", doc) val response = executeMonitor(monitor.id) @@ -231,12 +242,13 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { createIndex("foo", Settings.EMPTY) val input = SearchInput(indices = listOf("foo"), query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery())) - val monitor = createMonitor( - randomQueryLevelMonitor( - inputs = listOf(input), - triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, destinationId = destinationId)) + val monitor = + createMonitor( + randomQueryLevelMonitor( + inputs = listOf(input), + triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, destinationId = destinationId)), + ), ) - ) var response = executeMonitor(monitor.id) @@ -258,11 +270,12 @@ class MonitorRunnerServiceIT : 
AlertingRestTestCase() { } fun `test acknowledged alert is not updated unnecessarily`() { - val monitor = createMonitor( - randomQueryLevelMonitor( - triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, destinationId = createDestination().id)) + val monitor = + createMonitor( + randomQueryLevelMonitor( + triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, destinationId = createDestination().id)), + ), ) - ) executeMonitor(monitor.id) acknowledgeAlerts(monitor, searchAlerts(monitor).single()) val acknowledgedAlert = searchAlerts(monitor).single() @@ -318,9 +331,10 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { fun `test execute action template error`() { // Intentional syntax error in mustache template val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name")) - val monitor = randomQueryLevelMonitor( - triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))) - ) + val monitor = + randomQueryLevelMonitor( + triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + ) val response = executeMonitor(monitor) @@ -344,15 +358,18 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val testDoc = """{ "test_strict_date_time" : "$testTime" }""" indexDoc(testIndex, "1", testDoc) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") val input = SearchInput(indices = listOf(".*"), query = SearchSourceBuilder().query(query)) - val triggerScript = """ + val triggerScript = + """ // make sure there is at least one monitor return ctx.results[0].hits.hits.size() > 0 - """.trimIndent() + """.trimIndent() val destinationId = createDestination().id val trigger = randomQueryLevelTrigger(condition = 
Script(triggerScript), destinationId = destinationId) val monitor = createMonitor(randomQueryLevelMonitor(inputs = listOf(input), triggers = listOf(trigger))) @@ -380,15 +397,18 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { // Queries that use period_start/end should expect these values to always be formatted as 'epoch_millis'. Either // the query should specify the format (like below) or the mapping for the index/field being queried should allow // epoch_millis as an alternative (OpenSearch's default mapping for date fields "strict_date_optional_time||epoch_millis") - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().query(query)) - val triggerScript = """ + val triggerScript = + """ // make sure there is exactly one hit return ctx.results[0].hits.hits.size() == 1 - """.trimIndent() + """.trimIndent() val trigger = randomQueryLevelTrigger(condition = Script(triggerScript)) val monitor = randomQueryLevelMonitor(inputs = listOf(input), triggers = listOf(trigger)) @@ -410,9 +430,10 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { // Give the index name in the date math format. val testIndex = "" // Add percent encoding for the http client to resolve the format. 
- val encodedTestIndex = createTestIndex( - URLEncoder.encode(testIndex, "utf-8") - ) + val encodedTestIndex = + createTestIndex( + URLEncoder.encode(testIndex, "utf-8"), + ) val fiveDaysAgo = ZonedDateTime.now().minus(5, DAYS).truncatedTo(MILLIS) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(fiveDaysAgo) @@ -422,15 +443,18 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { // Queries that use period_start/end should expect these values to always be formatted as 'epoch_millis'. Either // the query should specify the format (like below) or the mapping for the index/field being queried should allow // epoch_millis as an alternative (OpenSearch's default mapping for date fields "strict_date_optional_time||epoch_millis") - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().query(query)) - val triggerScript = """ + val triggerScript = + """ // make sure there is exactly one hit return ctx.results[0].hits.hits.size() == 1 - """.trimIndent() + """.trimIndent() val trigger = randomQueryLevelTrigger(condition = Script(triggerScript)) val monitor = randomQueryLevelMonitor(inputs = listOf(input), triggers = listOf(trigger)) @@ -441,32 +465,37 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val total = searchResult.stringMap("hits")?.get("total") as Map assertEquals("Incorrect search result", 1, total["value"]) } fun `test monitor with one bad action and one good action`() { - val goodAction = randomAction( - template = 
randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id - ) - val syntaxErrorAction = randomAction( - name = "bad syntax", - template = randomTemplateScript("{{foo"), - destinationId = createDestination().id - ) + val goodAction = + randomAction( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + ) + val syntaxErrorAction = + randomAction( + name = "bad syntax", + template = randomTemplateScript("{{foo"), + destinationId = createDestination().id, + ) val actions = listOf(goodAction, syntaxErrorAction) - val monitor = createMonitor( - randomQueryLevelMonitor(triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = actions))) - ) + val monitor = + createMonitor( + randomQueryLevelMonitor(triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = actions))), + ) val output = entityAsMap(executeMonitor(monitor.id)) assertEquals(monitor.name, output["monitor_name"]) for (triggerResult in output.objectMap("trigger_results").values) { for (actionResult in triggerResult.objectMap("action_results").values) { - @Suppress("UNCHECKED_CAST") val actionOutput = actionResult["output"] as Map + @Suppress("UNCHECKED_CAST") + val actionOutput = actionResult["output"] as Map if (actionResult["name"] == goodAction.name) { assertEquals("Hello ${monitor.name}", actionOutput["message"]) } else if (actionResult["name"] == syntaxErrorAction.name) { @@ -489,12 +518,16 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val trigger = randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action)) val monitor = createMonitor(randomQueryLevelMonitor(triggers = listOf(trigger))) val listOfFiveErrorMessages = (1..5).map { i -> AlertError(timestamp = Instant.now(), message = "error message $i") } - val activeAlert = createAlert( - randomAlert(monitor).copy( - state = ACTIVE, errorHistory = listOfFiveErrorMessages, - triggerId = trigger.id, triggerName 
= trigger.name, severity = trigger.severity + val activeAlert = + createAlert( + randomAlert(monitor).copy( + state = ACTIVE, + errorHistory = listOfFiveErrorMessages, + triggerId = trigger.id, + triggerName = trigger.name, + severity = trigger.severity, + ), ) - ) val response = executeMonitor(monitor.id) @@ -512,17 +545,19 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { fun `test latest error is not lost when alert is completed`() { // Creates an active alert the first time it's run and completes it the second time the monitor is run. - val trigger = randomQueryLevelTrigger( - condition = Script( - """ - if (ctx.alert == null) { - throw new RuntimeException("foo"); - } else { - return false; - } - """.trimIndent() + val trigger = + randomQueryLevelTrigger( + condition = + Script( + """ + if (ctx.alert == null) { + throw new RuntimeException("foo"); + } else { + return false; + } + """.trimIndent(), + ), ) - ) val monitor = createMonitor(randomQueryLevelMonitor(triggers = listOf(trigger))) executeMonitor(monitor.id) @@ -540,13 +575,15 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { fun `test throw script exception`() { // Creates an active alert the first time it's run and completes it the second time the monitor is run. 
- val trigger = randomQueryLevelTrigger( - condition = Script( - """ - param[0]; return true - """.trimIndent() + val trigger = + randomQueryLevelTrigger( + condition = + Script( + """ + param[0]; return true + """.trimIndent(), + ), ) - ) val monitor = createMonitor(randomQueryLevelMonitor(triggers = listOf(trigger))) executeMonitor(monitor.id) @@ -555,7 +592,8 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { executeMonitor(monitor.id) assertEquals( "Error does not match", - "Failed evaluating trigger:\nparam[0]; return true\n ^---- HERE", errorAlert.errorMessage + "Failed evaluating trigger:\nparam[0]; return true\n ^---- HERE", + errorAlert.errorMessage, ) } @@ -566,12 +604,16 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val trigger = randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action)) val monitor = createMonitor(randomQueryLevelMonitor(triggers = listOf(trigger))) val listOfTenErrorMessages = (1..10).map { i -> AlertError(timestamp = Instant.now(), message = "error message $i") } - val activeAlert = createAlert( - randomAlert(monitor).copy( - state = ACTIVE, errorHistory = listOfTenErrorMessages, - triggerId = trigger.id, triggerName = trigger.name, severity = trigger.severity + val activeAlert = + createAlert( + randomAlert(monitor).copy( + state = ACTIVE, + errorHistory = listOfTenErrorMessages, + triggerId = trigger.id, + triggerName = trigger.name, + severity = trigger.severity, + ), ) - ) val response = executeMonitor(monitor.id) @@ -601,16 +643,18 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { } fun `test execute monitor non-dryrun`() { - val monitor = createMonitor( - randomQueryLevelMonitor( - triggers = listOf( - randomQueryLevelTrigger( - condition = ALWAYS_RUN, - actions = listOf(randomAction(destinationId = createDestination().id)) - ) - ) + val monitor = + createMonitor( + randomQueryLevelMonitor( + triggers = + listOf( + randomQueryLevelTrigger( + condition = ALWAYS_RUN, + actions = 
listOf(randomAction(destinationId = createDestination().id)), + ), + ), + ), ) - ) val response = executeMonitor(monitor.id, mapOf("dryrun" to "false")) @@ -621,16 +665,18 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { } fun `test execute monitor with already active alert`() { - val monitor = createMonitor( - randomQueryLevelMonitor( - triggers = listOf( - randomQueryLevelTrigger( - condition = ALWAYS_RUN, - actions = listOf(randomAction(destinationId = createDestination().id)) - ) - ) + val monitor = + createMonitor( + randomQueryLevelMonitor( + triggers = + listOf( + randomQueryLevelTrigger( + condition = ALWAYS_RUN, + actions = listOf(randomAction(destinationId = createDestination().id)), + ), + ), + ), ) - ) val firstExecuteResponse = executeMonitor(monitor.id, mapOf("dryrun" to "false")) @@ -650,11 +696,12 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { fun `test delete monitor with no alerts after alert indices is initialized`() { putAlertMappings() - val newMonitor = createMonitor( - randomQueryLevelMonitor( - triggers = listOf(randomQueryLevelTrigger(condition = NEVER_RUN, actions = listOf(randomAction()))) + val newMonitor = + createMonitor( + randomQueryLevelMonitor( + triggers = listOf(randomQueryLevelTrigger(condition = NEVER_RUN, actions = listOf(randomAction()))), + ), ) - ) val deleteNewMonitorResponse = client().makeRequest("DELETE", "$ALERTING_BASE_URI/${newMonitor.id}") assertEquals("Delete request not successful", RestStatus.OK, deleteNewMonitorResponse.restStatus()) @@ -690,39 +737,45 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { } fun `test monitor with throttled action for same alert`() { - val actionThrottleEnabled = randomAction( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - throttleEnabled = true, throttle = Throttle(value = 5, unit = MINUTES) - ) - val actionThrottleNotEnabled = randomAction( - template = randomTemplateScript("Hello 
{{ctx.monitor.name}}"), - destinationId = createDestination().id, - throttleEnabled = false, throttle = Throttle(value = 5, unit = MINUTES) - ) + val actionThrottleEnabled = + randomAction( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + throttleEnabled = true, + throttle = Throttle(value = 5, unit = MINUTES), + ) + val actionThrottleNotEnabled = + randomAction( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + throttleEnabled = false, + throttle = Throttle(value = 5, unit = MINUTES), + ) val actions = listOf(actionThrottleEnabled, actionThrottleNotEnabled) - val monitor = createMonitor( - randomQueryLevelMonitor( - triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = actions)), - schedule = IntervalSchedule(interval = 1, unit = MINUTES) + val monitor = + createMonitor( + randomQueryLevelMonitor( + triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = actions)), + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + ), ) - ) val monitorRunResultNotThrottled = entityAsMap(executeMonitor(monitor.id)) verifyActionThrottleResults( monitorRunResultNotThrottled, mutableMapOf( Pair(actionThrottleEnabled.id, false), - Pair(actionThrottleNotEnabled.id, false) - ) + Pair(actionThrottleNotEnabled.id, false), + ), ) val notThrottledAlert = searchAlerts(monitor) assertEquals("1 alert should be returned", 1, notThrottledAlert.size) verifyAlert(notThrottledAlert.single(), monitor, ACTIVE) - val notThrottledActionResults = verifyActionExecutionResultInAlert( - notThrottledAlert[0], - mutableMapOf(Pair(actionThrottleEnabled.id, 0), Pair(actionThrottleNotEnabled.id, 0)) - ) + val notThrottledActionResults = + verifyActionExecutionResultInAlert( + notThrottledAlert[0], + mutableMapOf(Pair(actionThrottleEnabled.id, 0), Pair(actionThrottleNotEnabled.id, 0)), + ) assertEquals(notThrottledActionResults.size, 2) val 
monitorRunResultThrottled = entityAsMap(executeMonitor(monitor.id)) @@ -730,40 +783,44 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { monitorRunResultThrottled, mutableMapOf( Pair(actionThrottleEnabled.id, true), - Pair(actionThrottleNotEnabled.id, false) - ) + Pair(actionThrottleNotEnabled.id, false), + ), ) val throttledAlert = searchAlerts(monitor) assertEquals("1 alert should be returned", 1, throttledAlert.size) verifyAlert(throttledAlert.single(), monitor, ACTIVE) - val throttledActionResults = verifyActionExecutionResultInAlert( - throttledAlert[0], - mutableMapOf(Pair(actionThrottleEnabled.id, 1), Pair(actionThrottleNotEnabled.id, 0)) - ) + val throttledActionResults = + verifyActionExecutionResultInAlert( + throttledAlert[0], + mutableMapOf(Pair(actionThrottleEnabled.id, 1), Pair(actionThrottleNotEnabled.id, 0)), + ) assertEquals(notThrottledActionResults.size, 2) assertEquals( notThrottledActionResults[actionThrottleEnabled.id]!!.lastExecutionTime, - throttledActionResults[actionThrottleEnabled.id]!!.lastExecutionTime + throttledActionResults[actionThrottleEnabled.id]!!.lastExecutionTime, ) } fun `test monitor with throttled action for different alerts`() { - val actionThrottleEnabled = randomAction( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - throttleEnabled = true, throttle = Throttle(value = 5, unit = MINUTES) - ) + val actionThrottleEnabled = + randomAction( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + throttleEnabled = true, + throttle = Throttle(value = 5, unit = MINUTES), + ) val actions = listOf(actionThrottleEnabled) val trigger = randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = actions) - val monitor = createMonitor( - randomQueryLevelMonitor( - triggers = listOf(trigger), - schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES) + val monitor = + createMonitor( + 
randomQueryLevelMonitor( + triggers = listOf(trigger), + schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES), + ), ) - ) val monitorRunResult1 = entityAsMap(executeMonitor(monitor.id)) verifyActionThrottleResults(monitorRunResult1, mutableMapOf(Pair(actionThrottleEnabled.id, false))) @@ -790,7 +847,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val actionResults2 = verifyActionExecutionResultInAlert(activeAlert2[0], mutableMapOf(Pair(actionThrottleEnabled.id, 0))) assertNotEquals( actionResults1[actionThrottleEnabled.id]!!.lastExecutionTime, - actionResults2[actionThrottleEnabled.id]!!.lastExecutionTime + actionResults2[actionThrottleEnabled.id]!!.lastExecutionTime, ) } @@ -799,26 +856,29 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val emailAccount = createRandomEmailAccount() val emailGroup = createRandomEmailGroup() - val email = Email( - emailAccountID = emailAccount.id, - recipients = listOf( - Recipient(type = Recipient.RecipientType.EMAIL, emailGroupID = null, email = "test@email.com"), - Recipient(type = Recipient.RecipientType.EMAIL_GROUP, emailGroupID = emailGroup.id, email = null) - ) - ) - - val destination = createDestination( - Destination( - type = DestinationType.EMAIL, - name = "testDesination", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = null, - slack = null, - customWebhook = null, - email = email + val email = + Email( + emailAccountID = emailAccount.id, + recipients = + listOf( + Recipient(type = Recipient.RecipientType.EMAIL, emailGroupID = null, email = "test@email.com"), + Recipient(type = Recipient.RecipientType.EMAIL_GROUP, emailGroupID = emailGroup.id, email = null), + ), + ) + + val destination = + createDestination( + Destination( + type = DestinationType.EMAIL, + name = "testDesination", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = null, + slack = null, + customWebhook = null, + email = email, + ), ) - ) val action = randomAction(destinationId = 
destination.id) val trigger = randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action)) val monitor = createMonitor(randomQueryLevelMonitor(triggers = listOf(trigger))) @@ -877,7 +937,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) assertTrue( "Monitor results should contain cluster_name, but found: $resultsContent", - resultsContent.toString().contains("cluster_name") + resultsContent.toString().contains("cluster_name"), ) assertNull("There should not be an error message, but found: $errorMessage", errorMessage) } @@ -900,7 +960,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) assertTrue( "Monitor results should contain monitor_name, but found: $resultsContent", - resultsContent.toString().contains("memory_size_in_bytes") + resultsContent.toString().contains("memory_size_in_bytes"), ) assertNull("There should not be an error message, but found: $errorMessage", errorMessage) } @@ -908,14 +968,16 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { fun `test create ClusterMetricsInput monitor with alert triggered`() { // GIVEN putAlertMappings() - val trigger = randomQueryLevelTrigger( - condition = Script( - """ - return ctx.results[0].number_of_pending_tasks >= 0 - """.trimIndent() - ), - destinationId = createDestination().id - ) + val trigger = + randomQueryLevelTrigger( + condition = + Script( + """ + return ctx.results[0].number_of_pending_tasks >= 0 + """.trimIndent(), + ), + destinationId = createDestination().id, + ) val path = "/_cluster/health" val input = randomClusterMetricsInput(path = path) val monitor = createMonitor(randomClusterMetricsMonitor(inputs = listOf(input), triggers = listOf(trigger))) @@ -931,7 +993,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { for (triggerResult in triggerResults) { assertTrue( "This triggerResult should be triggered: $triggerResult", - 
triggerResult.objectMap("action_results").isNotEmpty() + triggerResult.objectMap("action_results").isNotEmpty(), ) } @@ -943,13 +1005,15 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { fun `test create ClusterMetricsInput monitor with no alert triggered`() { // GIVEN putAlertMappings() - val trigger = randomQueryLevelTrigger( - condition = Script( - """ - return ctx.results[0].status.equals("red") - """.trimIndent() + val trigger = + randomQueryLevelTrigger( + condition = + Script( + """ + return ctx.results[0].status.equals("red") + """.trimIndent(), + ), ) - ) val path = "/_cluster/stats" val input = randomClusterMetricsInput(path = path) val monitor = createMonitor(randomClusterMetricsMonitor(inputs = listOf(input), triggers = listOf(trigger))) @@ -965,7 +1029,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { for (triggerResult in triggerResults) { assertTrue( "This triggerResult should not be triggered: $triggerResult", - triggerResult.objectMap("action_results").isEmpty() + triggerResult.objectMap("action_results").isEmpty(), ) } @@ -978,10 +1042,11 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val indices = (1..5).map { createTestIndex() }.toTypedArray() val pathParams = indices.joinToString(",") val path = "/_cluster/health" - val input = randomClusterMetricsInput( - path = path, - pathParams = pathParams - ) + val input = + randomClusterMetricsInput( + path = path, + pathParams = pathParams, + ) val monitor = createMonitor(randomClusterMetricsMonitor(inputs = listOf(input))) // WHEN @@ -996,7 +1061,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) assertTrue( "Monitor results should contain cluster_name, but found: $resultsContent", - resultsContent.toString().contains("cluster_name") + resultsContent.toString().contains("cluster_name"), ) assertNull("There should not be an error message, but found: $errorMessage", errorMessage) } @@ -1008,21 +1073,21 @@ class 
MonitorRunnerServiceIT : AlertingRestTestCase() { // the API from the list before executing the monitor. fun `test execute monitor with custom webhook destination and denied host`() { - listOf("http://10.1.1.1", "127.0.0.1").forEach { val customWebhook = CustomWebhook(it, null, null, 80, null, "PUT", emptyMap(), emptyMap(), null, null) - val destination = createDestination( - Destination( - type = DestinationType.CUSTOM_WEBHOOK, - name = "testDesination", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = null, - slack = null, - customWebhook = customWebhook, - email = null + val destination = + createDestination( + Destination( + type = DestinationType.CUSTOM_WEBHOOK, + name = "testDesination", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = null, + slack = null, + customWebhook = customWebhook, + email = null, + ), ) - ) val action = randomAction(destinationId = destination.id) val trigger = randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action)) val monitor = createMonitor(randomQueryLevelMonitor(triggers = listOf(trigger))) @@ -1047,13 +1112,13 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val response = executeMonitor(monitor, params = DRYRUN_MONITOR) val output = entityAsMap(response) @Suppress("UNCHECKED_CAST") - (output["trigger_results"] as HashMap).forEach { - _, v -> + (output["trigger_results"] as HashMap).forEach { _, v -> assertTrue((v as HashMap)["triggered"] as Boolean) } assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val total = searchResult.stringMap("hits")?.get("total") as Map assertEquals("Incorrect search result", 5, total["value"]) @@ -1070,20 +1135,22 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val detectorId = randomAlphaOfLength(5) prepareTestAnomalyResult(detectorId, user) // for old monitor before enable FGAC, the 
user field is empty - val monitor = randomADMonitor( - inputs = listOf(adSearchInput(detectorId)), triggers = listOf(adMonitorTrigger()), - user = User(user.name, listOf(), user.roles, user.customAttributes) - ) + val monitor = + randomADMonitor( + inputs = listOf(adSearchInput(detectorId)), + triggers = listOf(adMonitorTrigger()), + user = User(user.name, listOf(), user.roles, user.customAttributes), + ) val response = executeMonitor(monitor, params = DRYRUN_MONITOR) val output = entityAsMap(response) @Suppress("UNCHECKED_CAST") - (output["trigger_results"] as HashMap).forEach { - _, v -> + (output["trigger_results"] as HashMap).forEach { _, v -> assertTrue((v as HashMap)["triggered"] as Boolean) } assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val total = searchResult.stringMap("hits")?.get("total") as Map assertEquals("Incorrect search result", 5, total["value"]) @@ -1104,12 +1171,12 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val response = executeMonitor(monitor, params = DRYRUN_MONITOR) val output = entityAsMap(response) @Suppress("UNCHECKED_CAST") - (output["trigger_results"] as HashMap).forEach { - _, v -> + (output["trigger_results"] as HashMap).forEach { _, v -> assertTrue((v as HashMap)["triggered"] as Boolean) } @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val total = searchResult.stringMap("hits")?.get("total") as Map assertEquals("Incorrect search result", 5, total["value"]) @@ -1126,19 +1193,21 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val user = randomUser() prepareTestAnomalyResult(detectorId, user) // Test monitor with different user - val monitor = randomADMonitor( - inputs = listOf(adSearchInput(detectorId)), - triggers = listOf(adMonitorTrigger()), user = randomUser() - ) + 
val monitor = + randomADMonitor( + inputs = listOf(adSearchInput(detectorId)), + triggers = listOf(adMonitorTrigger()), + user = randomUser(), + ) val response = executeMonitor(monitor, params = DRYRUN_MONITOR) val output = entityAsMap(response) @Suppress("UNCHECKED_CAST") - (output["trigger_results"] as HashMap).forEach { - _, v -> + (output["trigger_results"] as HashMap).forEach { _, v -> assertTrue((v as HashMap)["triggered"] as Boolean) } @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val total = searchResult.stringMap("hits")?.get("total") as Map assertEquals("Incorrect search result", 5, total["value"]) @@ -1152,33 +1221,39 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { listOf( "test_value_1", "test_value_1", // adding duplicate to verify aggregation - "test_value_2" - ) + "test_value_2", + ), ) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = 
"composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger))) val response = executeMonitor(monitor.id, params = DRYRUN_MONITOR) val output = entityAsMap(response) @@ -1186,6 +1261,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List> assertEquals("Incorrect search result", 2, buckets.size) @@ -1195,13 +1271,14 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val skipIndex = createTestIndex("to_skip_index") val previousIndex = createTestIndex("to_include_index") - val indexMapping = """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } - """.trimIndent() + val indexMapping = + """ + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } + """.trimIndent() val alias = createTestAlias(randomAlphaOfLength(10), 10, true, indexMapping) val aliasName = alias.keys.first() insertSampleTimeSerializedDataCurrentTime( @@ -1209,50 +1286,56 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { listOf( "test_value_1", "test_value_1", // adding duplicate to verify aggregation - "test_value_2" - ) + "test_value_2", + ), ) insertSampleTimeSerializedDataWithTime( 
previousIndex, listOf( "test_value_3", "test_value_4", - "test_value_5" - ) + "test_value_5", + ), ) insertSampleTimeSerializedDataWithTime( skipIndex, listOf( "test_value_6", "test_value_7", - "test_value_8" - ) + "test_value_8", + ), ) addIndexToAlias(previousIndex, aliasName) addIndexToAlias(skipIndex, aliasName) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10s") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10s") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(aliasName), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger))) val response = executeMonitor(monitor.id, params = DRYRUN_MONITOR) val output = entityAsMap(response) @@ -1260,6 +1343,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { 
assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List> Assert.assertEquals(buckets.size, 8) @@ -1274,27 +1358,28 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { listOf( "test_value_3", "test_value_4", - "test_value_5" + "test_value_5", ), - ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS).plusSeconds(10) + ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS).plusSeconds(10), ) insertSampleTimeSerializedDataWithTime( skipIndex, listOf( "test_value_6", "test_value_7", - "test_value_8" + "test_value_8", ), - ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS).plusSeconds(10) + ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS).plusSeconds(10), ) Thread.sleep(10000) - val indexMapping = """ - "properties" : { - "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, - "test_field" : { "type" : "keyword" }, - "number" : { "type" : "keyword" } - } - """.trimIndent() + val indexMapping = + """ + "properties" : { + "test_strict_date_time" : { "type" : "date", "format" : "strict_date_time" }, + "test_field" : { "type" : "keyword" }, + "number" : { "type" : "keyword" } + } + """.trimIndent() val alias = createTestAlias(randomAlphaOfLength(10), 10, true, indexMapping) val aliasName = alias.keys.first() insertSampleTimeSerializedDataCurrentTime( @@ -1302,34 +1387,40 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { listOf( "test_value_1", "test_value_1", // adding duplicate to verify aggregation - "test_value_2" - ) + "test_value_2", + ), ) addIndexToAlias(previousIndex, aliasName) addIndexToAlias(skipIndex, aliasName) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10s") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - 
TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10s") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(aliasName), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger))) val response = executeMonitor(monitor.id, params = DRYRUN_MONITOR) val output = entityAsMap(response) @@ -1337,6 +1428,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val buckets = searchResult.stringMap("aggregations")?.stringMap("composite_agg")?.get("buckets") as List> Assert.assertTrue(buckets.size <= 5) @@ -1347,64 +1439,71 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { indexDoc( index, "1", - """{"user_id": "1", + """ + {"user_id": "1", "ip_addr": "12345678", 
"user_agent": "chrome" } - """.trimIndent() + """.trimIndent(), ) indexDoc( index, "2", - """{"user_id": "2", + """ + {"user_id": "2", "ip_addr": "12345678", "user_agent": "chrome" } - """.trimIndent() + """.trimIndent(), ) indexDoc( index, "3", - """{"user_id": "2", + """ + {"user_id": "2", "ip_addr": "3443534", "user_agent": "chrome" } - """.trimIndent() + """.trimIndent(), ) - val triggerScript = """ + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("_value" to "distinct_user_count", "docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "hot", - filter = null - ) - ) - - val m = randomBucketLevelMonitor( - triggers = listOf(trigger), - inputs = listOf( - SearchInput( - listOf(index), - SearchSourceBuilder().aggregation( - MultiTermsAggregationBuilder("hot") - .terms( - listOf( - MultiTermsValuesSourceConfig.Builder().setFieldName("ip_addr.keyword").build(), - MultiTermsValuesSourceConfig.Builder().setFieldName("user_agent.keyword").build() - ) - ) - .subAggregation(CardinalityAggregationBuilder("distinct_user_count").field("user_id.keyword")) - ) - ) + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("_value" to "distinct_user_count", "docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "hot", + filter = null, + ), + ) + + val m = + randomBucketLevelMonitor( + triggers = listOf(trigger), + inputs = + listOf( + SearchInput( + listOf(index), + SearchSourceBuilder().aggregation( + MultiTermsAggregationBuilder("hot") + .terms( + listOf( + MultiTermsValuesSourceConfig.Builder().setFieldName("ip_addr.keyword").build(), + MultiTermsValuesSourceConfig.Builder().setFieldName("user_agent.keyword").build(), + ), + 
).subAggregation(CardinalityAggregationBuilder("distinct_user_count").field("user_id.keyword")), + ), + ), + ), ) - ) val monitor = createMonitor(m) val response = executeMonitor(monitor.id, params = DRYRUN_MONITOR) val output = entityAsMap(response) @@ -1412,16 +1511,23 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { assertEquals(monitor.name, output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val buckets = searchResult.stringMap("aggregations")?.stringMap("hot")?.get("buckets") as List> assertEquals("Incorrect search result", 2, buckets.size) - val distinctUserCountAgg1 = buckets.find { - it.get("key_as_string") == "12345678|chrome" - }!!.get("distinct_user_count") as Map + val distinctUserCountAgg1 = + buckets + .find { + it.get("key_as_string") == "12345678|chrome" + }!! + .get("distinct_user_count") as Map assertEquals(2, distinctUserCountAgg1.get("value")) - val distinctUserCountAgg2 = buckets.find { - it.get("key_as_string") == "3443534|chrome" - }!!.get("distinct_user_count") as Map + val distinctUserCountAgg2 = + buckets + .find { + it.get("key_as_string") == "3443534|chrome" + }!! 
+ .get("distinct_user_count") as Map assertEquals(1, distinctUserCountAgg2.get("value")) } @@ -1432,33 +1538,39 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { listOf( "test_value_1", "test_value_1", // adding duplicate to verify aggregation - "test_value_2" - ) + "test_value_2", + ), ) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger))) executeMonitor(monitor.id) @@ -1477,8 +1589,8 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { testIndex, listOf( "1", // test_value_1 - "2" // test_value_1 - ) + "2", // test_value_1 + ), ) // Execute 
monitor again @@ -1498,47 +1610,55 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { testIndex, listOf( "test_value_1", - "test_value_2" - ) + "test_value_2", + ), ) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() // For the Actions ensure that there is at least one and any PER_ALERT actions contain ACTIVE, DEDUPED and COMPLETED in its policy // so that the assertions done later in this test don't fail. // The config is being mutated this way to still maintain the randomness in configuration (like including other ActionExecutionScope). 
- val actions = randomActionsForBucketLevelTrigger(min = 1).map { - if (it.actionExecutionPolicy?.actionExecutionScope is PerAlertActionScope) { - it.copy( - actionExecutionPolicy = ActionExecutionPolicy( - PerAlertActionScope(setOf(AlertCategory.NEW, AlertCategory.DEDUPED, AlertCategory.COMPLETED)) + val actions = + randomActionsForBucketLevelTrigger(min = 1).map { + if (it.actionExecutionPolicy?.actionExecutionScope is PerAlertActionScope) { + it.copy( + actionExecutionPolicy = + ActionExecutionPolicy( + PerAlertActionScope(setOf(AlertCategory.NEW, AlertCategory.DEDUPED, AlertCategory.COMPLETED)), + ), ) - ) - } else { - it + } else { + it + } } - } var trigger = randomBucketLevelTrigger(actions = actions) - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger))) executeMonitor(monitor.id) @@ -1575,8 +1695,8 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { testIndex, listOf( "1", // test_value_1 - "2" // test_value_2 - ) + "2", // test_value_2 + ), ) // Execute Monitor and check that both Alerts were updated @@ -1591,11 +1711,11 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val previouslyActiveAlert = completedAlerts.single { it.aggregationResultBucket?.getBucketKeysHash().equals("test_value_2") } assertTrue( "Previously acknowledged alert was not updated when it moved to completed", - previouslyAcknowledgedAlert.lastNotificationTime!! 
> acknowledgedAlert2.lastNotificationTime + previouslyAcknowledgedAlert.lastNotificationTime!! > acknowledgedAlert2.lastNotificationTime, ) assertTrue( "Previously active alert was not updated when it moved to completed", - previouslyActiveAlert.lastNotificationTime!! > activeAlert2.lastNotificationTime + previouslyActiveAlert.lastNotificationTime!! > activeAlert2.lastNotificationTime, ) } @@ -1605,52 +1725,60 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { testIndex, listOf( "test_value_1", - "test_value_2" - ) + "test_value_2", + ), ) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") val termAgg = TermsAggregationBuilder("test_field").field("test_field") val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(termAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() // For the Actions ensure that there is at least one and any PER_ALERT actions contain ACTIVE, DEDUPED and COMPLETED in its policy // so that the assertions done later in this test don't fail. // The config is being mutated this way to still maintain the randomness in configuration (like including other ActionExecutionScope). 
- val actions = randomActionsForBucketLevelTrigger(min = 1).map { - if (it.actionExecutionPolicy?.actionExecutionScope is PerAlertActionScope) { - it.copy( - actionExecutionPolicy = ActionExecutionPolicy( - PerAlertActionScope(setOf(AlertCategory.NEW, AlertCategory.DEDUPED, AlertCategory.COMPLETED)) + val actions = + randomActionsForBucketLevelTrigger(min = 1).map { + if (it.actionExecutionPolicy?.actionExecutionScope is PerAlertActionScope) { + it.copy( + actionExecutionPolicy = + ActionExecutionPolicy( + PerAlertActionScope(setOf(AlertCategory.NEW, AlertCategory.DEDUPED, AlertCategory.COMPLETED)), + ), ) - ) - } else { - it + } else { + it + } } - } var trigger = randomBucketLevelTrigger(actions = actions) - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "test_field", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "test_field", + filter = null, + ), + ) + val monitor = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = DataSources(findingsEnabled = true), + ), ) - ) - val monitor = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources(findingsEnabled = true) - ) - ) executeMonitor(monitor.id) // Check created Alerts @@ -1671,55 +1799,64 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { testIndex, listOf( "test_value_1", - "test_value_2" - ) + "test_value_2", + ), ) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - 
TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() // For the Actions ensure that there is at least one and any PER_ALERT actions contain ACTIVE, DEDUPED and COMPLETED in its policy // so that the assertions done later in this test don't fail. // The config is being mutated this way to still maintain the randomness in configuration (like including other ActionExecutionScope). - val actions = randomActionsForBucketLevelTrigger(min = 1).map { - if (it.actionExecutionPolicy?.actionExecutionScope is PerAlertActionScope) { - it.copy( - actionExecutionPolicy = ActionExecutionPolicy( - PerAlertActionScope(setOf(AlertCategory.NEW, AlertCategory.DEDUPED, AlertCategory.COMPLETED)) + val actions = + randomActionsForBucketLevelTrigger(min = 1).map { + if (it.actionExecutionPolicy?.actionExecutionScope is PerAlertActionScope) { + it.copy( + actionExecutionPolicy = + ActionExecutionPolicy( + PerAlertActionScope(setOf(AlertCategory.NEW, AlertCategory.DEDUPED, AlertCategory.COMPLETED)), + ), ) - ) - } else { - it + } else { + it + } } - } var trigger = randomBucketLevelTrigger(actions = actions) - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null - ) - ) - val monitor = createMonitor( - randomBucketLevelMonitor( - 
inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources(findingsEnabled = true) + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), + ) + val monitor = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = DataSources(findingsEnabled = true), + ), ) - ) executeMonitor(monitor.id) // Check created Alerts @@ -1740,56 +1877,65 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { testIndex, listOf( "test_value_1", - "test_value_2" - ) + "test_value_2", + ), ) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field"), - TermsValuesSourceBuilder("number").field("number") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + TermsValuesSourceBuilder("number").field("number"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() // For the Actions ensure that there is at least one and any PER_ALERT actions contain ACTIVE, DEDUPED and COMPLETED in its policy // so that the assertions done later in this test don't fail. 
// The config is being mutated this way to still maintain the randomness in configuration (like including other ActionExecutionScope). - val actions = randomActionsForBucketLevelTrigger(min = 1).map { - if (it.actionExecutionPolicy?.actionExecutionScope is PerAlertActionScope) { - it.copy( - actionExecutionPolicy = ActionExecutionPolicy( - PerAlertActionScope(setOf(AlertCategory.NEW, AlertCategory.DEDUPED, AlertCategory.COMPLETED)) + val actions = + randomActionsForBucketLevelTrigger(min = 1).map { + if (it.actionExecutionPolicy?.actionExecutionScope is PerAlertActionScope) { + it.copy( + actionExecutionPolicy = + ActionExecutionPolicy( + PerAlertActionScope(setOf(AlertCategory.NEW, AlertCategory.DEDUPED, AlertCategory.COMPLETED)), + ), ) - ) - } else { - it + } else { + it + } } - } var trigger = randomBucketLevelTrigger(actions = actions) - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), + ) + val monitor = + createMonitor( + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = DataSources(findingsEnabled = true), + ), ) - ) - val monitor = createMonitor( - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources(findingsEnabled = true) - ) - ) executeMonitor(monitor.id) // Check created Alerts @@ -1812,42 +1958,49 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { "test_value_1", "test_value_3", "test_value_2", - "test_value_2" - ) + "test_value_2", + ), ) - val query = 
QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) // Trigger script should only create Alerts for 'test_value_1' and 'test_value_2' - val triggerScript = """ + val triggerScript = + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() val goodAction = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val syntaxErrorAction = randomAction( - name = "bad syntax", - template = randomTemplateScript("{{foo"), - destinationId = createDestination().id - ) + val syntaxErrorAction = + randomAction( + name = "bad syntax", + template = randomTemplateScript("{{foo"), + destinationId = createDestination().id, + ) val actions = listOf(goodAction, syntaxErrorAction) var trigger = randomBucketLevelTrigger(actions = actions) - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val monitor = createMonitor(randomBucketLevelMonitor(inputs = 
listOf(input), enabled = false, triggers = listOf(trigger))) val output = entityAsMap(executeMonitor(monitor.id)) @@ -1890,39 +2043,46 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { "test_value_1", "test_value_3", "test_value_2", - "test_value_2" - ) + "test_value_2", + ), ) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) // Trigger script should only create Alerts for 'test_value_1' and 'test_value_2' - val triggerScript = """ + val triggerScript = + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() - val action = randomActionWithPolicy( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - actionExecutionPolicy = ActionExecutionPolicy(PerExecutionActionScope()) - ) + val action = + randomActionWithPolicy( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + actionExecutionPolicy = ActionExecutionPolicy(PerExecutionActionScope()), + ) var trigger = randomBucketLevelTrigger(actions = listOf(action)) - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + 
BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger))) val output = entityAsMap(executeMonitor(monitor.id)) @@ -1958,38 +2118,45 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { "test_value_1", "test_value_1", "test_value_2", - "test_value_2" - ) + "test_value_2", + ), ) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() - val action = randomActionWithPolicy( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - actionExecutionPolicy = ActionExecutionPolicy(PerAlertActionScope(setOf(AlertCategory.DEDUPED, AlertCategory.NEW))) - ) + val action = + randomActionWithPolicy( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + actionExecutionPolicy = ActionExecutionPolicy(PerAlertActionScope(setOf(AlertCategory.DEDUPED, AlertCategory.NEW))), + ) var trigger = randomBucketLevelTrigger(actions = listOf(action)) - trigger = 
trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger))) executeMonitor(monitor.id) @@ -2007,8 +2174,8 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { "1", // test_value_1 "2", // test_value_1 "3", // test_value_2 - "4" // test_value_2 - ) + "4", // test_value_2 + ), ) // Execute Monitor and check that both Alerts were moved to COMPLETED @@ -2025,62 +2192,73 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { testIndex, listOf( "test_value_1", - "test_value_2" - ) + "test_value_2", + ), ) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() - val actionThrottleEnabled = randomActionWithPolicy( - template = randomTemplateScript("Hello 
{{ctx.monitor.name}}"), - destinationId = createDestination().id, - throttleEnabled = true, - throttle = Throttle(value = 5, unit = MINUTES), - actionExecutionPolicy = ActionExecutionPolicy( - actionExecutionScope = PerAlertActionScope(setOf(AlertCategory.DEDUPED, AlertCategory.NEW)) - ) - ) - val actionThrottleNotEnabled = randomActionWithPolicy( - template = randomTemplateScript("Hello {{ctx.monitor.name}}"), - destinationId = createDestination().id, - throttleEnabled = false, - throttle = Throttle(value = 5, unit = MINUTES), - actionExecutionPolicy = ActionExecutionPolicy( - actionExecutionScope = PerAlertActionScope(setOf(AlertCategory.DEDUPED, AlertCategory.NEW)) + val actionThrottleEnabled = + randomActionWithPolicy( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + throttleEnabled = true, + throttle = Throttle(value = 5, unit = MINUTES), + actionExecutionPolicy = + ActionExecutionPolicy( + actionExecutionScope = PerAlertActionScope(setOf(AlertCategory.DEDUPED, AlertCategory.NEW)), + ), + ) + val actionThrottleNotEnabled = + randomActionWithPolicy( + template = randomTemplateScript("Hello {{ctx.monitor.name}}"), + destinationId = createDestination().id, + throttleEnabled = false, + throttle = Throttle(value = 5, unit = MINUTES), + actionExecutionPolicy = + ActionExecutionPolicy( + actionExecutionScope = PerAlertActionScope(setOf(AlertCategory.DEDUPED, AlertCategory.NEW)), + ), ) - ) val actions = listOf(actionThrottleEnabled, actionThrottleNotEnabled) var trigger = randomBucketLevelTrigger(actions = actions) - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), 
+ script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger))) val monitorRunResultNotThrottled = entityAsMap(executeMonitor(monitor.id)) verifyActionThrottleResultsForBucketLevelMonitor( monitorRunResult = monitorRunResultNotThrottled, expectedEvents = setOf("test_value_1", "test_value_2"), - expectedActionResults = mapOf( - Pair(actionThrottleEnabled.id, false), - Pair(actionThrottleNotEnabled.id, false) - ) + expectedActionResults = + mapOf( + Pair(actionThrottleEnabled.id, false), + Pair(actionThrottleNotEnabled.id, false), + ), ) val notThrottledAlerts = searchAlerts(monitor) @@ -2088,10 +2266,11 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val previousAlertExecutionTime: MutableMap> = mutableMapOf() notThrottledAlerts.forEach { verifyAlert(it, monitor, ACTIVE) - val notThrottledActionResults = verifyActionExecutionResultInAlert( - it, - mutableMapOf(Pair(actionThrottleEnabled.id, 0), Pair(actionThrottleNotEnabled.id, 0)) - ) + val notThrottledActionResults = + verifyActionExecutionResultInAlert( + it, + mutableMapOf(Pair(actionThrottleEnabled.id, 0), Pair(actionThrottleNotEnabled.id, 0)), + ) assertEquals(notThrottledActionResults.size, 2) // Save the lastExecutionTimes of the actions for the Alert to be compared later against @@ -2112,20 +2291,22 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { verifyActionThrottleResultsForBucketLevelMonitor( monitorRunResult = monitorRunResultThrottled, expectedEvents = setOf("test_value_1", "test_value_2"), - expectedActionResults = mapOf( - Pair(actionThrottleEnabled.id, true), - Pair(actionThrottleNotEnabled.id, false) - ) + expectedActionResults = + mapOf( + Pair(actionThrottleEnabled.id, true), + Pair(actionThrottleNotEnabled.id, false), + ), ) val throttledAlerts = searchAlerts(monitor) assertEquals("Alerts may not have been saved 
correctly", 2, throttledAlerts.size) throttledAlerts.forEach { verifyAlert(it, monitor, ACTIVE) - val throttledActionResults = verifyActionExecutionResultInAlert( - it, - mutableMapOf(Pair(actionThrottleEnabled.id, 1), Pair(actionThrottleNotEnabled.id, 0)) - ) + val throttledActionResults = + verifyActionExecutionResultInAlert( + it, + mutableMapOf(Pair(actionThrottleEnabled.id, 1), Pair(actionThrottleNotEnabled.id, 0)), + ) assertEquals(throttledActionResults.size, 2) val prevthrottledActionLastExecutionTime = previousAlertExecutionTime[it.id]!![actionThrottleEnabled.id] @@ -2133,11 +2314,11 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { assertEquals( "Last execution time of a throttled action was updated for one of the Alerts", prevthrottledActionLastExecutionTime, - throttledActionResults[actionThrottleEnabled.id]!!.lastExecutionTime + throttledActionResults[actionThrottleEnabled.id]!!.lastExecutionTime, ) assertTrue( "Last execution time of a non-throttled action was not updated for one of the Alerts", - throttledActionResults[actionThrottleNotEnabled.id]!!.lastExecutionTime!! > prevNotThrottledActionLastExecutionTime + throttledActionResults[actionThrottleNotEnabled.id]!!.lastExecutionTime!! 
> prevNotThrottledActionLastExecutionTime, ) } } @@ -2149,8 +2330,8 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { listOf( "test_value_1", "test_value_1", - "test_value_2" - ) + "test_value_2", + ), ) val messageSource = "{{#ctx.newAlerts}}\n{{#sample_documents}}\n (docId={{_id}}) \n{{/sample_documents}}\n{{/ctx.newAlerts}}" @@ -2161,33 +2342,40 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { return@waitUntil false }, 200, TimeUnit.MILLISECONDS) - val query = QueryBuilders.rangeQuery("test_strict_date_time") - .gt("{{period_end}}||-10d") - .lte("{{period_end}}") - .format("epoch_millis") - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") - ) + val query = + QueryBuilders + .rangeQuery("test_strict_date_time") + .gt("{{period_end}}||-10d") + .lte("{{period_end}}") + .format("epoch_millis") + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) val input = SearchInput(indices = listOf(testIndex), query = SearchSourceBuilder().size(0).query(query).aggregation(compositeAgg)) - val triggerScript = """ + val triggerScript = + """ params.docCount > 1 - """.trimIndent() + """.trimIndent() - val action = randomAction( - template = randomTemplateScript(source = messageSource), - destinationId = createDestination().id - ) + val action = + randomAction( + template = randomTemplateScript(source = messageSource), + destinationId = createDestination().id, + ) var trigger = randomBucketLevelTrigger(actions = listOf(action)) - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = 
mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), ) - ) val monitor = createMonitor(randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger))) val output = entityAsMap(executeMonitor(monitor.id)) @@ -2203,17 +2391,22 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val actionOutput = actionResult["output"] as Map if (actionResult["name"] == action.name) { when (alertEvent.key) { - "test_value_1" -> bucket1DocIds.forEach { docEntry -> - assertTrue( - "The notification message is missing docEntry $docEntry", - !actionOutput["message"].isNullOrEmpty() && actionOutput["message"]!!.contains(docEntry) - ) + "test_value_1" -> { + bucket1DocIds.forEach { docEntry -> + assertTrue( + "The notification message is missing docEntry $docEntry", + !actionOutput["message"].isNullOrEmpty() && actionOutput["message"]!!.contains(docEntry), + ) + } } - "test_value_2" -> bucket2DocIds.forEach { docEntry -> - assertTrue( - "The notification message is missing docEntry $docEntry", - !actionOutput["message"].isNullOrEmpty() && actionOutput["message"]!!.contains(docEntry) - ) + + "test_value_2" -> { + bucket2DocIds.forEach { docEntry -> + assertTrue( + "The notification message is missing docEntry $docEntry", + !actionOutput["message"].isNullOrEmpty() && actionOutput["message"]!!.contains(docEntry), + ) + } } } } else { @@ -2224,7 +2417,10 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { } } - private fun prepareTestAnomalyResult(detectorId: String, user: User) { + private fun prepareTestAnomalyResult( + detectorId: String, + user: User, + ) { val adResultIndex = ".opendistro-anomaly-results-history-2020.10.17" try { createTestIndex(adResultIndex, anomalyResultIndexMapping()) @@ -2235,37 +2431,52 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { val twoMinsAgo = ZonedDateTime.now().minus(2, MINUTES).truncatedTo(MILLIS) val testTime = 
twoMinsAgo.toEpochSecond() * 1000 - val testResult1 = randomAnomalyResult( - detectorId = detectorId, executionEndTime = testTime, user = user, - anomalyGrade = 0.1 - ) + val testResult1 = + randomAnomalyResult( + detectorId = detectorId, + executionEndTime = testTime, + user = user, + anomalyGrade = 0.1, + ) indexDoc(adResultIndex, "1", testResult1) - val testResult2 = randomAnomalyResult( - detectorId = detectorId, executionEndTime = testTime, user = user, - anomalyGrade = 0.8 - ) + val testResult2 = + randomAnomalyResult( + detectorId = detectorId, + executionEndTime = testTime, + user = user, + anomalyGrade = 0.8, + ) indexDoc(adResultIndex, "2", testResult2) - val testResult3 = randomAnomalyResult( - detectorId = detectorId, executionEndTime = testTime, user = user, - anomalyGrade = 0.5 - ) + val testResult3 = + randomAnomalyResult( + detectorId = detectorId, + executionEndTime = testTime, + user = user, + anomalyGrade = 0.5, + ) indexDoc(adResultIndex, "3", testResult3) - val testResult4 = randomAnomalyResult( - detectorId = detectorId, executionEndTime = testTime, - user = User(user.name, listOf(), user.roles, user.customAttributes), - anomalyGrade = 0.9 - ) + val testResult4 = + randomAnomalyResult( + detectorId = detectorId, + executionEndTime = testTime, + user = User(user.name, listOf(), user.roles, user.customAttributes), + anomalyGrade = 0.9, + ) indexDoc(adResultIndex, "4", testResult4) // User is null - val testResult5 = randomAnomalyResultWithoutUser( - detectorId = detectorId, executionEndTime = testTime, - anomalyGrade = 0.75 - ) + val testResult5 = + randomAnomalyResultWithoutUser( + detectorId = detectorId, + executionEndTime = testTime, + anomalyGrade = 0.75, + ) indexDoc(adResultIndex, "5", testResult5) } - private fun verifyActionExecutionResultInAlert(alert: Alert, expectedResult: Map): - MutableMap { + private fun verifyActionExecutionResultInAlert( + alert: Alert, + expectedResult: Map, + ): MutableMap { val actionResult = mutableMapOf() 
for (result in alert.actionExecutionResults) { val expected = expectedResult[result.actionId] @@ -2275,7 +2486,10 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { return actionResult } - private fun verifyActionThrottleResults(output: MutableMap, expectedResult: Map) { + private fun verifyActionThrottleResults( + output: MutableMap, + expectedResult: Map, + ) { for (triggerResult in output.objectMap("trigger_results").values) { for (actionResult in triggerResult.objectMap("action_results").values) { val expected = expectedResult[actionResult["id"]] @@ -2288,7 +2502,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { private fun verifyActionThrottleResultsForBucketLevelMonitor( monitorRunResult: MutableMap, expectedEvents: Set, - expectedActionResults: Map + expectedActionResults: Map, ) { for (triggerResult in monitorRunResult.objectMap("trigger_results").values) { for (alertEvent in triggerResult.objectMap("action_results")) { @@ -2306,7 +2520,7 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { alert: Alert, monitor: Monitor, expectedState: State = ACTIVE, - expectNotification: Boolean = true + expectNotification: Boolean = true, ) { assertNotNull(alert.id) assertNotNull(alert.startTime) @@ -2334,15 +2548,18 @@ class MonitorRunnerServiceIT : AlertingRestTestCase() { } @Suppress("UNCHECKED_CAST") - /** helper that returns a field in a json map whose values are all json objects */ - private fun Map.objectMap(key: String): Map> { - return this[key] as Map> - } - - fun addIndexToAlias(index: String, alias: String) { + // helper that returns a field in a json map whose values are all json objects + private fun Map.objectMap( + key: String, + ): Map> = this[key] as Map> + + fun addIndexToAlias( + index: String, + alias: String, + ) { val request = Request("POST", "/_aliases") request.setJsonEntity( - """{"actions": [{"add": {"index": "$index","alias": "$alias"}} ]}""".trimIndent() + """{"actions": [{"add": {"index": "$index","alias": 
"$alias"}} ]}""".trimIndent(), ) try { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorTests.kt index f6ed78541..7b214ae81 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/MonitorTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/MonitorTests.kt @@ -9,9 +9,10 @@ import org.opensearch.commons.alerting.model.Trigger import org.opensearch.test.OpenSearchTestCase import java.lang.IllegalArgumentException import java.time.Instant +import kotlin.test.Test class MonitorTests : OpenSearchTestCase() { - + @Test fun `test enabled time`() { val monitor = randomQueryLevelMonitor() val enabledMonitor = monitor.copy(enabled = true, enabledTime = Instant.now()) @@ -30,6 +31,7 @@ class MonitorTests : OpenSearchTestCase() { } } + @Test fun `test max triggers`() { val monitor = randomQueryLevelMonitor() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/ODFERestTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/ODFERestTestCase.kt index e4d063ce2..9b6e6fbd5 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/ODFERestTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/ODFERestTestCase.kt @@ -36,14 +36,9 @@ import java.io.IOException */ abstract class ODFERestTestCase : OpenSearchRestTestCase() { + fun isHttps(): Boolean = System.getProperty("https", "false")!!.toBoolean() - fun isHttps(): Boolean { - return System.getProperty("https", "false")!!.toBoolean() - } - - fun securityEnabled(): Boolean { - return System.getProperty("security", "false")!!.toBoolean() - } + fun securityEnabled(): Boolean = System.getProperty("security", "false")!!.toBoolean() @Suppress("UNCHECKED_CAST") fun isNotificationPluginInstalled(): Boolean { @@ -60,17 +55,14 @@ abstract class ODFERestTestCase : OpenSearchRestTestCase() { return false } - override fun getProtocol(): String { - return if (isHttps()) { + override fun getProtocol(): String = + if 
(isHttps()) { "https" } else { "http" } - } - override fun preserveIndicesUponCompletion(): Boolean { - return true - } + override fun preserveIndicesUponCompletion(): Boolean = true open fun preserveODFEIndicesAfterTest(): Boolean = false @@ -82,31 +74,34 @@ abstract class ODFERestTestCase : OpenSearchRestTestCase() { val response = client().performRequest(Request("GET", "/_cat/indices?format=json&expand_wildcards=all")) val xContentType = MediaType.fromMediaType(response.entity.contentType) - xContentType.xContent().createParser( - NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - response.entity.content - ).use { parser -> - for (index in parser.list()) { - val jsonObject: Map<*, *> = index as java.util.HashMap<*, *> - val indexName: String = jsonObject["index"] as String - // .opendistro_security isn't allowed to delete from cluster - if (".opendistro_security" != indexName) { - var request = Request("DELETE", "/$indexName") - // TODO: remove PERMISSIVE option after moving system index access to REST API call - val options = RequestOptions.DEFAULT.toBuilder() - options.setWarningsHandler(WarningsHandler.PERMISSIVE) - request.options = options.build() - adminClient().performRequest(request) + xContentType + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + response.entity.content, + ).use { parser -> + for (index in parser.list()) { + val jsonObject: Map<*, *> = index as java.util.HashMap<*, *> + val indexName: String = jsonObject["index"] as String + // .opendistro_security isn't allowed to delete from cluster + if (".opendistro_security" != indexName) { + var request = Request("DELETE", "/$indexName") + // TODO: remove PERMISSIVE option after moving system index access to REST API call + val options = RequestOptions.DEFAULT.toBuilder() + options.setWarningsHandler(WarningsHandler.PERMISSIVE) + request.options = options.build() + adminClient().performRequest(request) + } 
} } - } } /** * Returns the REST client settings used for super-admin actions like cleaning up after the test has completed. */ - override fun restAdminSettings(): Settings { - return Settings + override fun restAdminSettings(): Settings = + Settings .builder() .put("http.port", 9200) .put(OPENSEARCH_SECURITY_SSL_HTTP_ENABLED, isHttps()) @@ -115,10 +110,12 @@ abstract class ODFERestTestCase : OpenSearchRestTestCase() { .put(OPENSEARCH_SECURITY_SSL_HTTP_KEYSTORE_PASSWORD, "changeit") .put(OPENSEARCH_SECURITY_SSL_HTTP_KEYSTORE_KEYPASSWORD, "changeit") .build() - } @Throws(IOException::class) - override fun buildClient(settings: Settings, hosts: Array): RestClient { + override fun buildClient( + settings: Settings, + hosts: Array, + ): RestClient { if (securityEnabled()) { val keystore = settings.get(OPENSEARCH_SECURITY_SSL_HTTP_KEYSTORE_FILEPATH) return when (keystore != null) { @@ -131,6 +128,7 @@ abstract class ODFERestTestCase : OpenSearchRestTestCase() { .setConnectionRequestTimeout(180000) .build() } + false -> { // create client with passed user val userName = System.getProperty("user") diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/PPLSQLMonitorRunnerIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/PPLSQLMonitorRunnerIT.kt index 070073994..71e891c4e 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/PPLSQLMonitorRunnerIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/PPLSQLMonitorRunnerIT.kt @@ -36,25 +36,27 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(2, MINUTES, "abc", 5) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 1, unit = MINUTES), - lookBackWindow = null, - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - expireDuration = 5, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - 
numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + lookBackWindow = null, + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + expireDuration = 5, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) // set the monitor execution timebox to 1 nanosecond to guarantee a timeout client().updateSettings(AlertingSettings.ALERT_V2_MONITOR_EXECUTION_MAX_DURATION.key, TimeValue.timeValueNanos(1L)) @@ -75,25 +77,27 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(2, MINUTES, "abc", 5) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 1, unit = MINUTES), - lookBackWindow = null, - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - expireDuration = 5, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + lookBackWindow = null, + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + expireDuration = 5, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = 
$TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) val versionBefore = pplMonitor.version @@ -118,25 +122,27 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { indexDocFromSomeTimeAgo(2, MINUTES, "def", 10) indexDocFromSomeTimeAgo(3, MINUTES, "ghi", 7) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 1, unit = MINUTES), - lookBackWindow = null, - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - expireDuration = 5, - mode = TriggerMode.PER_RESULT, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + lookBackWindow = null, + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + expireDuration = 5, + mode = TriggerMode.PER_RESULT, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -147,7 +153,8 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { assert(triggered) { "Monitor should have triggered but it didn't" } assertEquals( "A number of alerts matching the number of docs ingested (3) should have been generated", - 3, alertsGenerated + 3, + alertsGenerated, ) } @@ -163,25 +170,27 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { indexDocFromSomeTimeAgo(8, MINUTES, "ghi", 8) indexDocFromSomeTimeAgo(9, MINUTES, "ghi", 9) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = 
IntervalSchedule(interval = 1, unit = MINUTES), - lookBackWindow = null, - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - expireDuration = 5, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.CUSTOM, - customCondition = "eval result = max_num > 5", - numResultsCondition = null, - numResultsValue = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + lookBackWindow = null, + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + expireDuration = 5, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.CUSTOM, + customCondition = "eval result = max_num > 5", + numResultsCondition = null, + numResultsValue = null, + ), + ), + query = "source = $TEST_INDEX_NAME | stats max(number) as max_num by abc", ), - query = "source = $TEST_INDEX_NAME | stats max(number) as max_num by abc" ) - ) val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -205,25 +214,27 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { indexDocFromSomeTimeAgo(8, MINUTES, "ghi", 8) indexDocFromSomeTimeAgo(9, MINUTES, "ghi", 9) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 1, unit = MINUTES), - lookBackWindow = null, - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - expireDuration = 5, - mode = TriggerMode.PER_RESULT, - conditionType = ConditionType.CUSTOM, - customCondition = "eval evaluation = max_num > 5", - numResultsCondition = null, - numResultsValue = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + lookBackWindow = null, + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + expireDuration = 5, + mode = TriggerMode.PER_RESULT, + conditionType = 
ConditionType.CUSTOM, + customCondition = "eval evaluation = max_num > 5", + numResultsCondition = null, + numResultsValue = null, + ), + ), + query = "source = $TEST_INDEX_NAME | stats max(number) as max_num by abc", ), - query = "source = $TEST_INDEX_NAME | stats max(number) as max_num by abc" ) - ) val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -241,7 +252,8 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { assert(triggered) { "Monitor should have triggered but it didn't" } assertEquals( "A number of alerts matching the number of docs ingested (2) should have been generated", - 2, alertsGenerated + 2, + alertsGenerated, ) } @@ -249,26 +261,28 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(2, MINUTES, "abc", 5) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 1, unit = MINUTES), - lookBackWindow = 5, - timestampField = TIMESTAMP_FIELD, - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - expireDuration = 5, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + lookBackWindow = 5, + timestampField = TIMESTAMP_FIELD, + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + expireDuration = 5, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) val 
executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -284,26 +298,28 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(10, MINUTES, "abc", 5) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 1, unit = MINUTES), - lookBackWindow = 5, - timestampField = TIMESTAMP_FIELD, - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - expireDuration = 5, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + lookBackWindow = 5, + timestampField = TIMESTAMP_FIELD, + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + expireDuration = 5, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -319,24 +335,26 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(1, MINUTES, "abc", 5) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 20, unit = MINUTES), - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - expireDuration = 1L, - mode = TriggerMode.RESULT_SET, - conditionType = 
ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 20, unit = MINUTES), + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + expireDuration = 1L, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -367,19 +385,20 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { randomPPLMonitor( enabled = true, schedule = IntervalSchedule(interval = 1, unit = MINUTES), - triggers = listOf( - randomPPLTrigger( - throttleDuration = 100L, - expireDuration = 1L, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) - ), - query = "source = $TEST_INDEX_NAME | head 10" - ) + triggers = + listOf( + randomPPLTrigger( + throttleDuration = 100L, + expireDuration = 1L, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", + ), ) // sleep briefly so scheduled job can generate the alert @@ -407,24 +426,26 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(1, MINUTES, "abc", 5) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - 
schedule = IntervalSchedule(interval = 1, unit = MINUTES), - triggers = listOf( - randomPPLTrigger( - throttleDuration = 10, - expireDuration = 5, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + triggers = + listOf( + randomPPLTrigger( + throttleDuration = 10, + expireDuration = 5, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -450,24 +471,26 @@ class PPLSQLMonitorRunnerIT : AlertingRestTestCase() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(1, MINUTES, "abc", 5) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 30, unit = MINUTES), - triggers = listOf( - randomPPLTrigger( - throttleDuration = 20, - expireDuration = 5, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 30, unit = MINUTES), + triggers = + listOf( + randomPPLTrigger( + throttleDuration = 20, + expireDuration = 5, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + 
numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt index 75819d6aa..11b92d161 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/TestHelpers.kt @@ -108,14 +108,20 @@ fun randomQueryLevelMonitor( triggers: List = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false -): Monitor { - return Monitor( - name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, - schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, - uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + withMetadata: Boolean = false, +): Monitor = + Monitor( + name = name, + monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, + enabled = enabled, + inputs = inputs, + schedule = schedule, + triggers = triggers, + enabledTime = enabledTime, + lastUpdateTime = lastUpdateTime, + user = user, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf(), ) -} // Monitor of older versions without security. fun randomQueryLevelMonitorWithoutUser( @@ -126,64 +132,86 @@ fun randomQueryLevelMonitorWithoutUser( triggers: List = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false -): Monitor { - return Monitor( - name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, - schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = null, - uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + withMetadata: Boolean = false, +): Monitor = + Monitor( + name = name, + monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, + enabled = enabled, + inputs = inputs, + schedule = schedule, + triggers = triggers, + enabledTime = enabledTime, + lastUpdateTime = lastUpdateTime, + user = null, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf(), ) -} fun randomBucketLevelMonitor( name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), user: User = randomUser(), - inputs: List = listOf( - SearchInput( - emptyList(), - SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - .aggregation(TermsAggregationBuilder("test_agg").field("test_field")) - ) - ), + inputs: List = + listOf( + SearchInput( + emptyList(), + SearchSourceBuilder() + .query(QueryBuilders.matchAllQuery()) + .aggregation(TermsAggregationBuilder("test_agg").field("test_field")), + ), + ), schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), enabled: Boolean = randomBoolean(), triggers: List = (1..randomInt(10)).map { randomBucketLevelTrigger() }, enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false -): Monitor { - return Monitor( - name = name, monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, - schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, - uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + withMetadata: Boolean = false, +): Monitor = + Monitor( + name = name, + monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR.value, + enabled = enabled, + inputs = inputs, + schedule = schedule, + triggers = triggers, + enabledTime = enabledTime, + lastUpdateTime = lastUpdateTime, + user = user, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf(), ) -} fun randomBucketLevelMonitor( name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), user: User = randomUser(), - inputs: List = listOf( - SearchInput( - emptyList(), - SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - .aggregation(TermsAggregationBuilder("test_agg").field("test_field")) - ) - ), + inputs: List = + listOf( + SearchInput( + emptyList(), + SearchSourceBuilder() + .query(QueryBuilders.matchAllQuery()) + .aggregation(TermsAggregationBuilder("test_agg").field("test_field")), + ), + ), schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), enabled: Boolean = randomBoolean(), triggers: List = (1..randomInt(10)).map { randomBucketLevelTrigger() }, enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), withMetadata: Boolean = false, - dataSources: DataSources -): Monitor { - return Monitor( - name = name, monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, - schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, + dataSources: DataSources, +): Monitor = + Monitor( + name = name, + monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR.value, + enabled = enabled, + inputs = inputs, + schedule = schedule, + triggers = triggers, + enabledTime = enabledTime, + lastUpdateTime = lastUpdateTime, + user = user, uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf(), - dataSources = dataSources + dataSources = dataSources, ) -} fun randomClusterMetricsMonitor( name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), @@ -194,14 +222,20 @@ fun randomClusterMetricsMonitor( triggers: List = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false -): Monitor { - return Monitor( - name = name, monitorType = Monitor.MonitorType.CLUSTER_METRICS_MONITOR.value, enabled = enabled, inputs = inputs, - schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, - uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + withMetadata: Boolean = false, +): Monitor = + Monitor( + name = name, + monitorType = Monitor.MonitorType.CLUSTER_METRICS_MONITOR.value, + enabled = enabled, + inputs = inputs, + schedule = schedule, + triggers = triggers, + enabledTime = enabledTime, + lastUpdateTime = lastUpdateTime, + user = user, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf(), ) -} fun randomDocumentLevelMonitor( name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), @@ -212,14 +246,20 @@ fun randomDocumentLevelMonitor( triggers: List = (1..randomInt(10)).map { randomQueryLevelTrigger() }, enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - withMetadata: Boolean = false -): Monitor { - return Monitor( - name = name, monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, - schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, - uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + withMetadata: Boolean = false, +): Monitor = + Monitor( + name = name, + monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR.value, + enabled = enabled, + inputs = inputs, + schedule = schedule, + triggers = triggers, + enabledTime = enabledTime, + lastUpdateTime = lastUpdateTime, + user = user, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf(), ) -} fun randomDocumentLevelMonitor( name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), @@ -233,15 +273,23 @@ fun randomDocumentLevelMonitor( withMetadata: Boolean = false, dataSources: DataSources, ignoreFindingsAndAlerts: Boolean? = false, - owner: String? = null -): Monitor { - return Monitor( - name = name, monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, - schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, - uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf(), dataSources = dataSources, - shouldCreateSingleAlertForFindings = ignoreFindingsAndAlerts, owner = owner + owner: String? 
= null, +): Monitor = + Monitor( + name = name, + monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR.value, + enabled = enabled, + inputs = inputs, + schedule = schedule, + triggers = triggers, + enabledTime = enabledTime, + lastUpdateTime = lastUpdateTime, + user = user, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf(), + dataSources = dataSources, + shouldCreateSingleAlertForFindings = ignoreFindingsAndAlerts, + owner = owner, ) -} fun randomWorkflow( id: String = Workflow.NO_ID, @@ -253,14 +301,14 @@ fun randomWorkflow( enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), triggers: List = emptyList(), - auditDelegateMonitorAlerts: Boolean? = true + auditDelegateMonitorAlerts: Boolean? = true, ): Workflow { val delegates = mutableListOf() if (!monitorIds.isNullOrEmpty()) { delegates.add(Delegate(1, monitorIds[0])) for (i in 1 until monitorIds.size) { // Order of monitors in workflow will be the same like forwarded meaning that the first monitorId will be used as second monitor chained finding - delegates.add(Delegate(i + 1, monitorIds [i], ChainedMonitorFindings(monitorIds[i - 1]))) + delegates.add(Delegate(i + 1, monitorIds[i], ChainedMonitorFindings(monitorIds[i - 1]))) } } @@ -277,7 +325,7 @@ fun randomWorkflow( version = -1L, schemaVersion = 0, triggers = triggers, - auditDelegateMonitorAlerts = auditDelegateMonitorAlerts + auditDelegateMonitorAlerts = auditDelegateMonitorAlerts, ) } @@ -290,9 +338,9 @@ fun randomWorkflowWithDelegates( enabled: Boolean = randomBoolean(), enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - triggers: List = emptyList() -): Workflow { - return Workflow( + triggers: List = emptyList(), +): Workflow = + Workflow( id = id, name = name, enabled = enabled, @@ -304,9 +352,8 @@ fun randomWorkflowWithDelegates( inputs = listOf(CompositeInput(Sequence(delegates))), version = -1L, schemaVersion = 0, - triggers = triggers + triggers = triggers, ) -} fun randomPPLMonitor( name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), @@ -320,9 +367,9 @@ fun randomPPLMonitor( triggers: List = List(randomIntBetween(1, 5)) { randomPPLTrigger() }, user: User? = randomUser(), queryLanguage: QueryLanguage = QueryLanguage.PPL, - query: String = "source = $TEST_INDEX_NAME | head 10" -): PPLSQLMonitor { - return PPLSQLMonitor( + query: String = "source = $TEST_INDEX_NAME | head 10", +): PPLSQLMonitor = + PPLSQLMonitor( name = name, enabled = enabled, schedule = schedule, @@ -334,9 +381,8 @@ fun randomPPLMonitor( triggers = triggers, user = user, queryLanguage = queryLanguage, - query = query + query = query, ) -} fun randomQueryLevelTrigger( id: String = UUIDs.base64UUID(), @@ -344,16 +390,15 @@ fun randomQueryLevelTrigger( severity: String = "1", condition: Script = randomScript(), actions: List = mutableListOf(), - destinationId: String = "" -): QueryLevelTrigger { - return QueryLevelTrigger( + destinationId: String = "", +): QueryLevelTrigger = + QueryLevelTrigger( id = id, name = name, severity = severity, condition = condition, - actions = if (actions.isEmpty()) (0..randomInt(10)).map { randomAction(destinationId = destinationId) } else actions + actions = if (actions.isEmpty()) (0..randomInt(10)).map { randomAction(destinationId = destinationId) } else actions, ) -} fun randomBucketLevelTrigger( id: String = UUIDs.base64UUID(), @@ -361,19 +406,21 @@ fun randomBucketLevelTrigger( severity: String = "1", bucketSelector: 
BucketSelectorExtAggregationBuilder = randomBucketSelectorExtAggregationBuilder(name = id), actions: List = mutableListOf(), - destinationId: String = "" -): BucketLevelTrigger { - return BucketLevelTrigger( + destinationId: String = "", +): BucketLevelTrigger = + BucketLevelTrigger( id = id, name = name, severity = severity, bucketSelector = bucketSelector, - actions = if (actions.isEmpty()) randomActionsForBucketLevelTrigger(destinationId = destinationId) else actions + actions = if (actions.isEmpty()) randomActionsForBucketLevelTrigger(destinationId = destinationId) else actions, ) -} -fun randomActionsForBucketLevelTrigger(min: Int = 0, max: Int = 10, destinationId: String = ""): List = - (min..randomInt(max)).map { randomActionWithPolicy(destinationId = destinationId) } +fun randomActionsForBucketLevelTrigger( + min: Int = 0, + max: Int = 10, + destinationId: String = "", +): List = (min..randomInt(max)).map { randomActionWithPolicy(destinationId = destinationId) } fun randomDocumentLevelTrigger( id: String = UUIDs.base64UUID(), @@ -381,18 +428,20 @@ fun randomDocumentLevelTrigger( severity: String = "1", condition: Script = randomScript(), actions: List = mutableListOf(), - destinationId: String = "" -): DocumentLevelTrigger { - return DocumentLevelTrigger( + destinationId: String = "", +): DocumentLevelTrigger = + DocumentLevelTrigger( id = id, name = name, severity = severity, condition = condition, - actions = if (actions.isEmpty() && destinationId.isNotBlank()) - (0..randomInt(10)).map { randomAction(destinationId = destinationId) } - else actions + actions = + if (actions.isEmpty() && destinationId.isNotBlank()) { + (0..randomInt(10)).map { randomAction(destinationId = destinationId) } + } else { + actions + }, ) -} // random PPLTrigger defaults to a number_of_results trigger, because a custom condition // would require knowledge of the PPL Monitor's query @@ -412,9 +461,9 @@ fun randomPPLTrigger( conditionType: ConditionType = 
ConditionType.NUMBER_OF_RESULTS, numResultsCondition: NumResultsCondition? = NumResultsCondition.entries.random(), numResultsValue: Long? = randomLongBetween(1L, 50L), - customCondition: String? = null -): PPLSQLTrigger { - return PPLSQLTrigger( + customCondition: String? = null, +): PPLSQLTrigger = + PPLSQLTrigger( id = id, name = name, severity = severity, @@ -426,26 +475,21 @@ fun randomPPLTrigger( conditionType = conditionType, numResultsCondition = numResultsCondition, numResultsValue = numResultsValue, - customCondition = customCondition + customCondition = customCondition, ) -} fun randomBucketSelectorExtAggregationBuilder( name: String = OpenSearchRestTestCase.randomAlphaOfLength(10), bucketsPathsMap: MutableMap = mutableMapOf("avg" to "10"), script: Script = randomBucketSelectorScript(params = bucketsPathsMap), parentBucketPath: String = "testPath", - filter: BucketSelectorExtFilter = BucketSelectorExtFilter(IncludeExclude("foo*", "bar*")) -): BucketSelectorExtAggregationBuilder { - return BucketSelectorExtAggregationBuilder(name, bucketsPathsMap, script, parentBucketPath, filter) -} + filter: BucketSelectorExtFilter = BucketSelectorExtFilter(IncludeExclude("foo*", "bar*")), +): BucketSelectorExtAggregationBuilder = BucketSelectorExtAggregationBuilder(name, bucketsPathsMap, script, parentBucketPath, filter) fun randomBucketSelectorScript( idOrCode: String = "params.avg >= 0", - params: Map = mutableMapOf("avg" to "10") -): Script { - return Script(Script.DEFAULT_SCRIPT_TYPE, Script.DEFAULT_SCRIPT_LANG, idOrCode, emptyMap(), params) -} + params: Map = mutableMapOf("avg" to "10"), +): Script = Script(Script.DEFAULT_SCRIPT_TYPE, Script.DEFAULT_SCRIPT_LANG, idOrCode, emptyMap(), params) fun randomEmailAccount( salt: String = "", @@ -455,28 +499,26 @@ fun randomEmailAccount( port: Int = randomIntBetween(1, 100), method: EmailAccount.MethodType = randomEmailAccountMethod(), username: SecureString? = null, - password: SecureString? 
= null -): EmailAccount { - return EmailAccount( + password: SecureString? = null, +): EmailAccount = + EmailAccount( name = name, email = email, host = host, port = port, method = method, username = username, - password = password + password = password, ) -} fun randomEmailGroup( salt: String = "", name: String = salt + OpenSearchRestTestCase.randomAlphaOfLength(10), - emails: List = (1..randomInt(10)).map { - EmailEntry(email = salt + OpenSearchRestTestCase.randomAlphaOfLength(5) + "@email.com") - } -): EmailGroup { - return EmailGroup(name = name, emails = emails) -} + emails: List = + (1..randomInt(10)).map { + EmailEntry(email = salt + OpenSearchRestTestCase.randomAlphaOfLength(5) + "@email.com") + }, +): EmailGroup = EmailGroup(name = name, emails = emails) fun randomScript(source: String = "return " + OpenSearchRestTestCase.randomBoolean().toString()): Script = Script(source) @@ -493,6 +535,7 @@ val TEST_HR_INDEX = "hr_data" val TEST_NON_HR_INDEX = "not_hr_data" val TEST_HR_ROLE = "hr_role" val TEST_HR_BACKEND_ROLE = "HR" + // Using a triple-quote string for the query so escaped quotes are kept as-is // in the request made using triple-quote strings (i.e. createIndexRoleWithDocLevelSecurity). // Removing the escape slash in the request causes the security API role request to fail with parsing exception. 
@@ -500,7 +543,7 @@ val TERM_DLS_QUERY = """{\"term\": { \"accessible\": true}}""" fun randomTemplateScript( source: String, - params: Map = emptyMap() + params: Map = emptyMap(), ): Script = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, source, params) fun randomAction( @@ -509,7 +552,7 @@ fun randomAction( subjectTemplate: Script = template, destinationId: String = "abc", throttleEnabled: Boolean = false, - throttle: Throttle = randomThrottle() + throttle: Throttle = randomThrottle(), ) = Action(name, destinationId, subjectTemplate, template, throttleEnabled, throttle, actionExecutionPolicy = null) fun randomActionWithPolicy( @@ -518,40 +561,40 @@ fun randomActionWithPolicy( destinationId: String = "", throttleEnabled: Boolean = false, throttle: Throttle = randomThrottle(), - actionExecutionPolicy: ActionExecutionPolicy? = randomActionExecutionPolicy() -): Action { - return if (actionExecutionPolicy?.actionExecutionScope is PerExecutionActionScope) { + actionExecutionPolicy: ActionExecutionPolicy? 
= randomActionExecutionPolicy(), +): Action = + if (actionExecutionPolicy?.actionExecutionScope is PerExecutionActionScope) { // Return null for throttle when using PerExecutionActionScope since throttling is currently not supported for it Action(name, destinationId, template, template, throttleEnabled, null, actionExecutionPolicy = actionExecutionPolicy) } else { Action(name, destinationId, template, template, throttleEnabled, throttle, actionExecutionPolicy = actionExecutionPolicy) } -} fun randomThrottle( value: Int = randomIntBetween(60, 120), - unit: ChronoUnit = ChronoUnit.MINUTES + unit: ChronoUnit = ChronoUnit.MINUTES, ) = Throttle(value, unit) -fun randomActionExecutionPolicy( - actionExecutionScope: ActionExecutionScope = randomActionExecutionScope() -) = ActionExecutionPolicy(actionExecutionScope) +fun randomActionExecutionPolicy(actionExecutionScope: ActionExecutionScope = randomActionExecutionScope()) = + ActionExecutionPolicy(actionExecutionScope) -fun randomActionExecutionScope(): ActionExecutionScope { - return if (randomBoolean()) { +fun randomActionExecutionScope(): ActionExecutionScope = + if (randomBoolean()) { val alertCategories = AlertCategory.values() PerAlertActionScope(actionableAlerts = (1..randomInt(alertCategories.size)).map { alertCategories[it - 1] }.toSet()) } else { PerExecutionActionScope() } -} fun randomAlert(monitor: Monitor = randomQueryLevelMonitor()): Alert { val trigger = randomQueryLevelTrigger() val actionExecutionResults = mutableListOf(randomActionExecutionResult(), randomActionExecutionResult()) return Alert( - monitor, trigger, Instant.now().truncatedTo(ChronoUnit.MILLIS), null, - actionExecutionResults = actionExecutionResults + monitor, + trigger, + Instant.now().truncatedTo(ChronoUnit.MILLIS), + null, + actionExecutionResults = actionExecutionResults, ) } @@ -570,9 +613,9 @@ fun randomAlertV2( triggeredTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), errorMessage: String? 
= "sample error message", severity: Severity = Severity.entries.random(), - executionId: String? = UUIDs.base64UUID() -): AlertV2 { - return AlertV2( + executionId: String? = UUIDs.base64UUID(), +): AlertV2 = + AlertV2( id = id, version = version, schemaVersion = schemaVersion, @@ -589,24 +632,19 @@ fun randomAlertV2( severity = severity, executionId = executionId, ) -} fun randomDocLevelQuery( id: String = OpenSearchRestTestCase.randomAlphaOfLength(10), query: String = OpenSearchRestTestCase.randomAlphaOfLength(10), name: String = "${randomInt(5)}", - tags: List = mutableListOf(0..randomInt(10)).map { OpenSearchRestTestCase.randomAlphaOfLength(10) } -): DocLevelQuery { - return DocLevelQuery(id = id, query = query, name = name, tags = tags, fields = listOf()) -} + tags: List = mutableListOf(0..randomInt(10)).map { OpenSearchRestTestCase.randomAlphaOfLength(10) }, +): DocLevelQuery = DocLevelQuery(id = id, query = query, name = name, tags = tags, fields = listOf()) fun randomDocLevelMonitorInput( description: String = OpenSearchRestTestCase.randomAlphaOfLength(randomInt(10)), indices: List = listOf(1..randomInt(10)).map { OpenSearchRestTestCase.randomAlphaOfLength(10) }, - queries: List = listOf(1..randomInt(10)).map { randomDocLevelQuery() } -): DocLevelMonitorInput { - return DocLevelMonitorInput(description = description, indices = indices, queries = queries) -} + queries: List = listOf(1..randomInt(10)).map { randomDocLevelQuery() }, +): DocLevelMonitorInput = DocLevelMonitorInput(description = description, indices = indices, queries = queries) fun randomFinding( id: String = OpenSearchRestTestCase.randomAlphaOfLength(10), @@ -615,30 +653,33 @@ fun randomFinding( monitorName: String = OpenSearchRestTestCase.randomAlphaOfLength(10), index: String = OpenSearchRestTestCase.randomAlphaOfLength(10), docLevelQueries: List = listOf(randomDocLevelQuery()), - timestamp: Instant = Instant.now() -): Finding { - return Finding( + timestamp: Instant = Instant.now(), +): 
Finding = + Finding( id = id, relatedDocIds = relatedDocIds, monitorId = monitorId, monitorName = monitorName, index = index, docLevelQueries = docLevelQueries, - timestamp = timestamp + timestamp = timestamp, ) -} fun randomAlertWithAggregationResultBucket(monitor: Monitor = randomBucketLevelMonitor()): Alert { val trigger = randomBucketLevelTrigger() val actionExecutionResults = mutableListOf(randomActionExecutionResult(), randomActionExecutionResult()) return Alert( - monitor, trigger, Instant.now().truncatedTo(ChronoUnit.MILLIS), null, + monitor, + trigger, + Instant.now().truncatedTo(ChronoUnit.MILLIS), + null, actionExecutionResults = actionExecutionResults, - aggregationResultBucket = AggregationResultBucket( - "parent_bucket_path_1", - listOf("bucket_key_1"), - mapOf("k1" to "val1", "k2" to "val2") - ) + aggregationResultBucket = + AggregationResultBucket( + "parent_bucket_path_1", + listOf("bucket_key_1"), + mapOf("k1" to "val1", "k2" to "val2"), + ), ) } @@ -651,7 +692,7 @@ fun randomEmailAccountMethod(): EmailAccount.MethodType { fun randomActionExecutionResult( actionId: String = UUIDs.base64UUID(), lastExecutionTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), - throttledCount: Int = randomInt() + throttledCount: Int = randomInt(), ) = ActionExecutionResult(actionId, lastExecutionTime, throttledCount) fun randomQueryLevelMonitorRunResult(): MonitorRunResult { @@ -665,7 +706,7 @@ fun randomQueryLevelMonitorRunResult(): MonitorRunResult() @@ -713,38 +752,39 @@ fun randomQueryLevelTriggerRunResult(): QueryLevelTriggerRunResult { fun randomClusterMetricsInput( path: String = ClusterMetricsInput.ClusterMetricType.CLUSTER_HEALTH.defaultPath, pathParams: String = "", - url: String = "" -): ClusterMetricsInput { - return ClusterMetricsInput(path, pathParams, url) -} + url: String = "", +): ClusterMetricsInput = ClusterMetricsInput(path, pathParams, url) fun randomBucketLevelTriggerRunResult(): BucketLevelTriggerRunResult { val map = mutableMapOf() 
map.plus(Pair("key1", randomActionRunResult())) map.plus(Pair("key2", randomActionRunResult())) - val aggBucket1 = AggregationResultBucket( - "parent_bucket_path_1", - listOf("bucket_key_1"), - mapOf("k1" to "val1", "k2" to "val2") - ) - val aggBucket2 = AggregationResultBucket( - "parent_bucket_path_2", - listOf("bucket_key_2"), - mapOf("k1" to "val1", "k2" to "val2") - ) + val aggBucket1 = + AggregationResultBucket( + "parent_bucket_path_1", + listOf("bucket_key_1"), + mapOf("k1" to "val1", "k2" to "val2"), + ) + val aggBucket2 = + AggregationResultBucket( + "parent_bucket_path_2", + listOf("bucket_key_2"), + mapOf("k1" to "val1", "k2" to "val2"), + ) val actionResultsMap: MutableMap> = mutableMapOf() actionResultsMap[aggBucket1.getBucketKeysHash()] = map actionResultsMap[aggBucket2.getBucketKeysHash()] = map return BucketLevelTriggerRunResult( - "trigger-name", null, + "trigger-name", + null, mapOf( aggBucket1.getBucketKeysHash() to aggBucket1, - aggBucket2.getBucketKeysHash() to aggBucket2 + aggBucket2.getBucketKeysHash() to aggBucket2, ), - actionResultsMap + actionResultsMap, ) } @@ -756,7 +796,7 @@ fun randomDocumentLevelTriggerRunResult(): DocumentLevelTriggerRunResult { "trigger-name", mutableListOf(UUIDs.randomBase64UUID().toString()), null, - mutableMapOf(Pair("alertId", map)) + mutableMapOf(Pair("alertId", map)), ) } @@ -765,8 +805,12 @@ fun randomActionRunResult(): ActionRunResult { map.plus(Pair("key1", "val1")) map.plus(Pair("key2", "val2")) return ActionRunResult( - "1234", "test-action", map, - false, Instant.now(), null + "1234", + "test-action", + map, + false, + Instant.now(), + null, ) } @@ -775,21 +819,18 @@ fun Alert.toJsonString(): String { return this.toXContent(builder, ToXContent.EMPTY_PARAMS).string() } -fun randomUser(): User { - return User( +fun randomUser(): User = + User( OpenSearchRestTestCase.randomAlphaOfLength(10), listOf( OpenSearchRestTestCase.randomAlphaOfLength(10), - OpenSearchRestTestCase.randomAlphaOfLength(10) + 
OpenSearchRestTestCase.randomAlphaOfLength(10), ), listOf(OpenSearchRestTestCase.randomAlphaOfLength(10), ALL_ACCESS_ROLE), mapOf("test_attr" to "test"), ) -} -fun randomUserEmpty(): User { - return User("", listOf(), listOf(), mapOf()) -} +fun randomUserEmpty(): User = User("", listOf(), listOf(), mapOf()) fun EmailAccount.toJsonString(): String { val builder = XContentFactory.jsonBuilder() @@ -812,7 +853,7 @@ fun RestClient.makeRequest( endpoint: String, params: Map = emptyMap(), entity: HttpEntity? = null, - vararg headers: Header + vararg headers: Header, ): Response { val request = Request(method, endpoint) // TODO: remove PERMISSIVE option after moving system index access to REST API call @@ -837,7 +878,7 @@ fun RestClient.makeRequest( method: String, endpoint: String, entity: HttpEntity? = null, - vararg headers: Header + vararg headers: Header, ): Response { val request = Request(method, endpoint) val options = RequestOptions.DEFAULT.toBuilder() @@ -851,9 +892,7 @@ fun RestClient.makeRequest( return performRequest(request) } -fun builder(): XContentBuilder { - return XContentBuilder.builder(XContentType.JSON.xContent()) -} +fun builder(): XContentBuilder = XContentBuilder.builder(XContentType.JSON.xContent()) fun parser(xc: String): XContentParser { val parser = XContentType.JSON.xContent().createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, xc) @@ -861,17 +900,16 @@ fun parser(xc: String): XContentParser { return parser } -fun xContentRegistry(): NamedXContentRegistry { - return NamedXContentRegistry( +fun xContentRegistry(): NamedXContentRegistry = + NamedXContentRegistry( listOf( SearchInput.XCONTENT_REGISTRY, DocLevelMonitorInput.XCONTENT_REGISTRY, QueryLevelTrigger.XCONTENT_REGISTRY, BucketLevelTrigger.XCONTENT_REGISTRY, - DocumentLevelTrigger.XCONTENT_REGISTRY - ) + SearchModule(Settings.EMPTY, emptyList()).namedXContents + DocumentLevelTrigger.XCONTENT_REGISTRY, + ) + SearchModule(Settings.EMPTY, emptyList()).namedXContents, ) -} fun 
assertUserNull(map: Map) { val user = map["user"] @@ -892,45 +930,49 @@ fun randomChainedAlertTrigger( severity: String = "1", condition: Script = randomScript(), actions: List = mutableListOf(), - destinationId: String = "" -): ChainedAlertTrigger { - return ChainedAlertTrigger( + destinationId: String = "", +): ChainedAlertTrigger = + ChainedAlertTrigger( id = id, name = name, severity = severity, condition = condition, - actions = if (actions.isEmpty() && destinationId.isNotBlank()) { - (0..randomInt(10)).map { randomAction(destinationId = destinationId) } - } else actions + actions = + if (actions.isEmpty() && destinationId.isNotBlank()) { + (0..randomInt(10)).map { randomAction(destinationId = destinationId) } + } else { + actions + }, ) -} fun randomAlertContext( alert: Alert = randomAlert(), - associatedQueries: List? = (-1..2).random().takeIf { it != -1 }?.let { - (0..it).map { randomDocLevelQuery() } - }, - sampleDocs: List>? = (-1..2).random().takeIf { it != -1 }?.let { - (0..it).map { - // Using 'randomFinding' to mimic documents in an index. - randomFinding().asTemplateArg() - } - } -): AlertContext { - return AlertContext( + associatedQueries: List? = + (-1..2).random().takeIf { it != -1 }?.let { + (0..it).map { randomDocLevelQuery() } + }, + sampleDocs: List>? = + (-1..2).random().takeIf { it != -1 }?.let { + (0..it).map { + // Using 'randomFinding' to mimic documents in an index. 
+ randomFinding().asTemplateArg() + } + }, +): AlertContext = + AlertContext( alert = alert, associatedQueries = associatedQueries, - sampleDocs = sampleDocs + sampleDocs = sampleDocs, ) -} -@Suppress("UNCHECKED_CAST") /** helper that returns a field in a json map whose values are all json objects */ -fun Map.objectMap(key: String): Map> { - return this[key] as Map> -} +@Suppress("UNCHECKED_CAST") +fun Map.objectMap(key: String): Map> = this[key] as Map> -fun assertPplMonitorsEqual(pplMonitor1: PPLSQLMonitor, pplMonitor2: PPLSQLMonitor) { +fun assertPplMonitorsEqual( + pplMonitor1: PPLSQLMonitor, + pplMonitor2: PPLSQLMonitor, +) { // note: Get and Search Monitor responses do not include User information by // design, so that check is skipped @@ -954,11 +996,14 @@ fun assertPplMonitorsEqual(pplMonitor1: PPLSQLMonitor, pplMonitor2: PPLSQLMonito } } -fun assertPplTriggersEqual(pplTrigger1: PPLSQLTrigger, pplTrigger2: PPLSQLTrigger) { +fun assertPplTriggersEqual( + pplTrigger1: PPLSQLTrigger, + pplTrigger2: PPLSQLTrigger, +) { assertEquals( "Monitor trigger IDs not equal", pplTrigger1.id, - pplTrigger2.id + pplTrigger2.id, ) val id = pplTrigger1.id @@ -966,124 +1011,127 @@ fun assertPplTriggersEqual(pplTrigger1: PPLSQLTrigger, pplTrigger2: PPLSQLTrigge assertEquals( "Monitor trigger $id names not equal", pplTrigger1.name, - pplTrigger2.name + pplTrigger2.name, ) assertEquals( "Monitor trigger $id severities not equal", pplTrigger1.severity, - pplTrigger2.severity + pplTrigger2.severity, ) assertEquals( "Monitor trigger $id throttle durations not equal", pplTrigger1.throttleDuration, - pplTrigger2.throttleDuration + pplTrigger2.throttleDuration, ) assertEquals( "Monitor trigger $id expire durations not equal", pplTrigger1.expireDuration, - pplTrigger2.expireDuration + pplTrigger2.expireDuration, ) assertEquals( "Monitor trigger $id modes not equal", pplTrigger1.mode, - pplTrigger2.mode + pplTrigger2.mode, ) assertEquals( "Monitor trigger $id condition types not equal", 
pplTrigger1.conditionType, - pplTrigger2.conditionType + pplTrigger2.conditionType, ) assertEquals( "Monitor trigger $id number_of_results conditions not equal", pplTrigger1.numResultsCondition, - pplTrigger2.numResultsCondition + pplTrigger2.numResultsCondition, ) assertEquals( "Monitor trigger $id number_of_results values not equal", pplTrigger1.numResultsValue, - pplTrigger2.numResultsValue + pplTrigger2.numResultsValue, ) assertEquals( "Monitor trigger $id custom conditions not equal", pplTrigger1.customCondition, - pplTrigger2.customCondition + pplTrigger2.customCondition, ) } -fun assertAlertV2sEqual(alert1: AlertV2, alert2: AlertV2) { +fun assertAlertV2sEqual( + alert1: AlertV2, + alert2: AlertV2, +) { assertEquals( "AlertV2 IDs are not equal", alert1.id, - alert2.id + alert2.id, ) assertEquals( "AlertV2 versions are not equal", alert1.version, - alert2.version + alert2.version, ) assertEquals( "AlertV2 schema versions are not equal", alert1.schemaVersion, - alert2.schemaVersion + alert2.schemaVersion, ) assertEquals( "AlertV2 monitor IDs are not equal", alert1.monitorId, - alert2.monitorId + alert2.monitorId, ) assertEquals( "AlertV2 monitor names are not equal", alert1.monitorName, - alert2.monitorName + alert2.monitorName, ) assertEquals( "AlertV2 monitor versions are not equal", alert1.monitorVersion, - alert2.monitorVersion + alert2.monitorVersion, ) assertEquals( "AlertV2 monitor users are not equal", alert1.monitorUser.toString(), - alert2.monitorUser.toString() + alert2.monitorUser.toString(), ) assertEquals( "AlertV2 trigger IDs are not equal", alert1.triggerId, - alert2.triggerId + alert2.triggerId, ) assertEquals( "AlertV2 trigger names are not equal", alert1.triggerName, - alert2.triggerName + alert2.triggerName, ) assertEquals( "AlertV2 queries are not equal", alert1.query, - alert2.query + alert2.query, ) assertEquals( "AlertV2 query results are not equal", alert1.queryResults, - alert2.queryResults + alert2.queryResults, ) assertEquals( 
"AlertV2 triggered times are not equal", alert1.triggeredTime, - alert2.triggeredTime + alert2.triggeredTime, ) assertEquals( "AlertV2 error messages are not equal", alert1.errorMessage, - alert2.errorMessage + alert2.errorMessage, ) assertEquals( "AlertV2 severities are not equal", alert1.severity, - alert2.severity + alert2.severity, ) assertEquals( "AlertV2 execution IDs are not equal", alert1.executionId, - alert2.executionId + alert2.executionId, ) } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/TriggerServiceTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/TriggerServiceTests.kt index 89e62756b..f5fbacbe5 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/TriggerServiceTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/TriggerServiceTests.kt @@ -29,83 +29,87 @@ class TriggerServiceTests : OpenSearchTestCase() { } fun `test run bucket level trigger with bucket key as int`() { - val bucketSelectorExtAggregationBuilder = randomBucketSelectorExtAggregationBuilder( - bucketsPathsMap = mutableMapOf("_count" to "_count", "_key" to "_key"), - script = randomScript(source = "params._count > 0"), - parentBucketPath = "status_code" - ) + val bucketSelectorExtAggregationBuilder = + randomBucketSelectorExtAggregationBuilder( + bucketsPathsMap = mutableMapOf("_count" to "_count", "_key" to "_key"), + script = randomScript(source = "params._count > 0"), + parentBucketPath = "status_code", + ) val trigger = randomBucketLevelTrigger(bucketSelector = bucketSelectorExtAggregationBuilder) val monitor = randomBucketLevelMonitor(triggers = listOf(trigger)) - val inputResultsStr = "{\n" + - " \"_shards\": {\n" + - " \"total\": 1,\n" + - " \"failed\": 0,\n" + - " \"successful\": 1,\n" + - " \"skipped\": 0\n" + - " },\n" + - " \"hits\": {\n" + - " \"hits\": [\n" + - " {\n" + - " \"_index\": \"sample-http-responses\",\n" + - " \"_type\": \"http\",\n" + - " \"_source\": {\n" + - " \"status_code\": 100,\n" + - " \"http_4xx\": 0,\n" + - 
" \"http_3xx\": 0,\n" + - " \"http_5xx\": 0,\n" + - " \"http_2xx\": 0,\n" + - " \"timestamp\": 100000,\n" + - " \"http_1xx\": 1\n" + - " },\n" + - " \"_id\": 1,\n" + - " \"_score\": 1\n" + - " }\n" + - " ],\n" + - " \"total\": {\n" + - " \"value\": 4,\n" + - " \"relation\": \"eq\"\n" + - " },\n" + - " \"max_score\": 1\n" + - " },\n" + - " \"took\": 37,\n" + - " \"timed_out\": false,\n" + - " \"aggregations\": {\n" + - " \"status_code\": {\n" + - " \"doc_count_error_upper_bound\": 0,\n" + - " \"sum_other_doc_count\": 0,\n" + - " \"buckets\": [\n" + - " {\n" + - " \"doc_count\": 2,\n" + - " \"key\": 100\n" + - " },\n" + - " {\n" + - " \"doc_count\": 1,\n" + - " \"key\": 102\n" + - " },\n" + - " {\n" + - " \"doc_count\": 1,\n" + - " \"key\": 201\n" + - " }\n" + - " ]\n" + - " },\n" + - " \"${trigger.id}\": {\n" + - " \"parent_bucket_path\": \"status_code\",\n" + - " \"bucket_indices\": [\n" + - " 0,\n" + - " 1,\n" + - " 2\n" + - " ]\n" + - " }\n" + - " }\n" + - "}" + val inputResultsStr = + "{\n" + + " \"_shards\": {\n" + + " \"total\": 1,\n" + + " \"failed\": 0,\n" + + " \"successful\": 1,\n" + + " \"skipped\": 0\n" + + " },\n" + + " \"hits\": {\n" + + " \"hits\": [\n" + + " {\n" + + " \"_index\": \"sample-http-responses\",\n" + + " \"_type\": \"http\",\n" + + " \"_source\": {\n" + + " \"status_code\": 100,\n" + + " \"http_4xx\": 0,\n" + + " \"http_3xx\": 0,\n" + + " \"http_5xx\": 0,\n" + + " \"http_2xx\": 0,\n" + + " \"timestamp\": 100000,\n" + + " \"http_1xx\": 1\n" + + " },\n" + + " \"_id\": 1,\n" + + " \"_score\": 1\n" + + " }\n" + + " ],\n" + + " \"total\": {\n" + + " \"value\": 4,\n" + + " \"relation\": \"eq\"\n" + + " },\n" + + " \"max_score\": 1\n" + + " },\n" + + " \"took\": 37,\n" + + " \"timed_out\": false,\n" + + " \"aggregations\": {\n" + + " \"status_code\": {\n" + + " \"doc_count_error_upper_bound\": 0,\n" + + " \"sum_other_doc_count\": 0,\n" + + " \"buckets\": [\n" + + " {\n" + + " \"doc_count\": 2,\n" + + " \"key\": 100\n" + + " },\n" + + " {\n" + + 
" \"doc_count\": 1,\n" + + " \"key\": 102\n" + + " },\n" + + " {\n" + + " \"doc_count\": 1,\n" + + " \"key\": 201\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"${trigger.id}\": {\n" + + " \"parent_bucket_path\": \"status_code\",\n" + + " \"bucket_indices\": [\n" + + " 0,\n" + + " 1,\n" + + " 2\n" + + " ]\n" + + " }\n" + + " }\n" + + "}" - val parser = XContentType.JSON.xContent() - .createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - inputResultsStr - ) + val parser = + XContentType.JSON + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + inputResultsStr, + ) val inputResults = parser.map() @@ -118,135 +122,139 @@ class TriggerServiceTests : OpenSearchTestCase() { } fun `test run bucket level trigger with bucket key as map`() { - val bucketSelectorExtAggregationBuilder = randomBucketSelectorExtAggregationBuilder( - bucketsPathsMap = mutableMapOf("_count" to "_count", "_key" to "_key"), - script = randomScript(source = "params._count > 0"), - parentBucketPath = "status_code" - ) + val bucketSelectorExtAggregationBuilder = + randomBucketSelectorExtAggregationBuilder( + bucketsPathsMap = mutableMapOf("_count" to "_count", "_key" to "_key"), + script = randomScript(source = "params._count > 0"), + parentBucketPath = "status_code", + ) val trigger = randomBucketLevelTrigger(bucketSelector = bucketSelectorExtAggregationBuilder) val monitor = randomBucketLevelMonitor(triggers = listOf(trigger)) - val inputResultsStr = "{\n" + - " \"_shards\": {\n" + - " \"total\": 1,\n" + - " \"failed\": 0,\n" + - " \"successful\": 1,\n" + - " \"skipped\": 0\n" + - " },\n" + - " \"hits\": {\n" + - " \"hits\": [\n" + - " {\n" + - " \"_index\": \"sample-http-responses\",\n" + - " \"_type\": \"http\",\n" + - " \"_source\": {\n" + - " \"status_code\": 100,\n" + - " \"http_4xx\": 0,\n" + - " \"http_3xx\": 0,\n" + - " \"http_5xx\": 0,\n" + - " \"http_2xx\": 0,\n" + - " \"timestamp\": 
100000,\n" + - " \"http_1xx\": 1\n" + - " },\n" + - " \"_id\": 1,\n" + - " \"_score\": 1\n" + - " },\n" + - " {\n" + - " \"_index\": \"sample-http-responses\",\n" + - " \"_type\": \"http\",\n" + - " \"_source\": {\n" + - " \"status_code\": 102,\n" + - " \"http_4xx\": 0,\n" + - " \"http_3xx\": 0,\n" + - " \"http_5xx\": 0,\n" + - " \"http_2xx\": 0,\n" + - " \"timestamp\": 160000,\n" + - " \"http_1xx\": 1\n" + - " },\n" + - " \"_id\": 2,\n" + - " \"_score\": 1\n" + - " },\n" + - " {\n" + - " \"_index\": \"sample-http-responses\",\n" + - " \"_type\": \"http\",\n" + - " \"_source\": {\n" + - " \"status_code\": 100,\n" + - " \"http_4xx\": 0,\n" + - " \"http_3xx\": 0,\n" + - " \"http_5xx\": 0,\n" + - " \"http_2xx\": 0,\n" + - " \"timestamp\": 220000,\n" + - " \"http_1xx\": 1\n" + - " },\n" + - " \"_id\": 4,\n" + - " \"_score\": 1\n" + - " },\n" + - " {\n" + - " \"_index\": \"sample-http-responses\",\n" + - " \"_type\": \"http\",\n" + - " \"_source\": {\n" + - " \"status_code\": 201,\n" + - " \"http_4xx\": 0,\n" + - " \"http_3xx\": 0,\n" + - " \"http_5xx\": 0,\n" + - " \"http_2xx\": 1,\n" + - " \"timestamp\": 280000,\n" + - " \"http_1xx\": 0\n" + - " },\n" + - " \"_id\": 5,\n" + - " \"_score\": 1\n" + - " }\n" + - " ],\n" + - " \"total\": {\n" + - " \"value\": 4,\n" + - " \"relation\": \"eq\"\n" + - " },\n" + - " \"max_score\": 1\n" + - " },\n" + - " \"took\": 15,\n" + - " \"timed_out\": false,\n" + - " \"aggregations\": {\n" + - " \"${trigger.id}\": {\n" + - " \"parent_bucket_path\": \"status_code\",\n" + - " \"bucket_indices\": [\n" + - " 0,\n" + - " 1,\n" + - " 2\n" + - " ]\n" + - " },\n" + - " \"status_code\": {\n" + - " \"buckets\": [\n" + - " {\n" + - " \"doc_count\": 2,\n" + - " \"key\": {\n" + - " \"status_code\": 100\n" + - " }\n" + - " },\n" + - " {\n" + - " \"doc_count\": 1,\n" + - " \"key\": {\n" + - " \"status_code\": 102\n" + - " }\n" + - " },\n" + - " {\n" + - " \"doc_count\": 1,\n" + - " \"key\": {\n" + - " \"status_code\": 201\n" + - " }\n" + - " }\n" + - 
" ],\n" + - " \"after_key\": {\n" + - " \"status_code\": 201\n" + - " }\n" + - " }\n" + - " }\n" + - "}" + val inputResultsStr = + "{\n" + + " \"_shards\": {\n" + + " \"total\": 1,\n" + + " \"failed\": 0,\n" + + " \"successful\": 1,\n" + + " \"skipped\": 0\n" + + " },\n" + + " \"hits\": {\n" + + " \"hits\": [\n" + + " {\n" + + " \"_index\": \"sample-http-responses\",\n" + + " \"_type\": \"http\",\n" + + " \"_source\": {\n" + + " \"status_code\": 100,\n" + + " \"http_4xx\": 0,\n" + + " \"http_3xx\": 0,\n" + + " \"http_5xx\": 0,\n" + + " \"http_2xx\": 0,\n" + + " \"timestamp\": 100000,\n" + + " \"http_1xx\": 1\n" + + " },\n" + + " \"_id\": 1,\n" + + " \"_score\": 1\n" + + " },\n" + + " {\n" + + " \"_index\": \"sample-http-responses\",\n" + + " \"_type\": \"http\",\n" + + " \"_source\": {\n" + + " \"status_code\": 102,\n" + + " \"http_4xx\": 0,\n" + + " \"http_3xx\": 0,\n" + + " \"http_5xx\": 0,\n" + + " \"http_2xx\": 0,\n" + + " \"timestamp\": 160000,\n" + + " \"http_1xx\": 1\n" + + " },\n" + + " \"_id\": 2,\n" + + " \"_score\": 1\n" + + " },\n" + + " {\n" + + " \"_index\": \"sample-http-responses\",\n" + + " \"_type\": \"http\",\n" + + " \"_source\": {\n" + + " \"status_code\": 100,\n" + + " \"http_4xx\": 0,\n" + + " \"http_3xx\": 0,\n" + + " \"http_5xx\": 0,\n" + + " \"http_2xx\": 0,\n" + + " \"timestamp\": 220000,\n" + + " \"http_1xx\": 1\n" + + " },\n" + + " \"_id\": 4,\n" + + " \"_score\": 1\n" + + " },\n" + + " {\n" + + " \"_index\": \"sample-http-responses\",\n" + + " \"_type\": \"http\",\n" + + " \"_source\": {\n" + + " \"status_code\": 201,\n" + + " \"http_4xx\": 0,\n" + + " \"http_3xx\": 0,\n" + + " \"http_5xx\": 0,\n" + + " \"http_2xx\": 1,\n" + + " \"timestamp\": 280000,\n" + + " \"http_1xx\": 0\n" + + " },\n" + + " \"_id\": 5,\n" + + " \"_score\": 1\n" + + " }\n" + + " ],\n" + + " \"total\": {\n" + + " \"value\": 4,\n" + + " \"relation\": \"eq\"\n" + + " },\n" + + " \"max_score\": 1\n" + + " },\n" + + " \"took\": 15,\n" + + " \"timed_out\": false,\n" + + 
" \"aggregations\": {\n" + + " \"${trigger.id}\": {\n" + + " \"parent_bucket_path\": \"status_code\",\n" + + " \"bucket_indices\": [\n" + + " 0,\n" + + " 1,\n" + + " 2\n" + + " ]\n" + + " },\n" + + " \"status_code\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"doc_count\": 2,\n" + + " \"key\": {\n" + + " \"status_code\": 100\n" + + " }\n" + + " },\n" + + " {\n" + + " \"doc_count\": 1,\n" + + " \"key\": {\n" + + " \"status_code\": 102\n" + + " }\n" + + " },\n" + + " {\n" + + " \"doc_count\": 1,\n" + + " \"key\": {\n" + + " \"status_code\": 201\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"after_key\": {\n" + + " \"status_code\": 201\n" + + " }\n" + + " }\n" + + " }\n" + + "}" - val parser = XContentType.JSON.xContent() - .createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - inputResultsStr - ) + val parser = + XContentType.JSON + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + inputResultsStr, + ) val inputResults = parser.map() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorActionTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorActionTests.kt index d284fc70e..21b7e94a4 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorActionTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorActionTests.kt @@ -5,9 +5,10 @@ package org.opensearch.alerting.action import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class ExecuteMonitorActionTests : OpenSearchTestCase() { - + @Test fun `test execute monitor action name`() { assertNotNull(ExecuteMonitorAction.INSTANCE.name()) assertEquals(ExecuteMonitorAction.INSTANCE.name(), ExecuteMonitorAction.NAME) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequestTests.kt 
b/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequestTests.kt index f54b6fea6..a244fe27c 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorRequestTests.kt @@ -12,11 +12,11 @@ import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class ExecuteMonitorRequestTests : OpenSearchTestCase() { - + @Test fun `test execute monitor request with id`() { - val req = ExecuteMonitorRequest(false, TimeValue.timeValueSeconds(100L), "1234", null) assertNotNull(req) @@ -30,6 +30,7 @@ class ExecuteMonitorRequestTests : OpenSearchTestCase() { assertEquals(req.monitor, newReq.monitor) } + @Test fun `test execute monitor request with monitor`() { val monitor = randomQueryLevelMonitor().copy(inputs = listOf(SearchInput(emptyList(), SearchSourceBuilder()))) val req = ExecuteMonitorRequest(false, TimeValue.timeValueSeconds(100L), null, monitor) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorResponseTests.kt index 10ccd7038..d37b028f9 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorResponseTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/ExecuteMonitorResponseTests.kt @@ -11,9 +11,10 @@ import org.opensearch.alerting.randomQueryLevelMonitorRunResult import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class ExecuteMonitorResponseTests : OpenSearchTestCase() { - + @Test fun `test exec query-level monitor response`() { val req = 
ExecuteMonitorResponse(randomQueryLevelMonitorRunResult()) Assert.assertNotNull(req) @@ -27,6 +28,7 @@ class ExecuteMonitorResponseTests : OpenSearchTestCase() { assertNotNull(newReq.monitorRunResult.inputResults) } + @Test fun `test exec bucket-level monitor response`() { val req = ExecuteMonitorResponse(randomBucketLevelMonitorRunResult()) Assert.assertNotNull(req) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsActionTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsActionTests.kt index c18e33790..f911b19fc 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsActionTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsActionTests.kt @@ -6,9 +6,10 @@ package org.opensearch.alerting.action import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetDestinationsActionTests : OpenSearchTestCase() { - + @Test fun `test get destinations action name`() { assertNotNull(GetDestinationsAction.INSTANCE.name()) assertEquals(GetDestinationsAction.INSTANCE.name(), GetDestinationsAction.NAME) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsRequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsRequestTests.kt index 7c76621f9..8240aaa4c 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsRequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsRequestTests.kt @@ -10,11 +10,11 @@ import org.opensearch.commons.alerting.model.Table import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetDestinationsRequestTests : OpenSearchTestCase() { - + @Test fun `test get destination request`() { - val table = Table("asc", "sortString", null, 1, 0, "") val req = 
GetDestinationsRequest("1234", 1L, FetchSourceContext.FETCH_SOURCE, table, "slack") assertNotNull(req) @@ -30,8 +30,8 @@ class GetDestinationsRequestTests : OpenSearchTestCase() { assertEquals("slack", newReq.destinationType) } + @Test fun `test get destination request without src context`() { - val table = Table("asc", "sortString", null, 1, 0, "") val req = GetDestinationsRequest("1234", 1L, null, table, "slack") assertNotNull(req) @@ -47,8 +47,8 @@ class GetDestinationsRequestTests : OpenSearchTestCase() { assertEquals("slack", newReq.destinationType) } + @Test fun `test get destination request without destinationId`() { - val table = Table("asc", "sortString", null, 1, 0, "") val req = GetDestinationsRequest(null, 1L, FetchSourceContext.FETCH_SOURCE, table, "slack") assertNotNull(req) @@ -64,8 +64,8 @@ class GetDestinationsRequestTests : OpenSearchTestCase() { assertEquals("slack", newReq.destinationType) } + @Test fun `test get destination request with filter`() { - val table = Table("asc", "sortString", null, 1, 0, "") val req = GetDestinationsRequest(null, 1L, FetchSourceContext.FETCH_SOURCE, table, "slack") assertNotNull(req) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsResponseTests.kt index ed837bdce..9af0dd07f 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsResponseTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetDestinationsResponseTests.kt @@ -14,9 +14,10 @@ import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import java.time.Instant import java.util.Collections +import kotlin.test.Test class GetDestinationsResponseTests : OpenSearchTestCase() { - + @Test fun `test get destination response with no destinations`() { val req = GetDestinationsResponse(RestStatus.OK, 0, Collections.emptyList()) assertNotNull(req) @@ -30,23 +31,25 @@ 
class GetDestinationsResponseTests : OpenSearchTestCase() { assertEquals(RestStatus.OK, newReq.status) } + @Test fun `test get destination response with a destination`() { val slack = Slack("url") - val destination = Destination( - "id", - 0L, - 0, - 0, - 0, - DestinationType.SLACK, - "name", - null, - Instant.MIN, - null, - slack, - null, - null - ) + val destination = + Destination( + "id", + 0L, + 0, + 0, + 0, + DestinationType.SLACK, + "name", + null, + Instant.MIN, + null, + slack, + null, + null, + ) val req = GetDestinationsResponse(RestStatus.OK, 1, listOf(destination)) assertNotNull(req) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountActionTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountActionTests.kt index cb26b182e..e5e1783eb 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountActionTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountActionTests.kt @@ -6,9 +6,10 @@ package org.opensearch.alerting.action import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetEmailAccountActionTests : OpenSearchTestCase() { - + @Test fun `test get email account name`() { assertNotNull(GetEmailAccountAction.INSTANCE.name()) assertEquals(GetEmailAccountAction.INSTANCE.name(), GetEmailAccountAction.NAME) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountRequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountRequestTests.kt index 02631a38b..63589a437 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountRequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountRequestTests.kt @@ -10,11 +10,11 @@ import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.rest.RestRequest import org.opensearch.search.fetch.subphase.FetchSourceContext import 
org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetEmailAccountRequestTests : OpenSearchTestCase() { - + @Test fun `test get email account request`() { - val req = GetEmailAccountRequest("1234", 1L, RestRequest.Method.GET, FetchSourceContext.FETCH_SOURCE) assertNotNull(req) @@ -28,8 +28,8 @@ class GetEmailAccountRequestTests : OpenSearchTestCase() { assertEquals(FetchSourceContext.FETCH_SOURCE, newReq.srcContext) } + @Test fun `test head email account request`() { - val req = GetEmailAccountRequest("1234", 2L, RestRequest.Method.HEAD, FetchSourceContext.FETCH_SOURCE) assertNotNull(req) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountResponseTests.kt index ed60c3439..5db9a7227 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountResponseTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailAccountResponseTests.kt @@ -10,11 +10,11 @@ import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetEmailAccountResponseTests : OpenSearchTestCase() { - + @Test fun `test get email account response`() { - val res = GetEmailAccountResponse("1234", 1L, 2L, 0L, RestStatus.OK, null) assertNotNull(res) @@ -28,8 +28,8 @@ class GetEmailAccountResponseTests : OpenSearchTestCase() { assertEquals(null, newRes.emailAccount) } + @Test fun `test get email account with email account`() { - val emailAccount = randomEmailAccount(name = "test_email_account") val res = GetEmailAccountResponse("1234", 1L, 2L, 0L, RestStatus.OK, emailAccount) assertNotNull(res) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupActionTests.kt 
b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupActionTests.kt index 647de76c3..14eb7f180 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupActionTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupActionTests.kt @@ -6,9 +6,10 @@ package org.opensearch.alerting.action import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetEmailGroupActionTests : OpenSearchTestCase() { - + @Test fun `test get email group name`() { assertNotNull(GetEmailGroupAction.INSTANCE.name()) assertEquals(GetEmailGroupAction.INSTANCE.name(), GetEmailGroupAction.NAME) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupRequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupRequestTests.kt index 7fa8b2037..8ddee07f3 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupRequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupRequestTests.kt @@ -10,11 +10,11 @@ import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.rest.RestRequest import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetEmailGroupRequestTests : OpenSearchTestCase() { - + @Test fun `test get email group request`() { - val req = GetEmailGroupRequest("1234", 1L, RestRequest.Method.GET, FetchSourceContext.FETCH_SOURCE) assertNotNull(req) @@ -28,8 +28,8 @@ class GetEmailGroupRequestTests : OpenSearchTestCase() { assertEquals(FetchSourceContext.FETCH_SOURCE, newReq.srcContext) } + @Test fun `test head email group request`() { - val req = GetEmailGroupRequest("1234", 1L, RestRequest.Method.HEAD, FetchSourceContext.FETCH_SOURCE) assertNotNull(req) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupResponseTests.kt 
b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupResponseTests.kt index 19612fe4a..a2d7f34bc 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupResponseTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetEmailGroupResponseTests.kt @@ -10,11 +10,11 @@ import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetEmailGroupResponseTests : OpenSearchTestCase() { - + @Test fun `test get email group response`() { - val res = GetEmailGroupResponse("1234", 1L, 2L, 0L, RestStatus.OK, null) assertNotNull(res) @@ -28,8 +28,8 @@ class GetEmailGroupResponseTests : OpenSearchTestCase() { assertEquals(null, newRes.emailGroup) } + @Test fun `test get email group with email group`() { - val emailGroup = randomEmailGroup(name = "test-email-group") val res = GetEmailGroupResponse("1234", 1L, 2L, 0L, RestStatus.OK, emailGroup) assertNotNull(res) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetFindingsRequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetFindingsRequestTests.kt index 990da66fa..fd64f9c71 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetFindingsRequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetFindingsRequestTests.kt @@ -11,20 +11,21 @@ import org.opensearch.commons.alerting.model.Table import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.index.query.BoolQueryBuilder import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetFindingsRequestTests : OpenSearchTestCase() { - + @Test fun `test get findings request`() { - val table = Table("asc", "sortString", null, 1, 0, "") - val req = GetFindingsRequest( - "2121", - table, - "1", - "finding_index_name", - boolQueryBuilder = 
BoolQueryBuilder() - ) + val req = + GetFindingsRequest( + "2121", + table, + "1", + "finding_index_name", + boolQueryBuilder = BoolQueryBuilder(), + ) assertNotNull(req) val out = BytesStreamOutput() @@ -38,6 +39,7 @@ class GetFindingsRequestTests : OpenSearchTestCase() { assertEquals(table, newReq.table) } + @Test fun `test validate returns null`() { val table = Table("asc", "sortString", null, 1, 0, "") diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetRemoteIndexesActionTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetRemoteIndexesActionTests.kt index f4639dd6e..0bd0cf2c1 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/GetRemoteIndexesActionTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/GetRemoteIndexesActionTests.kt @@ -6,99 +6,112 @@ package org.opensearch.alerting.action import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetRemoteIndexesActionTests : OpenSearchTestCase() { - private val validPatterns = listOf( - "local-index-name", - "localindexname", - "local-index-*-pattern-*", - "*local-index-*-pattern-*", - "cluster-name:remote-index-name", - "cluster-name:remoteindexname", - "cluster-name:remote-index-*-pattern-*", - "cluster-name:*remote-index-*-pattern-*", - "cluster-*pattern-*:remote-index-name", - "cluster-*pattern-*:remoteindexname", - "cluster-*pattern-*:remote-index-*-pattern-*", - "cluster-*pattern-*:*remote-index-*-pattern-*", - "*cluster-*pattern-*:remote-index-*-pattern-*", - "cluster-*:pattern-*:remote-index-name", - "cluster-*:pattern-*:remoteindexname", - "cluster-*:pattern-*:remote-index-*-pattern-*", - "*cluster-*:pattern-*:remote-index-*-pattern-*", - ) - - private val invalidPatterns = listOf( - // `` character length less than 1 should return FALSE - ":remote-index-name", - - // `` character length greater than 255 should return FALSE - "${randomAlphaOfLength(256)}:remote-index-name", - - // Invalid characters should 
return FALSE - "local-index#-name", - "cluster-name:remote-#index-name", - "cluster-#name:remote-index-name", - "cluster-#name:remote-#index-name", + private val validPatterns = + listOf( + "local-index-name", + "localindexname", + "local-index-*-pattern-*", + "*local-index-*-pattern-*", + "cluster-name:remote-index-name", + "cluster-name:remoteindexname", + "cluster-name:remote-index-*-pattern-*", + "cluster-name:*remote-index-*-pattern-*", + "cluster-*pattern-*:remote-index-name", + "cluster-*pattern-*:remoteindexname", + "cluster-*pattern-*:remote-index-*-pattern-*", + "cluster-*pattern-*:*remote-index-*-pattern-*", + "*cluster-*pattern-*:remote-index-*-pattern-*", + "cluster-*:pattern-*:remote-index-name", + "cluster-*:pattern-*:remoteindexname", + "cluster-*:pattern-*:remote-index-*-pattern-*", + "*cluster-*:pattern-*:remote-index-*-pattern-*", + ) - // More than 1 `:` character in `` should return FALSE - "bad:cluster:name:remote-index-name", - ) + private val invalidPatterns = + listOf( + // `` character length less than 1 should return FALSE + ":remote-index-name", + // `` character length greater than 255 should return FALSE + "${randomAlphaOfLength(256)}:remote-index-name", + // Invalid characters should return FALSE + "local-index#-name", + "cluster-name:remote-#index-name", + "cluster-#name:remote-index-name", + "cluster-#name:remote-#index-name", + // More than 1 `:` character in `` should return FALSE + "bad:cluster:name:remote-index-name", + ) + @Test fun `test get remote indexes action name`() { assertNotNull(GetRemoteIndexesAction.INSTANCE.name()) assertEquals(GetRemoteIndexesAction.INSTANCE.name(), GetRemoteIndexesAction.NAME) } + @Test fun `test GetRemoteIndexesRequest isValid with empty array`() { - val request = GetRemoteIndexesRequest( - indexes = emptyList(), - includeMappings = false - ) + val request = + GetRemoteIndexesRequest( + indexes = emptyList(), + includeMappings = false, + ) assertFalse(request.isValid()) } + @Test fun `test 
GetRemoteIndexesRequest isValid with one valid entry`() { validPatterns.forEach { - val request = GetRemoteIndexesRequest( - indexes = listOf(it), - includeMappings = false - ) + val request = + GetRemoteIndexesRequest( + indexes = listOf(it), + includeMappings = false, + ) assertTrue("Expected pattern '$it' to be valid.", request.isValid()) } } + @Test fun `test GetRemoteIndexesRequest isValid with multiple valid entries`() { - val request = GetRemoteIndexesRequest( - indexes = validPatterns, - includeMappings = false - ) + val request = + GetRemoteIndexesRequest( + indexes = validPatterns, + includeMappings = false, + ) assertTrue(request.isValid()) } + @Test fun `test GetRemoteIndexesRequest isValid with one invalid entry`() { invalidPatterns.forEach { - val request = GetRemoteIndexesRequest( - indexes = listOf(it), - includeMappings = false - ) + val request = + GetRemoteIndexesRequest( + indexes = listOf(it), + includeMappings = false, + ) assertFalse("Expected pattern '$it' to be invalid.", request.isValid()) } } + @Test fun `test GetRemoteIndexesRequest isValid with multiple invalid entries`() { - val request = GetRemoteIndexesRequest( - indexes = invalidPatterns, - includeMappings = false - ) + val request = + GetRemoteIndexesRequest( + indexes = invalidPatterns, + includeMappings = false, + ) assertFalse(request.isValid()) } + @Test fun `test GetRemoteIndexesRequest isValid with valid and invalid entries`() { - val request = GetRemoteIndexesRequest( - indexes = validPatterns + invalidPatterns, - includeMappings = false - ) + val request = + GetRemoteIndexesRequest( + indexes = validPatterns + invalidPatterns, + includeMappings = false, + ) assertFalse(request.isValid()) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/SearchEmailAccountActionTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/SearchEmailAccountActionTests.kt index 14942c977..b88d0fef8 100644 --- 
a/alerting/src/test/kotlin/org/opensearch/alerting/action/SearchEmailAccountActionTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/SearchEmailAccountActionTests.kt @@ -6,9 +6,10 @@ package org.opensearch.alerting.action import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class SearchEmailAccountActionTests : OpenSearchTestCase() { - + @Test fun `test search email account action name`() { assertNotNull(SearchEmailAccountAction.INSTANCE.name()) assertEquals(SearchEmailAccountAction.INSTANCE.name(), SearchEmailAccountAction.NAME) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/action/SearchEmailGroupActionTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/action/SearchEmailGroupActionTests.kt index 6cd01cf9d..bc97cc582 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/action/SearchEmailGroupActionTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/action/SearchEmailGroupActionTests.kt @@ -6,9 +6,10 @@ package org.opensearch.alerting.action import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class SearchEmailGroupActionTests : OpenSearchTestCase() { - + @Test fun `test search email group action name`() { assertNotNull(SearchEmailGroupAction.INSTANCE.name()) assertEquals(SearchEmailGroupAction.INSTANCE.name(), SearchEmailGroupAction.NAME) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2RequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2RequestTests.kt index 727225f93..f058128e3 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2RequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2RequestTests.kt @@ -9,13 +9,16 @@ import org.opensearch.action.support.WriteRequest import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import 
org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class DeleteMonitorV2RequestTests : OpenSearchTestCase() { + @Test fun `test get monitor v2 request as stream`() { - val req = DeleteMonitorV2Request( - monitorV2Id = "abc", - refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE - ) + val req = + DeleteMonitorV2Request( + monitorV2Id = "abc", + refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE, + ) assertNotNull(req) val out = BytesStreamOutput() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2ResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2ResponseTests.kt index f10082d8d..81412ff66 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2ResponseTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/DeleteMonitorV2ResponseTests.kt @@ -8,13 +8,16 @@ package org.opensearch.alerting.actionv2 import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class DeleteMonitorV2ResponseTests : OpenSearchTestCase() { + @Test fun `test get monitor v2 request as stream`() { - val req = DeleteMonitorV2Response( - id = "abc", - version = 3L - ) + val req = + DeleteMonitorV2Response( + id = "abc", + version = 3L, + ) assertNotNull(req) val out = BytesStreamOutput() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2RequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2RequestTests.kt index b2eb38a93..0128a1a11 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2RequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2RequestTests.kt @@ -12,16 +12,19 @@ import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.common.unit.TimeValue import 
org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class ExecuteMonitorV2RequestTests : OpenSearchTestCase() { + @Test fun `test execute monitor v2 request`() { - val req = ExecuteMonitorV2Request( - dryrun = true, - manual = false, - monitorV2Id = "abc", - monitorV2 = randomPPLMonitor(), - requestEnd = TimeValue.timeValueMinutes(30L) - ) + val req = + ExecuteMonitorV2Request( + dryrun = true, + manual = false, + monitorV2Id = "abc", + monitorV2 = randomPPLMonitor(), + requestEnd = TimeValue.timeValueMinutes(30L), + ) assertNotNull(req) val out = BytesStreamOutput() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2ResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2ResponseTests.kt index e4f932170..abbf0c244 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2ResponseTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/ExecuteMonitorV2ResponseTests.kt @@ -10,21 +10,26 @@ import org.opensearch.alerting.modelv2.PPLSQLTriggerRunResult import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class ExecuteMonitorV2ResponseTests : OpenSearchTestCase() { + @Test fun `test execute monitor response`() { - val monitorRunResult = PPLSQLMonitorRunResult( - monitorName = "some-monitor", - error = IllegalArgumentException("some-error"), - triggerResults = mapOf( - "some-trigger-id" to PPLSQLTriggerRunResult( - triggerName = "some-trigger", - triggered = true, - error = IllegalArgumentException("some-error") - ) - ), - pplQueryResults = mapOf("some-result" to mapOf("some-field" to 3)) - ) + val monitorRunResult = + PPLSQLMonitorRunResult( + monitorName = "some-monitor", + error = IllegalArgumentException("some-error"), + triggerResults = + mapOf( + 
"some-trigger-id" to + PPLSQLTriggerRunResult( + triggerName = "some-trigger", + triggered = true, + error = IllegalArgumentException("some-error"), + ), + ), + pplQueryResults = mapOf("some-result" to mapOf("some-field" to 3)), + ) val response = ExecuteMonitorV2Response(monitorRunResult) assertNotNull(response) @@ -39,19 +44,23 @@ class ExecuteMonitorV2ResponseTests : OpenSearchTestCase() { assert(newResponse.monitorV2RunResult.triggerResults.containsKey("some-trigger-id")) assertEquals( response.monitorV2RunResult.triggerResults["some-trigger-id"]!!.triggerName, - newResponse.monitorV2RunResult.triggerResults["some-trigger-id"]!!.triggerName + newResponse.monitorV2RunResult.triggerResults["some-trigger-id"]!!.triggerName, ) assertEquals( response.monitorV2RunResult.triggerResults["some-trigger-id"]!!.triggered, - newResponse.monitorV2RunResult.triggerResults["some-trigger-id"]!!.triggered + newResponse.monitorV2RunResult.triggerResults["some-trigger-id"]!!.triggered, ) assertEquals( - response.monitorV2RunResult.triggerResults["some-trigger-id"]!!.error?.message, - newResponse.monitorV2RunResult.triggerResults["some-trigger-id"]!!.error?.message + response.monitorV2RunResult.triggerResults["some-trigger-id"]!! + .error + ?.message, + newResponse.monitorV2RunResult.triggerResults["some-trigger-id"]!! 
+ .error + ?.message, ) assertEquals( (response.monitorV2RunResult as PPLSQLMonitorRunResult).pplQueryResults, - (newResponse.monitorV2RunResult as PPLSQLMonitorRunResult).pplQueryResults + (newResponse.monitorV2RunResult as PPLSQLMonitorRunResult).pplQueryResults, ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2RequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2RequestTests.kt index aa963c8a0..60fc65c1a 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2RequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2RequestTests.kt @@ -9,16 +9,19 @@ import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.commons.alerting.model.Table import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetAlertsV2RequestTests : OpenSearchTestCase() { + @Test fun `test get alerts request as stream`() { val table = Table("asc", "sortString", null, 1, 0, "") - val req = GetAlertsV2Request( - table = table, - severityLevel = "1", - monitorV2Ids = listOf("1", "2"), - ) + val req = + GetAlertsV2Request( + table = table, + severityLevel = "1", + monitorV2Ids = listOf("1", "2"), + ) assertNotNull(req) val out = BytesStreamOutput() @@ -32,6 +35,7 @@ class GetAlertsV2RequestTests : OpenSearchTestCase() { assertTrue(newReq.monitorV2Ids!!.contains("2")) } + @Test fun `test get alerts request with filter as stream`() { val table = Table("asc", "sortString", null, 1, 0, "") val req = GetAlertsV2Request(table, "1", null) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2ResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2ResponseTests.kt index 8f803bc00..1bcdfb924 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2ResponseTests.kt +++ 
b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetAlertsV2ResponseTests.kt @@ -17,8 +17,10 @@ import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.test.OpenSearchTestCase import java.time.Instant import java.util.Collections +import kotlin.test.Test class GetAlertsV2ResponseTests : OpenSearchTestCase() { + @Test fun `test get alerts response with no alerts`() { val req = GetAlertsV2Response(Collections.emptyList(), 0) assertNotNull(req) @@ -31,21 +33,23 @@ class GetAlertsV2ResponseTests : OpenSearchTestCase() { assertEquals(0, newReq.totalAlertV2s) } + @Test fun `test get alerts response with alerts`() { - val alert = AlertV2( - monitorId = "id", - monitorName = "name", - monitorVersion = AlertV2.NO_VERSION, - monitorUser = randomUser(), - triggerId = "triggerId", - triggerName = "triggerNamer", - query = "source = some_index", - queryResults = mapOf(), - triggeredTime = Instant.now(), - errorMessage = null, - severity = TriggerV2.Severity.LOW, - executionId = "executionId" - ) + val alert = + AlertV2( + monitorId = "id", + monitorName = "name", + monitorVersion = AlertV2.NO_VERSION, + monitorUser = randomUser(), + triggerId = "triggerId", + triggerName = "triggerNamer", + query = "source = some_index", + queryResults = mapOf(), + triggeredTime = Instant.now(), + errorMessage = null, + severity = TriggerV2.Severity.LOW, + executionId = "executionId", + ) val res = GetAlertsV2Response(listOf(alert), 1) assertNotNull(res) @@ -58,32 +62,37 @@ class GetAlertsV2ResponseTests : OpenSearchTestCase() { assertEquals(1, newRes.totalAlertV2s) } + @Test fun `test toXContent for get alerts response`() { val now = Instant.now() - val alert = AlertV2( - monitorId = "id", - monitorName = "name", - monitorVersion = AlertV2.NO_VERSION, - monitorUser = randomUser(), - triggerId = "triggerId", - triggerName = "triggerName", - query = "source = some_index", - queryResults = mapOf(), - triggeredTime = now, - errorMessage = null, - severity = 
TriggerV2.Severity.LOW, - executionId = "executionId" - ) + val alert = + AlertV2( + monitorId = "id", + monitorName = "name", + monitorVersion = AlertV2.NO_VERSION, + monitorUser = randomUser(), + triggerId = "triggerId", + triggerName = "triggerName", + query = "source = some_index", + queryResults = mapOf(), + triggeredTime = now, + errorMessage = null, + severity = TriggerV2.Severity.LOW, + executionId = "executionId", + ) val req = GetAlertsV2Response(listOf(alert), 1) - var actualXContentString = req.toXContent( - XContentBuilder.builder(XContentType.JSON.xContent()), - ToXContent.EMPTY_PARAMS - ).string() - val expectedXContentString = "{\"alerts_v2\":[{\"id\":\"\",\"version\":-1,\"monitor_v2_id\":\"id\",\"schema_version\":0," + - "\"monitor_v2_version\":-1,\"monitor_v2_name\":\"name\",\"execution_id\":\"executionId\",\"trigger_v2_id\":\"triggerId\"," + - "\"trigger_v2_name\":\"triggerName\",\"query\":\"source = some_index\",\"query_results\":{},\"error_message\":null," + - "\"severity\":\"low\",\"triggered_time\":${now.toEpochMilli()}}],\"total_alerts_v2\":1}" + var actualXContentString = + req + .toXContent( + XContentBuilder.builder(XContentType.JSON.xContent()), + ToXContent.EMPTY_PARAMS, + ).string() + val expectedXContentString = + "{\"alerts_v2\":[{\"id\":\"\",\"version\":-1,\"monitor_v2_id\":\"id\",\"schema_version\":0," + + "\"monitor_v2_version\":-1,\"monitor_v2_name\":\"name\",\"execution_id\":\"executionId\",\"trigger_v2_id\":\"triggerId\"," + + "\"trigger_v2_name\":\"triggerName\",\"query\":\"source = some_index\",\"query_results\":{},\"error_message\":null," + + "\"severity\":\"low\",\"triggered_time\":${now.toEpochMilli()}}],\"total_alerts_v2\":1}" logger.info("expected: $expectedXContentString") logger.info("actual: $actualXContentString") diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2RequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2RequestTests.kt index 
a4ede1ad2..e162a7900 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2RequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2RequestTests.kt @@ -9,14 +9,17 @@ import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.search.fetch.subphase.FetchSourceContext import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetMonitorV2RequestTests : OpenSearchTestCase() { + @Test fun `test get monitor v2 request as stream`() { - val req = GetMonitorV2Request( - monitorV2Id = "abc", - version = 2L, - srcContext = FetchSourceContext.FETCH_SOURCE - ) + val req = + GetMonitorV2Request( + monitorV2Id = "abc", + version = 2L, + srcContext = FetchSourceContext.FETCH_SOURCE, + ) assertNotNull(req) val out = BytesStreamOutput() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2ResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2ResponseTests.kt index 11a8c218b..262dafb44 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2ResponseTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/GetMonitorV2ResponseTests.kt @@ -10,16 +10,19 @@ import org.opensearch.alerting.randomPPLMonitor import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class GetMonitorV2ResponseTests : OpenSearchTestCase() { + @Test fun `test get monitor v2 response as stream`() { - val req = GetMonitorV2Response( - id = "abc", - version = 2L, - seqNo = 1L, - primaryTerm = 2L, - monitorV2 = randomPPLMonitor() as MonitorV2 - ) + val req = + GetMonitorV2Response( + id = "abc", + version = 2L, + seqNo = 1L, + primaryTerm = 2L, + monitorV2 = randomPPLMonitor() as MonitorV2, + ) assertNotNull(req) val out = 
BytesStreamOutput() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2RequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2RequestTests.kt index 1e7c7cd08..78f82e36b 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2RequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2RequestTests.kt @@ -12,18 +12,21 @@ import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.rest.RestRequest import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class IndexMonitorV2RequestTests : OpenSearchTestCase() { + @Test fun `test index monitor v2 request as stream`() { - val req = IndexMonitorV2Request( - monitorId = "abc", - seqNo = 1L, - primaryTerm = 1L, - refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE, - method = RestRequest.Method.POST, - monitorV2 = randomPPLMonitor() as MonitorV2, - rbacRoles = listOf("role-a", "role-b") - ) + val req = + IndexMonitorV2Request( + monitorId = "abc", + seqNo = 1L, + primaryTerm = 1L, + refreshPolicy = WriteRequest.RefreshPolicy.IMMEDIATE, + method = RestRequest.Method.POST, + monitorV2 = randomPPLMonitor() as MonitorV2, + rbacRoles = listOf("role-a", "role-b"), + ) assertNotNull(req) val out = BytesStreamOutput() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2ResponseTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2ResponseTests.kt index 2de07698e..e388d7144 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2ResponseTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/IndexMonitorV2ResponseTests.kt @@ -10,16 +10,19 @@ import org.opensearch.alerting.randomPPLMonitor import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import 
org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class IndexMonitorV2ResponseTests : OpenSearchTestCase() { + @Test fun `test index monitor v2 response as stream`() { - val req = IndexMonitorV2Response( - id = "abc", - version = 2L, - seqNo = 1L, - primaryTerm = 1L, - monitorV2 = randomPPLMonitor() as MonitorV2 - ) + val req = + IndexMonitorV2Response( + id = "abc", + version = 2L, + seqNo = 1L, + primaryTerm = 1L, + monitorV2 = randomPPLMonitor() as MonitorV2, + ) assertNotNull(req) val out = BytesStreamOutput() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/SearchMonitorV2RequestTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/SearchMonitorV2RequestTests.kt index 987a738a2..b91aab38c 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/SearchMonitorV2RequestTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/actionv2/SearchMonitorV2RequestTests.kt @@ -14,8 +14,10 @@ import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.OpenSearchTestCase import org.opensearch.test.rest.OpenSearchRestTestCase import java.util.concurrent.TimeUnit +import kotlin.test.Test class SearchMonitorV2RequestTests : OpenSearchTestCase() { + @Test fun `test search monitors request`() { val searchSourceBuilder = SearchSourceBuilder().from(0).size(100).timeout(TimeValue(60, TimeUnit.SECONDS)) val searchRequest = SearchRequest().indices(OpenSearchRestTestCase.randomAlphaOfLength(10)).source(searchSourceBuilder) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilderTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilderTests.kt index 60021e20b..e6aa05454 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilderTests.kt +++ 
b/alerting/src/test/kotlin/org/opensearch/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilderTests.kt @@ -16,9 +16,7 @@ import org.opensearch.search.aggregations.bucket.terms.IncludeExclude import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy class BucketSelectorExtAggregationBuilderTests : BasePipelineAggregationTestCase() { - override fun plugins(): List { - return listOf(AlertingPlugin()) - } + override fun plugins(): List = listOf(AlertingPlugin()) override fun createTestAggregatorFactory(): BucketSelectorExtAggregationBuilder { val name = randomAlphaOfLengthBetween(3, 20) @@ -36,18 +34,24 @@ class BucketSelectorExtAggregationBuilderTests : BasePipelineAggregationTestCase params["foo"] = "bar" } val type = randomFrom(*ScriptType.values()) - script = Script( - type, - if (type == ScriptType.STORED) null else randomFrom("my_lang", Script.DEFAULT_SCRIPT_LANG), - "script", params - ) + script = + Script( + type, + if (type == ScriptType.STORED) null else randomFrom("my_lang", Script.DEFAULT_SCRIPT_LANG), + "script", + params, + ) } val parentBucketPath = randomAlphaOfLengthBetween(3, 20) val filter = BucketSelectorExtFilter(IncludeExclude("foo.*", "bar.*")) - val factory = BucketSelectorExtAggregationBuilder( - name, bucketsPaths, - script, parentBucketPath, filter - ) + val factory = + BucketSelectorExtAggregationBuilder( + name, + bucketsPaths, + script, + parentBucketPath, + filter, + ) if (randomBoolean()) { factory.gapPolicy(randomFrom(*GapPolicy.values())) } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/alerts/AlertIndicesIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/alerts/AlertIndicesIT.kt index 9e1e2437f..8743321c6 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/alerts/AlertIndicesIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/alerts/AlertIndicesIT.kt @@ -25,9 +25,10 @@ import org.opensearch.commons.alerting.model.ScheduledJob import 
org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import java.util.concurrent.TimeUnit +import kotlin.test.Test class AlertIndicesIT : AlertingRestTestCase() { - + @Test fun `test create alert index`() { executeMonitor(randomQueryLevelMonitor(triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN)))) @@ -35,6 +36,7 @@ class AlertIndicesIT : AlertingRestTestCase() { assertIndexExists(AlertIndices.ALERT_HISTORY_WRITE_INDEX) } + @Test fun `test create finding index`() { val testIndex = createTestIndex() val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) @@ -47,14 +49,18 @@ class AlertIndicesIT : AlertingRestTestCase() { assertIndexExists(AlertIndices.FINDING_HISTORY_WRITE_INDEX) } + @Test fun `test update alert index mapping with new schema version`() { wipeAllODFEIndices() assertIndexDoesNotExist(AlertIndices.ALERT_INDEX) assertIndexDoesNotExist(AlertIndices.ALERT_HISTORY_WRITE_INDEX) putAlertMappings( - AlertIndices.alertMapping().trimStart('{').trimEnd('}') - .replace("\"schema_version\": 5", "\"schema_version\": 0") + AlertIndices + .alertMapping() + .trimStart('{') + .trimEnd('}') + .replace("\"schema_version\": 5", "\"schema_version\": 0"), ) assertIndexExists(AlertIndices.ALERT_INDEX) assertIndexExists(AlertIndices.ALERT_HISTORY_WRITE_INDEX) @@ -69,13 +75,17 @@ class AlertIndicesIT : AlertingRestTestCase() { verifyIndexSchemaVersion(AlertIndices.ALERT_HISTORY_WRITE_INDEX, 5) } + @Test fun `test update finding index mapping with new schema version`() { wipeAllODFEIndices() assertIndexDoesNotExist(AlertIndices.FINDING_HISTORY_WRITE_INDEX) putFindingMappings( - AlertIndices.findingMapping().trimStart('{').trimEnd('}') - .replace("\"schema_version\": 4", "\"schema_version\": 0") + AlertIndices + .findingMapping() + .trimStart('{') + .trimEnd('}') + .replace("\"schema_version\": 4", "\"schema_version\": 0"), ) assertIndexExists(AlertIndices.FINDING_HISTORY_WRITE_INDEX) 
verifyIndexSchemaVersion(AlertIndices.FINDING_HISTORY_WRITE_INDEX, 0) @@ -92,6 +102,7 @@ class AlertIndicesIT : AlertingRestTestCase() { verifyIndexSchemaVersion(AlertIndices.FINDING_HISTORY_WRITE_INDEX, 4) } + @Test fun `test alert index gets recreated automatically if deleted`() { wipeAllODFEIndices() assertIndexDoesNotExist(AlertIndices.ALERT_INDEX) @@ -110,6 +121,7 @@ class AlertIndicesIT : AlertingRestTestCase() { assertNull("Error running a monitor after wiping alert indices", output["error"]) } + @Test fun `test finding index gets recreated automatically if deleted`() { wipeAllODFEIndices() assertIndexDoesNotExist(AlertIndices.FINDING_HISTORY_WRITE_INDEX) @@ -131,6 +143,7 @@ class AlertIndicesIT : AlertingRestTestCase() { assertNull("Error running a monitor after wiping finding indices", output["error"]) } + @Test fun `test rollover alert history index`() { // Update the rollover check to be every 1 second and the index max age to be 1 second client().updateSettings(AlertingSettings.ALERT_HISTORY_ROLLOVER_PERIOD.key, "1s") @@ -146,6 +159,7 @@ class AlertIndicesIT : AlertingRestTestCase() { assertTrue("Did not find 3 alert indices", getAlertIndices().size >= 3) } + @Test fun `test rollover finding history index`() { // Update the rollover check to be every 1 second and the index max age to be 1 second client().updateSettings(AlertingSettings.FINDING_HISTORY_ROLLOVER_PERIOD.key, "1s") @@ -165,6 +179,7 @@ class AlertIndicesIT : AlertingRestTestCase() { assertTrue("Did not find 2 alert indices", getFindingIndices().size >= 2) } + @Test fun `test alert history disabled`() { resetHistorySettings() @@ -209,6 +224,7 @@ class AlertIndicesIT : AlertingRestTestCase() { assertEquals(1, getAlertHistoryDocCount()) } + @Test fun `test short retention period`() { resetHistorySettings() @@ -259,6 +275,7 @@ class AlertIndicesIT : AlertingRestTestCase() { assertEquals(0, getAlertHistoryDocCount()) } + @Test fun `test short finding retention period`() { resetHistorySettings() 
@@ -358,18 +375,25 @@ class AlertIndicesIT : AlertingRestTestCase() { } private fun getAlertHistoryDocCount(): Long { - val request = """ + val request = + """ { "query": { "match_all": {} } } - """.trimIndent() - val response = adminClient().makeRequest( - "POST", "${AlertIndices.ALERT_HISTORY_ALL}/_search", emptyMap(), - StringEntity(request, APPLICATION_JSON) - ) + """.trimIndent() + val response = + adminClient().makeRequest( + "POST", + "${AlertIndices.ALERT_HISTORY_ALL}/_search", + emptyMap(), + StringEntity(request, APPLICATION_JSON), + ) assertEquals("Request to get alert history failed", RestStatus.OK, response.restStatus()) - return SearchResponse.fromXContent(createParser(jsonXContent, response.entity.content)).hits.totalHits!!.value + return SearchResponse + .fromXContent(createParser(jsonXContent, response.entity.content)) + .hits.totalHits!! + .value } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/alertsv2/AlertV2IndicesIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/alertsv2/AlertV2IndicesIT.kt index 672bc3902..9bfafc15f 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/alertsv2/AlertV2IndicesIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/alertsv2/AlertV2IndicesIT.kt @@ -27,6 +27,7 @@ import org.opensearch.core.rest.RestStatus import org.opensearch.test.OpenSearchTestCase import java.time.temporal.ChronoUnit.MINUTES import java.util.concurrent.TimeUnit +import kotlin.test.Test /** * Tests AlertV2 history migration, AlertV2 deletion, and AlertV2 expiration functionality @@ -41,6 +42,7 @@ class AlertV2IndicesIT : AlertingRestTestCase() { client().updateSettings(AlertingV2Settings.ALERTING_V2_ENABLED.key, "true") } + @Test fun `test create alert v2 index`() { generateAlertV2s() @@ -48,14 +50,18 @@ class AlertV2IndicesIT : AlertingRestTestCase() { assertIndexExists(AlertV2Indices.ALERT_V2_HISTORY_WRITE_INDEX) } + @Test fun `test update alert v2 index mapping with new schema version`() { 
wipeAllODFEIndices() assertIndexDoesNotExist(AlertV2Indices.ALERT_V2_INDEX) assertIndexDoesNotExist(AlertV2Indices.ALERT_V2_HISTORY_WRITE_INDEX) putAlertV2Mappings( - AlertV2Indices.alertV2Mapping().trimStart('{').trimEnd('}') - .replace("\"schema_version\": 1", "\"schema_version\": 0") + AlertV2Indices + .alertV2Mapping() + .trimStart('{') + .trimEnd('}') + .replace("\"schema_version\": 1", "\"schema_version\": 0"), ) assertIndexExists(AlertV2Indices.ALERT_V2_INDEX) assertIndexExists(AlertV2Indices.ALERT_V2_HISTORY_WRITE_INDEX) @@ -72,6 +78,7 @@ class AlertV2IndicesIT : AlertingRestTestCase() { verifyIndexSchemaVersion(AlertV2Indices.ALERT_V2_HISTORY_WRITE_INDEX, 1) } + @Test fun `test alert v2 index gets recreated automatically if deleted`() { wipeAllODFEIndices() assertIndexDoesNotExist(AlertV2Indices.ALERT_V2_INDEX) @@ -88,6 +95,7 @@ class AlertV2IndicesIT : AlertingRestTestCase() { generateAlertV2s() } + @Test fun `test rollover alert v2 history index`() { // Update the rollover check to be every 1 second and the index max age to be 1 second client().updateSettings(AlertingSettings.ALERT_V2_HISTORY_ROLLOVER_PERIOD.key, "1s") @@ -103,27 +111,30 @@ class AlertV2IndicesIT : AlertingRestTestCase() { assertTrue("Did not find 3 alert v2 indices", getAlertV2Indices().size >= 3) } + @Test fun `test alert v2 history disabled`() { resetHistorySettings() // Disable alert history client().updateSettings(AlertingSettings.ALERT_V2_HISTORY_ENABLED.key, "false") - val pplMonitorId = generateAlertV2s( - randomPPLMonitor( - schedule = IntervalSchedule(interval = 30, unit = MINUTES), - query = "source = $TEST_INDEX_NAME | head 3", - triggers = listOf( - randomPPLTrigger( - mode = PPLSQLTrigger.TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - expireDuration = 1L - ) - ) + val pplMonitorId = + generateAlertV2s( + randomPPLMonitor( + schedule = IntervalSchedule(interval = 
30, unit = MINUTES), + query = "source = $TEST_INDEX_NAME | head 3", + triggers = + listOf( + randomPPLTrigger( + mode = PPLSQLTrigger.TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + expireDuration = 1L, + ), + ), + ), ) - ) val alerts1 = searchAlertV2s(pplMonitorId) assertEquals("1 alert should be present", 1, alerts1.size) @@ -140,24 +151,27 @@ class AlertV2IndicesIT : AlertingRestTestCase() { assertTrue("There should be no alerts, but alerts were found", alerts2.isEmpty()) } + @Test fun `test short retention period`() { resetHistorySettings() - val pplMonitorId = generateAlertV2s( - randomPPLMonitor( - schedule = IntervalSchedule(interval = 30, unit = MINUTES), - query = "source = $TEST_INDEX_NAME | head 3", - triggers = listOf( - randomPPLTrigger( - mode = PPLSQLTrigger.TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - expireDuration = 1L - ) - ) + val pplMonitorId = + generateAlertV2s( + randomPPLMonitor( + schedule = IntervalSchedule(interval = 30, unit = MINUTES), + query = "source = $TEST_INDEX_NAME | head 3", + triggers = + listOf( + randomPPLTrigger( + mode = PPLSQLTrigger.TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + expireDuration = 1L, + ), + ), + ), ) - ) val alerts1 = searchAlertV2s(pplMonitorId) assertEquals("1 alert should be present", 1, alerts1.size) @@ -191,29 +205,32 @@ class AlertV2IndicesIT : AlertingRestTestCase() { assertEquals(0, getAlertV2HistoryDocCount()) } + @Test fun `test generated alert gets expired because monitor was deleted with alert history enabled`() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(1, MINUTES, "abc", 5) - val pplMonitor = 
createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 20, unit = MINUTES), - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - // for this test, configured expire can't be the reason for alert expiration - expireDuration = 1000L, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 20, unit = MINUTES), + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + // for this test, configured expire can't be the reason for alert expiration + expireDuration = 1000L, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -239,29 +256,32 @@ class AlertV2IndicesIT : AlertingRestTestCase() { assertEquals(1, getAlertV2HistoryDocCount()) } + @Test fun `test generated alert gets expired because monitor was edited with alert history enabled`() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(1, MINUTES, "abc", 5) // first create a ppl monitor that's guaranteed to generate an alert - val initialPplTrigger = randomPPLTrigger( - id = "initialID", - throttleDuration = null, - // for this test, configured expire can't be the reason for alert expiration - expireDuration = 1000L, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, 
- customCondition = null - ) + val initialPplTrigger = + randomPPLTrigger( + id = "initialID", + throttleDuration = null, + // for this test, configured expire can't be the reason for alert expiration + expireDuration = 1000L, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ) - val initialPplMonitorConfig = randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 20, unit = MINUTES), - triggers = listOf(initialPplTrigger), - query = "source = $TEST_INDEX_NAME | head 10" - ) + val initialPplMonitorConfig = + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 20, unit = MINUTES), + triggers = listOf(initialPplTrigger), + query = "source = $TEST_INDEX_NAME | head 10", + ) val pplMonitor = createRandomPPLMonitor(initialPplMonitorConfig) @@ -290,31 +310,34 @@ class AlertV2IndicesIT : AlertingRestTestCase() { assertEquals(1, getAlertV2HistoryDocCount()) } + @Test fun `test generated alert gets expired because monitor was deleted with alert history disabled`() { client().updateSettings(AlertingSettings.ALERT_V2_HISTORY_ENABLED.key, "false") createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(1, MINUTES, "abc", 5) - val pplMonitor = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 20, unit = MINUTES), - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - // for this test, configured expire can't be the reason for alert expiration - expireDuration = 1000L, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitor = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 20, unit = 
MINUTES), + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + // for this test, configured expire can't be the reason for alert expiration + expireDuration = 1000L, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -340,6 +363,7 @@ class AlertV2IndicesIT : AlertingRestTestCase() { assertEquals(0, getAlertV2HistoryDocCount()) } + @Test fun `test generated alert gets expired because monitor was edited with alert history disabled`() { client().updateSettings(AlertingSettings.ALERT_V2_HISTORY_ENABLED.key, "false") @@ -347,24 +371,26 @@ class AlertV2IndicesIT : AlertingRestTestCase() { indexDocFromSomeTimeAgo(1, MINUTES, "abc", 5) // first create a ppl monitor that's guaranteed to generate an alert - val initialPplTrigger = randomPPLTrigger( - id = "initialID", - throttleDuration = null, - // for this test, configured expire can't be the reason for alert expiration - expireDuration = 1000L, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val initialPplTrigger = + randomPPLTrigger( + id = "initialID", + throttleDuration = null, + // for this test, configured expire can't be the reason for alert expiration + expireDuration = 1000L, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ) - val initialPplMonitorConfig = randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 20, unit = 
MINUTES), - triggers = listOf(initialPplTrigger), - query = "source = $TEST_INDEX_NAME | head 10" - ) + val initialPplMonitorConfig = + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 20, unit = MINUTES), + triggers = listOf(initialPplTrigger), + query = "source = $TEST_INDEX_NAME | head 10", + ) val pplMonitor = createRandomPPLMonitor(initialPplMonitorConfig) @@ -420,16 +446,18 @@ class AlertV2IndicesIT : AlertingRestTestCase() { // generates alerts by creating then executing a monitor private fun generateAlertV2s( - pplMonitorConfig: PPLSQLMonitor = randomPPLMonitor( - query = "source = $TEST_INDEX_NAME | head 3", - triggers = listOf( - randomPPLTrigger( - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L - ) - ) - ) + pplMonitorConfig: PPLSQLMonitor = + randomPPLMonitor( + query = "source = $TEST_INDEX_NAME | head 3", + triggers = + listOf( + randomPPLTrigger( + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + ), + ), + ), ): String { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(1, MINUTES, "abc", 5) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/bwc/AlertingBackwardsCompatibilityIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/bwc/AlertingBackwardsCompatibilityIT.kt index c2f6b5fd0..60ade026d 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/bwc/AlertingBackwardsCompatibilityIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/bwc/AlertingBackwardsCompatibilityIT.kt @@ -30,9 +30,9 @@ import java.time.ZonedDateTime import java.time.format.DateTimeFormatter import java.time.temporal.ChronoUnit import java.util.concurrent.TimeUnit +import kotlin.test.Test class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { - companion object { private val CLUSTER_TYPE = 
ClusterType.parse(System.getProperty("tests.rest.bwcsuite")) private val CLUSTER_NAME = System.getProperty("tests.clustername") @@ -46,18 +46,19 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { override fun preserveODFEIndicesAfterTest(): Boolean = true - override fun restClientSettings(): Settings { - return Settings.builder() + override fun restClientSettings(): Settings = + Settings + .builder() .put(super.restClientSettings()) // increase the timeout here to 90 seconds to handle long waits for a green // cluster health. the waits for green need to be longer than a minute to // account for delayed shards .put(CLIENT_SOCKET_TIMEOUT, "90s") .build() - } @Throws(Exception::class) @Suppress("UNCHECKED_CAST") + @Test fun `test backwards compatibility`() { val uri = getPluginUri() val responseMap = getAsMap(uri)["nodes"] as Map> @@ -69,6 +70,7 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { assertTrue(pluginNames.contains("opensearch-alerting")) createBasicMonitor() } + ClusterType.MIXED -> { assertTrue(pluginNames.contains("opensearch-alerting")) verifyMonitorExists(ALERTING_BASE_URI) @@ -76,6 +78,7 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { // MONITOR_BASE_URI verifyMonitorStats("/_plugins/_alerting") } + ClusterType.UPGRADED -> { assertTrue(pluginNames.contains("opensearch-alerting")) verifyMonitorExists(ALERTING_BASE_URI) @@ -106,6 +109,7 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { @Throws(Exception::class) @Suppress("UNCHECKED_CAST") + @Test fun `test backwards compatibility for doc-level monitors`() { val uri = getPluginUri() val responseMap = getAsMap(uri)["nodes"] as Map> @@ -117,10 +121,12 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { assertTrue(pluginNames.contains("opensearch-alerting")) createDocLevelMonitor() } + ClusterType.MIXED -> { assertTrue(pluginNames.contains("opensearch-alerting")) verifyMonitorExecutionSuccess() } + 
ClusterType.UPGRADED -> { assertTrue(pluginNames.contains("opensearch-alerting")) verifyMonitorExecutionSuccess() @@ -133,23 +139,26 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { private enum class ClusterType { OLD, MIXED, - UPGRADED; + UPGRADED, + ; companion object { - fun parse(value: String): ClusterType { - return when (value) { + fun parse(value: String): ClusterType = + when (value) { "old_cluster" -> OLD "mixed_cluster" -> MIXED "upgraded_cluster" -> UPGRADED else -> throw AssertionError("Unknown cluster type: $value") } - } } } - private fun getPluginUri(): String { - return when (CLUSTER_TYPE) { - ClusterType.OLD -> "_nodes/$CLUSTER_NAME-0/plugins" + private fun getPluginUri(): String = + when (CLUSTER_TYPE) { + ClusterType.OLD -> { + "_nodes/$CLUSTER_NAME-0/plugins" + } + ClusterType.MIXED -> { when (System.getProperty("tests.rest.bwcsuite_round")) { "second" -> "_nodes/$CLUSTER_NAME-1/plugins" @@ -157,14 +166,17 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { else -> "_nodes/$CLUSTER_NAME-0/plugins" } } - ClusterType.UPGRADED -> "_nodes/plugins" + + ClusterType.UPGRADED -> { + "_nodes/plugins" + } } - } @Throws(Exception::class) private fun createBasicMonitor() { val indexName = "test_bwc_index" - val bwcMonitorString = """ + val bwcMonitorString = + """ { "owner": "alerting", "type": "monitor", @@ -200,15 +212,16 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { "actions": [] }] } - """.trimIndent() + """.trimIndent() createIndex(indexName, Settings.EMPTY) - val createResponse = client().makeRequest( - method = "POST", - endpoint = "$ALERTING_BASE_URI?refresh=true", - params = emptyMap(), - entity = StringEntity(bwcMonitorString, APPLICATION_JSON) - ) + val createResponse = + client().makeRequest( + method = "POST", + endpoint = "$ALERTING_BASE_URI?refresh=true", + params = emptyMap(), + entity = StringEntity(bwcMonitorString, APPLICATION_JSON), + ) assertEquals("Create monitor failed", 
RestStatus.CREATED, createResponse.restStatus()) val responseBody = createResponse.asMap() @@ -224,15 +237,16 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = createMonitor( - randomDocumentLevelMonitor( - name = "test-monitor", - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - enabled = true, - schedule = IntervalSchedule(1, ChronoUnit.MINUTES) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + name = "test-monitor", + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + enabled = true, + schedule = IntervalSchedule(1, ChronoUnit.MINUTES), + ), ) - ) assertNotNull(monitor.id) return Pair(monitor, testIndex) } @@ -241,12 +255,13 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { @Suppress("UNCHECKED_CAST") private fun verifyMonitorExists(uri: String) { val search = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).toString() - val searchResponse = client().makeRequest( - "GET", - "$uri/_search", - emptyMap(), - StringEntity(search, APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "GET", + "$uri/_search", + emptyMap(), + StringEntity(search, APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! as Map> @@ -256,16 +271,19 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { @Throws(Exception::class) @Suppress("UNCHECKED_CAST") - /** + /* * Monitor stats will check if the Monitor scheduled job is running on time but does not necessarily mean that the * Monitor execution itself did not fail. 
*/ - private fun verifyMonitorStats(uri: String) { - val statsResponse = client().makeRequest( - "GET", - "$uri/stats", - emptyMap() - ) + private fun verifyMonitorStats( + uri: String, + ) { + val statsResponse = + client().makeRequest( + "GET", + "$uri/stats", + emptyMap(), + ) assertEquals("Monitor stats failed", RestStatus.OK, statsResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), statsResponse.entity.content) val responseMap = xcp.map() @@ -297,7 +315,11 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { val client = client() client.setNodes(nodes) val searchResponse = searchMonitors() - val monitorId = searchResponse.hits.hits.filter { it.sourceAsMap["name"] == "test-monitor" }.first().id + val monitorId = + searchResponse.hits.hits + .filter { it.sourceAsMap["name"] == "test-monitor" } + .first() + .id val testIndex = "test-index" val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) @@ -317,6 +339,7 @@ class AlertingBackwardsCompatibilityIT : AlertingRestTestCase() { assertEquals("test-monitor", output["monitor_name"]) @Suppress("UNCHECKED_CAST") val searchResult = (output.objectMap("input_results")["results"] as List>).first() + @Suppress("UNCHECKED_CAST") val matchingDocsToQuery = searchResult["4"] as List passed = matchingDocsToQuery.isNotEmpty() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/chainedAlertCondition/ChainedAlertsExpressionParserTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/chainedAlertCondition/ChainedAlertsExpressionParserTests.kt index 7ebc82697..7cec72264 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/chainedAlertCondition/ChainedAlertsExpressionParserTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/chainedAlertCondition/ChainedAlertsExpressionParserTests.kt @@ -8,9 +8,10 @@ package org.opensearch.alerting.chainedAlertCondition import org.junit.Assert import 
org.opensearch.alerting.chainedAlertCondition.parsers.ChainedAlertExpressionParser import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class ChainedAlertsExpressionParserTests : OpenSearchTestCase() { - + @Test fun `test trigger expression posix parsing simple AND`() { val eqString = "(monitor[id=abc] && monitor[id=xyz])" val equation = ChainedAlertExpressionParser(eqString).parse() @@ -18,6 +19,7 @@ class ChainedAlertsExpressionParserTests : OpenSearchTestCase() { Assert.assertTrue(expectedEquation == equation.toString()) } + @Test fun `test trigger expression posix parsing simple AND without parentheses`() { val eqString = "monitor[id=abc] && monitor[id=xyz]" val equation = ChainedAlertExpressionParser(eqString).parse() @@ -25,60 +27,68 @@ class ChainedAlertsExpressionParserTests : OpenSearchTestCase() { Assert.assertTrue(expectedEquation == equation.toString()) } + @Test fun `test trigger expression posix parsing multiple AND`() { val eqString = "(monitor[id=abc] && monitor[id=def]) && monitor[id=ghi]" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals("monitor[id=abc] monitor[id=def] && monitor[id=ghi] && ", equation.toString()) } + @Test fun `test trigger expression posix parsing multiple AND with parenthesis`() { val eqString = "(monitor[id=sigma-123] && monitor[id=sigma-456]) && (monitor[id=sigma-789] && monitor[id=id-2aw34])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals( "monitor[id=sigma-123] monitor[id=sigma-456] && monitor[id=sigma-789] monitor[id=id-2aw34] && && ", - equation.toString() + equation.toString(), ) } + @Test fun `test trigger expression posix parsing simple OR`() { val eqString = "(monitor[id=sigma-123] || monitor[id=sigma-456])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals("monitor[id=sigma-123] monitor[id=sigma-456] || ", equation.toString()) } + @Test fun `test trigger expression posix parsing multiple OR`() { val 
eqString = "(monitor[id=sigma-123] || monitor[id=sigma-456]) || monitor[id=sigma-789]" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals("monitor[id=sigma-123] monitor[id=sigma-456] || monitor[id=sigma-789] || ", equation.toString()) } + @Test fun `test trigger expression posix parsing multiple OR with parenthesis`() { val eqString = "(monitor[id=sigma-123] || monitor[id=sigma-456]) || (monitor[id=sigma-789] || monitor[id=id-2aw34])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals( "monitor[id=sigma-123] monitor[id=sigma-456] || monitor[id=sigma-789] monitor[id=id-2aw34] || || ", - equation.toString() + equation.toString(), ) } + @Test fun `test trigger expression posix parsing simple NOT`() { val eqString = "(monitor[id=sigma-123] || !monitor[id=sigma-456])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals("monitor[id=sigma-123] monitor[id=sigma-456] ! || ", equation.toString()) } + @Test fun `test trigger expression posix parsing multiple NOT`() { val eqString = "(monitor[id=sigma-123] && !monitor[tag=tag-456]) && !(monitor[id=sigma-789])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals("monitor[id=sigma-123] monitor[tag=tag-456] ! && monitor[id=sigma-789] ! && ", equation.toString()) } + @Test fun `test trigger expression posix parsing multiple operators with parenthesis`() { val eqString = "(monitor[id=sigma-123] && monitor[tag=sev1]) || !(!monitor[id=sigma-789] || monitor[id=id-2aw34])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals( "monitor[id=sigma-123] monitor[tag=sev1] && monitor[id=sigma-789] ! monitor[id=id-2aw34] || ! 
|| ", - equation.toString() + equation.toString(), ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/chainedAlertCondition/ChainedAlertsExpressionResolveTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/chainedAlertCondition/ChainedAlertsExpressionResolveTests.kt index a0851d58d..80c742fa6 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/chainedAlertCondition/ChainedAlertsExpressionResolveTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/chainedAlertCondition/ChainedAlertsExpressionResolveTests.kt @@ -8,111 +8,130 @@ package org.opensearch.alerting.chainedAlertCondition import org.junit.Assert import org.opensearch.alerting.chainedAlertCondition.parsers.ChainedAlertExpressionParser import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class ChainedAlertsExpressionResolveTests : OpenSearchTestCase() { - + @Test fun `test chained alert trigger expression evaluation simple AND`() { val eqString = "(monitor[id=123] && monitor[id=456])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals("monitor[id=123] monitor[id=456] && ", equation.toString()) - val alertGeneratingMonitors: Set = setOf( - "123", - "456" - ) + val alertGeneratingMonitors: Set = + setOf( + "123", + "456", + ) Assert.assertTrue(equation.evaluate(alertGeneratingMonitors)) - val alertGeneratingMonitors2: Set = setOf( - "123", - "789" - ) + val alertGeneratingMonitors2: Set = + setOf( + "123", + "789", + ) Assert.assertFalse(equation.evaluate(alertGeneratingMonitors2)) } + @Test fun `test chained alert trigger expression evaluation AND with NOT`() { val eqString = "(monitor[id=123] && !monitor[id=456])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals("monitor[id=123] monitor[id=456] ! 
&& ", equation.toString()) - val alertGeneratingMonitors: Set = setOf( - "123", - "456" - ) + val alertGeneratingMonitors: Set = + setOf( + "123", + "456", + ) Assert.assertFalse(equation.evaluate(alertGeneratingMonitors)) - val alertGeneratingMonitors1: Set = setOf( - "123", - "223" - ) + val alertGeneratingMonitors1: Set = + setOf( + "123", + "223", + ) Assert.assertTrue(equation.evaluate(alertGeneratingMonitors1)) } + @Test fun `test chained alert trigger expression evaluation simple OR`() { val eqString = "(monitor[id=123] || monitor[id=456])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals("monitor[id=123] monitor[id=456] || ", equation.toString()) - val alertGeneratingMonitors: Set = setOf( - "123", - "456" - ) + val alertGeneratingMonitors: Set = + setOf( + "123", + "456", + ) Assert.assertTrue(equation.evaluate(alertGeneratingMonitors)) - val alertGeneratingMonitors2: Set = setOf( - "234", - "567" - ) + val alertGeneratingMonitors2: Set = + setOf( + "234", + "567", + ) Assert.assertFalse(equation.evaluate(alertGeneratingMonitors2)) } + @Test fun `test chained alert trigger expression evaluation OR with NOT`() { val eqString = "(monitor[id=123] || !monitor[id=456])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals("monitor[id=123] monitor[id=456] ! || ", equation.toString()) - val alertGeneratingMonitors: Set = setOf( - "123", - "456" - ) + val alertGeneratingMonitors: Set = + setOf( + "123", + "456", + ) Assert.assertTrue(equation.evaluate(alertGeneratingMonitors)) - val alertGeneratingMonitors2: Set = setOf( - "456" - ) + val alertGeneratingMonitors2: Set = + setOf( + "456", + ) Assert.assertFalse(equation.evaluate(alertGeneratingMonitors2)) } + @Test fun `test chained alert trigger expression evaluation simple NOT`() { val eqString = "!(monitor[id=456])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals("monitor[id=456] ! 
", equation.toString()) - val alertGeneratingMonitors: Set = setOf( - "123" - ) + val alertGeneratingMonitors: Set = + setOf( + "123", + ) Assert.assertTrue(equation.evaluate(alertGeneratingMonitors)) - val alertGeneratingMonitors2: Set = setOf( - "456" - ) + val alertGeneratingMonitors2: Set = + setOf( + "456", + ) Assert.assertFalse(equation.evaluate(alertGeneratingMonitors2)) } + @Test fun `test chained alert trigger expression evaluation with multiple operators with parenthesis`() { val eqString = "(monitor[id=123] && monitor[id=456]) || !(!monitor[id=789] || monitor[id=abc])" val equation = ChainedAlertExpressionParser(eqString).parse() Assert.assertEquals( "monitor[id=123] monitor[id=456] && monitor[id=789] ! monitor[id=abc] || ! || ", - equation.toString() + equation.toString(), ) // part 1 evaluates, part 2 evaluates - val alertGeneratingMonitors1: Set = setOf( - "123", - "456", - "789", - "abc" - ) + val alertGeneratingMonitors1: Set = + setOf( + "123", + "456", + "789", + "abc", + ) Assert.assertTrue(equation.evaluate(alertGeneratingMonitors1)) // part 1 not evaluates, part 2 not evaluates - val alertGeneratingMonitors2: Set = setOf( - "789", - "abc" - ) + val alertGeneratingMonitors2: Set = + setOf( + "789", + "abc", + ) Assert.assertFalse(equation.evaluate(alertGeneratingMonitors2)) // part 1 not evaluates, part 2 evaluates - val alertGeneratingMonitors3: Set = setOf( - "789" - ) + val alertGeneratingMonitors3: Set = + setOf( + "789", + ) Assert.assertTrue(equation.evaluate(alertGeneratingMonitors3)) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertContextTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertContextTests.kt index 0f002ba22..2a1774772 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertContextTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertContextTests.kt @@ -11,17 +11,19 @@ import org.opensearch.alerting.randomFinding import 
org.opensearch.commons.alerting.model.Alert import org.opensearch.commons.alerting.model.DocLevelQuery import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test @Suppress("UNCHECKED_CAST") class AlertContextTests : OpenSearchTestCase() { - + @Test fun `test AlertContext asTemplateArg with null associatedQueries and null sampleDocs`() { val associatedQueries: List? = null val sampleDocs: List>? = null - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -30,13 +32,15 @@ class AlertContextTests : OpenSearchTestCase() { assertNull("Template sample docs should be null", templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with null associatedQueries and 0 sampleDocs`() { val associatedQueries: List? = null val sampleDocs: List> = listOf() - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -45,18 +49,20 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args sample docs should have size ${sampleDocs!!.size}", sampleDocs!!.size, - (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size + (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size, ) assertEquals("Template args sample docs do not match", alertContext.sampleDocs, templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with null associatedQueries and 1 sampleDocs`() { val associatedQueries: List? 
= null val sampleDocs: List> = listOf(randomFinding().asTemplateArg()) - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -65,18 +71,20 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args sample docs should have size ${sampleDocs.size}", sampleDocs.size, - (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size + (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size, ) assertEquals("Template args sample docs do not match", alertContext.sampleDocs, templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with null associatedQueries and multiple sampleDocs`() { val associatedQueries: List? = null val sampleDocs: List> = (0..2).map { randomFinding().asTemplateArg() } - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -85,18 +93,20 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args sample docs should have size ${sampleDocs.size}", sampleDocs.size, - (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size + (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size, ) assertEquals("Template args sample docs do not match", alertContext.sampleDocs, templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with 0 associatedQueries and null sampleDocs`() { val associatedQueries: List = listOf() val sampleDocs: List>? 
= null - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -104,23 +114,25 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args associated queries should have size ${associatedQueries.size}", associatedQueries.size, - (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size + (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size, ) assertEquals( "Template associated queries do not match", formatAssociatedQueries(alertContext), - templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] + templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD], ) assertNull("Template sample docs should be null", templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with 1 associatedQueries and null sampleDocs`() { val associatedQueries: List = listOf(randomDocLevelQuery()) val sampleDocs: List>? 
= null - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -128,23 +140,25 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args associated queries should have size ${associatedQueries.size}", associatedQueries.size, - (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size + (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size, ) assertEquals( "Template associated queries do not match", formatAssociatedQueries(alertContext), - templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] + templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD], ) assertNull("Template sample docs should be null", templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with multiple associatedQueries and null sampleDocs`() { val associatedQueries: List = (0..2).map { randomDocLevelQuery() } val sampleDocs: List>? 
= null - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -152,23 +166,25 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args associated queries should have size ${associatedQueries.size}", associatedQueries.size, - (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size + (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size, ) assertEquals( "Template associated queries do not match", formatAssociatedQueries(alertContext), - templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] + templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD], ) assertNull("Template sample docs should be null", templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with 0 associatedQueries and 0 sampleDocs`() { val associatedQueries: List = listOf() val sampleDocs: List> = listOf() - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -176,29 +192,31 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args associated queries should have size ${associatedQueries.size}", associatedQueries.size, - (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size + (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size, ) assertEquals( "Template associated queries do not match", formatAssociatedQueries(alertContext), - templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] + templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD], ) assertEquals( "Template args sample docs should 
have size ${sampleDocs.size}", sampleDocs.size, - (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size + (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size, ) assertEquals("Template args sample docs do not match", alertContext.sampleDocs, templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with 0 associatedQueries and 1 sampleDocs`() { val associatedQueries: List = listOf() val sampleDocs: List> = listOf(randomFinding().asTemplateArg()) - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -206,29 +224,31 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args associated queries should have size ${associatedQueries.size}", associatedQueries.size, - (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size + (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size, ) assertEquals( "Template associated queries do not match", formatAssociatedQueries(alertContext), - templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] + templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD], ) assertEquals( "Template args sample docs should have size ${sampleDocs.size}", sampleDocs.size, - (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size + (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size, ) assertEquals("Template args sample docs do not match", alertContext.sampleDocs, templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with 0 associatedQueries and multiple sampleDocs`() { val associatedQueries: List = listOf() val sampleDocs: List> = (0..2).map { randomFinding().asTemplateArg() } - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - 
sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -236,29 +256,31 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args associated queries should have size ${associatedQueries.size}", associatedQueries.size, - (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size + (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size, ) assertEquals( "Template associated queries do not match", formatAssociatedQueries(alertContext), - templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] + templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD], ) assertEquals( "Template args sample docs should have size ${sampleDocs.size}", sampleDocs.size, - (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size + (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size, ) assertEquals("Template args sample docs do not match", alertContext.sampleDocs, templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with 1 associatedQueries and 0 sampleDocs`() { val associatedQueries: List = listOf(randomDocLevelQuery()) val sampleDocs: List> = listOf() - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -266,29 +288,31 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args associated queries should have size ${associatedQueries.size}", associatedQueries.size, - (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size + (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size, ) assertEquals( "Template associated queries do not match", 
formatAssociatedQueries(alertContext), - templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] + templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD], ) assertEquals( "Template args sample docs should have size ${sampleDocs.size}", sampleDocs.size, - (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size + (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size, ) assertEquals("Template args sample docs do not match", alertContext.sampleDocs, templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with multiple associatedQueries and 0 sampleDocs`() { val associatedQueries: List = (0..2).map { randomDocLevelQuery() } val sampleDocs: List> = listOf() - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -296,29 +320,31 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args associated queries should have size ${associatedQueries.size}", associatedQueries.size, - (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size + (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size, ) assertEquals( "Template associated queries do not match", formatAssociatedQueries(alertContext), - templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] + templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD], ) assertEquals( "Template args sample docs should have size ${sampleDocs.size}", sampleDocs.size, - (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size + (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size, ) assertEquals("Template args sample docs do not match", alertContext.sampleDocs, templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with 1 associatedQueries and 1 sampleDocs`() { val 
associatedQueries: List = listOf(randomDocLevelQuery()) val sampleDocs: List> = listOf(randomFinding().asTemplateArg()) - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -326,29 +352,31 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args associated queries should have size ${associatedQueries.size}", associatedQueries.size, - (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size + (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size, ) assertEquals( "Template associated queries do not match", formatAssociatedQueries(alertContext), - templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] + templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD], ) assertEquals( "Template args sample docs should have size ${sampleDocs.size}", sampleDocs.size, - (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size + (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size, ) assertEquals("Template args sample docs do not match", alertContext.sampleDocs, templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } + @Test fun `test AlertContext asTemplateArg with multiple associatedQueries and multiple sampleDocs`() { val associatedQueries: List = (0..2).map { randomDocLevelQuery() } val sampleDocs: List> = (0..2).map { randomFinding().asTemplateArg() } - val alertContext: AlertContext = randomAlertContext( - associatedQueries = associatedQueries, - sampleDocs = sampleDocs - ) + val alertContext: AlertContext = + randomAlertContext( + associatedQueries = associatedQueries, + sampleDocs = sampleDocs, + ) val templateArgs = alertContext.asTemplateArg() @@ -356,23 +384,26 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args associated queries should have size 
${associatedQueries.size}", associatedQueries.size, - (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size + (templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] as List).size, ) assertEquals( "Template associated queries do not match", formatAssociatedQueries(alertContext), - templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD] + templateArgs[AlertContext.ASSOCIATED_QUERIES_FIELD], ) assertEquals( "Template args sample docs should have size ${sampleDocs.size}", sampleDocs.size, - (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size + (templateArgs[AlertContext.SAMPLE_DOCS_FIELD] as List>).size, ) assertEquals("Template args sample docs do not match", alertContext.sampleDocs, templateArgs[AlertContext.SAMPLE_DOCS_FIELD]) } - private fun assertAlertIsEqual(alertContext: AlertContext, templateArgs: Map) { + private fun assertAlertIsEqual( + alertContext: AlertContext, + templateArgs: Map, + ) { assertEquals("Template args id does not match", alertContext.alert.id, templateArgs[Alert.ALERT_ID_FIELD]) assertEquals("Template args version does not match", alertContext.alert.version, templateArgs[Alert.ALERT_VERSION_FIELD]) assertEquals("Template args state does not match", alertContext.alert.state.toString(), templateArgs[Alert.STATE_FIELD]) @@ -385,17 +416,16 @@ class AlertContextTests : OpenSearchTestCase() { assertEquals( "Template args clusters does not match", alertContext.alert.clusters?.joinToString(","), - templateArgs[Alert.CLUSTERS_FIELD] + templateArgs[Alert.CLUSTERS_FIELD], ) } - private fun formatAssociatedQueries(alertContext: AlertContext): List>? { - return alertContext.associatedQueries?.map { + private fun formatAssociatedQueries(alertContext: AlertContext): List>? 
= + alertContext.associatedQueries?.map { mapOf( DocLevelQuery.QUERY_ID_FIELD to it.id, DocLevelQuery.NAME_FIELD to it.name, - DocLevelQuery.TAGS_FIELD to it.tags + DocLevelQuery.TAGS_FIELD to it.tags, ) } - } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertTests.kt index 08fba74cb..a59391355 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/AlertTests.kt @@ -10,8 +10,10 @@ import org.opensearch.alerting.randomAlert import org.opensearch.alerting.randomAlertWithAggregationResultBucket import org.opensearch.commons.alerting.model.Alert import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class AlertTests : OpenSearchTestCase() { + @Test fun `test alert as template args`() { val alert = randomAlert().copy(acknowledgedTime = null, lastNotificationTime = null) @@ -28,6 +30,7 @@ class AlertTests : OpenSearchTestCase() { assertEquals("Template args severity does not match", templateArgs[Alert.SEVERITY_FIELD], alert.severity) } + @Test fun `test agg alert as template args`() { val alert = randomAlertWithAggregationResultBucket().copy(acknowledgedTime = null, lastNotificationTime = null) @@ -44,14 +47,17 @@ class AlertTests : OpenSearchTestCase() { assertEquals("Template args severity does not match", templateArgs[Alert.SEVERITY_FIELD], alert.severity) Assert.assertEquals( "Template args bucketKeys do not match", - templateArgs[Alert.BUCKET_KEYS], alert.aggregationResultBucket?.bucketKeys?.joinToString(",") + templateArgs[Alert.BUCKET_KEYS], + alert.aggregationResultBucket?.bucketKeys?.joinToString(","), ) Assert.assertEquals( "Template args parentBucketPath does not match", - templateArgs[Alert.PARENTS_BUCKET_PATH], alert.aggregationResultBucket?.parentBucketPath + templateArgs[Alert.PARENTS_BUCKET_PATH], + alert.aggregationResultBucket?.parentBucketPath, ) } + @Test 
fun `test alert acknowledged`() { val ackAlert = randomAlert().copy(state = Alert.State.ACKNOWLEDGED) assertTrue("Alert is not acknowledged", ackAlert.isAcknowledged()) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/DestinationTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/DestinationTests.kt index 7dac05b2a..d7270dac2 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/DestinationTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/DestinationTests.kt @@ -19,14 +19,16 @@ import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase import java.time.Instant +import kotlin.test.Test class DestinationTests : OpenSearchTestCase() { - + @Test fun `test chime destination`() { val chime = Chime("http://abc.com") assertEquals("Url is manipulated", chime.url, "http://abc.com") } + @Test fun `test chime destination with out url`() { try { Chime("") @@ -35,11 +37,13 @@ class DestinationTests : OpenSearchTestCase() { } } + @Test fun `test slack destination`() { val slack = Slack("http://abc.com") assertEquals("Url is manipulated", slack.url, "http://abc.com") } + @Test fun `test slack destination with out url`() { try { Slack("") @@ -48,6 +52,7 @@ class DestinationTests : OpenSearchTestCase() { } } + @Test fun `test email destination without recipients`() { try { Email("", emptyList()) @@ -56,31 +61,35 @@ class DestinationTests : OpenSearchTestCase() { } } + @Test fun `test email recipient with valid email`() { Recipient( Recipient.RecipientType.EMAIL, null, - "test@email.com" + "test@email.com", ) } + @Test fun `test email recipient with invalid email fails`() { try { Recipient( Recipient.RecipientType.EMAIL, null, - "invalid@email" + "invalid@email", ) fail("Creating an email recipient with an invalid email did not fail.") } catch (ignored: IllegalArgumentException) { } } + @Test fun `test custom webhook 
destination with url and no host`() { val customWebhook = CustomWebhook("http://abc.com", null, null, -1, null, null, emptyMap(), emptyMap(), null, null) assertEquals("Url is manipulated", customWebhook.url, "http://abc.com") } + @Test fun `test custom webhook destination with host and no url`() { try { val customWebhook = CustomWebhook(null, null, "abc.com", 80, null, null, emptyMap(), emptyMap(), null, null) @@ -89,12 +98,14 @@ class DestinationTests : OpenSearchTestCase() { } } + @Test fun `test custom webhook destination with url and host`() { // In this case, url will be given priority val customWebhook = CustomWebhook("http://abc.com", null, null, -1, null, null, emptyMap(), emptyMap(), null, null) assertEquals("Url is manipulated", customWebhook.url, "http://abc.com") } + @Test fun `test custom webhook destination with no url and no host`() { try { CustomWebhook("", null, null, 80, null, null, emptyMap(), emptyMap(), null, null) @@ -103,11 +114,24 @@ class DestinationTests : OpenSearchTestCase() { } } + @Test fun `test chime destination create using stream`() { - val chimeDest = Destination( - "1234", 0L, 1, 1, 1, DestinationType.CHIME, "TestChimeDest", - randomUser(), Instant.now(), Chime("test.com"), null, null, null - ) + val chimeDest = + Destination( + "1234", + 0L, + 1, + 1, + 1, + DestinationType.CHIME, + "TestChimeDest", + randomUser(), + Instant.now(), + Chime("test.com"), + null, + null, + null, + ) val out = BytesStreamOutput() chimeDest.writeTo(out) @@ -127,11 +151,24 @@ class DestinationTests : OpenSearchTestCase() { assertNull(newDest.email) } + @Test fun `test slack destination create using stream`() { - val slackDest = Destination( - "2345", 1L, 2, 1, 1, DestinationType.SLACK, "TestSlackDest", - randomUser(), Instant.now(), null, Slack("mytest.com"), null, null - ) + val slackDest = + Destination( + "2345", + 1L, + 2, + 1, + 1, + DestinationType.SLACK, + "TestSlackDest", + randomUser(), + Instant.now(), + null, + Slack("mytest.com"), + null, 
+ null, + ) val out = BytesStreamOutput() slackDest.writeTo(out) @@ -151,33 +188,35 @@ class DestinationTests : OpenSearchTestCase() { assertNull(newDest.email) } + @Test fun `test customwebhook destination create using stream`() { - val customWebhookDest = Destination( - "2345", - 1L, - 2, - 1, - 1, - DestinationType.SLACK, - "TestSlackDest", - randomUser(), - Instant.now(), - null, - null, - CustomWebhook( - "test.com", - "schema", - "localhost", - 162, - "/tmp/", - "POST", - mutableMapOf(), - mutableMapOf(), - ADMIN, - ADMIN - ), - null - ) + val customWebhookDest = + Destination( + "2345", + 1L, + 2, + 1, + 1, + DestinationType.SLACK, + "TestSlackDest", + randomUser(), + Instant.now(), + null, + null, + CustomWebhook( + "test.com", + "schema", + "localhost", + 162, + "/tmp/", + "POST", + mutableMapOf(), + mutableMapOf(), + ADMIN, + ADMIN, + ), + null, + ) val out = BytesStreamOutput() customWebhookDest.writeTo(out) val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) @@ -196,33 +235,35 @@ class DestinationTests : OpenSearchTestCase() { assertNull(newDest.email) } + @Test fun `test customwebhook destination create using stream with optionals`() { - val customWebhookDest = Destination( - "2345", - 1L, - 2, - 1, - 1, - DestinationType.SLACK, - "TestSlackDest", - randomUser(), - Instant.now(), - null, - null, - CustomWebhook( - "test.com", + val customWebhookDest = + Destination( + "2345", + 1L, + 2, + 1, + 1, + DestinationType.SLACK, + "TestSlackDest", + randomUser(), + Instant.now(), null, - "localhost", - 162, null, - "POST", - mutableMapOf(), - mutableMapOf(), + CustomWebhook( + "test.com", + null, + "localhost", + 162, + null, + "POST", + mutableMapOf(), + mutableMapOf(), + null, + null, + ), null, - null - ), - null - ) + ) val out = BytesStreamOutput() customWebhookDest.writeTo(out) val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) @@ -241,29 +282,32 @@ class DestinationTests : OpenSearchTestCase() { assertNull(newDest.email) } + @Test fun 
`test email destination create using stream`() { - val recipients = listOf( - Recipient( - Recipient.RecipientType.EMAIL, + val recipients = + listOf( + Recipient( + Recipient.RecipientType.EMAIL, + null, + "test@email.com", + ), + ) + val mailDest = + Destination( + "2345", + 1L, + 2, + 1, + 1, + DestinationType.EMAIL, + "TestEmailDest", + randomUser(), + Instant.now(), + null, null, - "test@email.com" + null, + Email("3456", recipients), ) - ) - val mailDest = Destination( - "2345", - 1L, - 2, - 1, - 1, - DestinationType.EMAIL, - "TestEmailDest", - randomUser(), - Instant.now(), - null, - null, - null, - Email("3456", recipients) - ) val out = BytesStreamOutput() mailDest.writeTo(out) @@ -286,24 +330,30 @@ class DestinationTests : OpenSearchTestCase() { assertEquals(recipients, newDest.email!!.recipients) } + @Test fun `test chime destination without user`() { - val userString = "{\"type\":\"chime\",\"name\":\"TestChimeDest\",\"schema_version\":1," + - "\"last_update_time\":1600063313658,\"chime\":{\"url\":\"test.com\"}}" + val userString = + "{\"type\":\"chime\",\"name\":\"TestChimeDest\",\"schema_version\":1," + + "\"last_update_time\":1600063313658,\"chime\":{\"url\":\"test.com\"}}" val parsedDest = Destination.parse(parser(userString)) assertNull(parsedDest.user) } + @Test fun `test chime destination with user`() { - val userString = "{\"type\":\"chime\",\"name\":\"TestChimeDest\",\"user\":{\"name\":\"joe\",\"backend_roles\"" + - ":[\"ops\",\"backup\"],\"roles\":[\"ops_role, backup_role\"],\"custom_attribute_names\":[\"test_attr=test\"]}," + - "\"schema_version\":1,\"last_update_time\":1600063313658,\"chime\":{\"url\":\"test.com\"}}" + val userString = + "{\"type\":\"chime\",\"name\":\"TestChimeDest\",\"user\":{\"name\":\"joe\",\"backend_roles\"" + + ":[\"ops\",\"backup\"],\"roles\":[\"ops_role, backup_role\"],\"custom_attribute_names\":[\"test_attr=test\"]}," + + "\"schema_version\":1,\"last_update_time\":1600063313658,\"chime\":{\"url\":\"test.com\"}}" val 
parsedDest = Destination.parse(parser(userString)) assertNotNull(parsedDest.user) } + @Test fun `test chime destination with user as null`() { - val userString = "{\"type\":\"chime\",\"name\":\"TestChimeDest\",\"user\":null,\"schema_version\":1," + - "\"last_update_time\":1600063313658,\"chime\":{\"url\":\"test.com\"}}" + val userString = + "{\"type\":\"chime\",\"name\":\"TestChimeDest\",\"user\":null,\"schema_version\":1," + + "\"last_update_time\":1600063313658,\"chime\":{\"url\":\"test.com\"}}" val parsedDest = Destination.parse(parser(userString)) assertNull(parsedDest.user) } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/EmailAccountTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/EmailAccountTests.kt index d3e436378..94700e2cc 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/EmailAccountTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/EmailAccountTests.kt @@ -7,19 +7,21 @@ package org.opensearch.alerting.model import org.opensearch.alerting.model.destination.email.EmailAccount import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class EmailAccountTests : OpenSearchTestCase() { - + @Test fun `test email account`() { - val emailAccount = EmailAccount( - name = "test", - email = "test@email.com", - host = "smtp.com", - port = 25, - method = EmailAccount.MethodType.NONE, - username = null, - password = null - ) + val emailAccount = + EmailAccount( + name = "test", + email = "test@email.com", + host = "smtp.com", + port = 25, + method = EmailAccount.MethodType.NONE, + username = null, + password = null, + ) assertEquals("Email account name was changed", emailAccount.name, "test") assertEquals("Email account email was changed", emailAccount.email, "test@email.com") assertEquals("Email account host was changed", emailAccount.host, "smtp.com") @@ -27,6 +29,7 @@ class EmailAccountTests : OpenSearchTestCase() { assertEquals("Email account method was changed", 
emailAccount.method, EmailAccount.MethodType.NONE) } + @Test fun `test email account with invalid name`() { try { EmailAccount( @@ -36,13 +39,14 @@ class EmailAccountTests : OpenSearchTestCase() { port = 25, method = EmailAccount.MethodType.NONE, username = null, - password = null + password = null, ) fail("Creating an email account with an invalid name did not fail.") } catch (ignored: IllegalArgumentException) { } } + @Test fun `test email account with invalid email`() { try { EmailAccount( @@ -52,7 +56,7 @@ class EmailAccountTests : OpenSearchTestCase() { port = 25, method = EmailAccount.MethodType.NONE, username = null, - password = null + password = null, ) fail("Creating an email account with an invalid email did not fail.") } catch (ignored: IllegalArgumentException) { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/EmailGroupTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/EmailGroupTests.kt index a0c215059..ae783486d 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/EmailGroupTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/EmailGroupTests.kt @@ -8,50 +8,58 @@ package org.opensearch.alerting.model import org.opensearch.alerting.model.destination.email.EmailEntry import org.opensearch.alerting.model.destination.email.EmailGroup import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class EmailGroupTests : OpenSearchTestCase() { - + @Test fun `test email group`() { - val emailGroup = EmailGroup( - name = "test", - emails = listOf(EmailEntry("test@email.com")) - ) + val emailGroup = + EmailGroup( + name = "test", + emails = listOf(EmailEntry("test@email.com")), + ) assertEquals("Email group name was changed", emailGroup.name, "test") assertEquals("Email group emails count was changed", emailGroup.emails.size, 1) assertEquals("Email group email entry was changed", emailGroup.emails[0].email, "test@email.com") } + @Test fun `test email group get emails as list of 
string`() { - val emailGroup = EmailGroup( - name = "test", - emails = listOf( - EmailEntry("test@email.com"), - EmailEntry("test2@email.com") + val emailGroup = + EmailGroup( + name = "test", + emails = + listOf( + EmailEntry("test@email.com"), + EmailEntry("test2@email.com"), + ), ) - ) assertEquals( "List of email strings does not match email entries", - listOf("test@email.com", "test2@email.com"), emailGroup.getEmailsAsListOfString() + listOf("test@email.com", "test2@email.com"), + emailGroup.getEmailsAsListOfString(), ) } + @Test fun `test email group with invalid name fails`() { try { EmailGroup( name = "invalid name", - emails = listOf(EmailEntry("test@email.com")) + emails = listOf(EmailEntry("test@email.com")), ) fail("Creating an email group with an invalid name did not fail.") } catch (ignored: IllegalArgumentException) { } } + @Test fun `test email group with invalid email fails`() { try { EmailGroup( name = "test", - emails = listOf(EmailEntry("invalid.com")) + emails = listOf(EmailEntry("invalid.com")), ) fail("Creating an email group with an invalid email did not fail.") } catch (ignored: IllegalArgumentException) { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt index f77ca3ddc..27f6c71cd 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/FindingTests.kt @@ -8,8 +8,10 @@ package org.opensearch.alerting.model import org.opensearch.alerting.randomFinding import org.opensearch.commons.alerting.model.Finding import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class FindingTests : OpenSearchTestCase() { + @Test fun `test finding asTemplateArgs`() { // GIVEN val finding = randomFinding() @@ -22,19 +24,19 @@ class FindingTests : OpenSearchTestCase() { assertEquals( "Template args 'relatedDocIds' field does not match:", 
templateArgs[Finding.RELATED_DOC_IDS_FIELD], - finding.relatedDocIds + finding.relatedDocIds, ) assertEquals("Template args 'monitorId' field does not match:", templateArgs[Finding.MONITOR_ID_FIELD], finding.monitorId) assertEquals( "Template args 'monitorName' field does not match:", templateArgs[Finding.MONITOR_NAME_FIELD], - finding.monitorName + finding.monitorName, ) assertEquals("Template args 'queries' field does not match:", templateArgs[Finding.QUERIES_FIELD], finding.docLevelQueries) assertEquals( "Template args 'timestamp' field does not match:", templateArgs[Finding.TIMESTAMP_FIELD], - finding.timestamp.toEpochMilli() + finding.timestamp.toEpochMilli(), ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/WriteableTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/WriteableTests.kt index d30864ec0..abc06f77f 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/WriteableTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/WriteableTests.kt @@ -14,9 +14,10 @@ import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class WriteableTests : OpenSearchTestCase() { - + @Test fun `test searchinput as stream`() { val input = SearchInput(emptyList(), SearchSourceBuilder()) val out = BytesStreamOutput() @@ -26,6 +27,7 @@ class WriteableTests : OpenSearchTestCase() { assertEquals("Round tripping MonitorRunResult doesn't work", input, newInput) } + @Test fun `test emailaccount as stream`() { val emailAccount = randomEmailAccount() val out = BytesStreamOutput() @@ -35,6 +37,7 @@ class WriteableTests : OpenSearchTestCase() { assertEquals("Round tripping EmailAccount doesn't work", emailAccount, newEmailAccount) } + @Test fun `test emailgroup as stream`() { val emailGroup = randomEmailGroup() val out = BytesStreamOutput() 
diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/model/XContentTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/model/XContentTests.kt index 56fbd9866..e17a8ec69 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/model/XContentTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/model/XContentTests.kt @@ -19,9 +19,10 @@ import org.opensearch.commons.alerting.model.Alert import org.opensearch.commons.alerting.util.string import org.opensearch.core.xcontent.ToXContent import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class XContentTests : OpenSearchTestCase() { - + @Test fun `test alert parsing`() { val alert = randomAlert() @@ -31,28 +32,33 @@ class XContentTests : OpenSearchTestCase() { assertEquals("Round tripping alert doesn't work", alert, parsedAlert) } + @Test fun `test alert parsing without user`() { - val alertStr = "{\"id\":\"\",\"version\":-1,\"monitor_id\":\"\",\"schema_version\":0,\"monitor_version\":1," + - "\"monitor_name\":\"ARahqfRaJG\",\"trigger_id\":\"fhe1-XQBySl0wQKDBkOG\",\"trigger_name\":\"ffELMuhlro\"," + - "\"state\":\"ACTIVE\",\"error_message\":null,\"alert_history\":[],\"severity\":\"1\",\"action_execution_results\"" + - ":[{\"action_id\":\"ghe1-XQBySl0wQKDBkOG\",\"last_execution_time\":1601917224583,\"throttled_count\":-1478015168}," + - "{\"action_id\":\"gxe1-XQBySl0wQKDBkOH\",\"last_execution_time\":1601917224583,\"throttled_count\":-768533744}]," + - "\"start_time\":1601917224599,\"last_notification_time\":null,\"end_time\":null,\"acknowledged_time\":null}" + val alertStr = + "{\"id\":\"\",\"version\":-1,\"monitor_id\":\"\",\"schema_version\":0,\"monitor_version\":1," + + "\"monitor_name\":\"ARahqfRaJG\",\"trigger_id\":\"fhe1-XQBySl0wQKDBkOG\",\"trigger_name\":\"ffELMuhlro\"," + + "\"state\":\"ACTIVE\",\"error_message\":null,\"alert_history\":[],\"severity\":\"1\",\"action_execution_results\"" + + 
":[{\"action_id\":\"ghe1-XQBySl0wQKDBkOG\",\"last_execution_time\":1601917224583,\"throttled_count\":-1478015168}," + + "{\"action_id\":\"gxe1-XQBySl0wQKDBkOH\",\"last_execution_time\":1601917224583,\"throttled_count\":-768533744}]," + + "\"start_time\":1601917224599,\"last_notification_time\":null,\"end_time\":null,\"acknowledged_time\":null}" val parsedAlert = Alert.parse(parser(alertStr)) assertNull(parsedAlert.monitorUser) } + @Test fun `test alert parsing with user as null`() { - val alertStr = "{\"id\":\"\",\"version\":-1,\"monitor_id\":\"\",\"schema_version\":0,\"monitor_version\":1,\"monitor_user\":null," + - "\"monitor_name\":\"ARahqfRaJG\",\"trigger_id\":\"fhe1-XQBySl0wQKDBkOG\",\"trigger_name\":\"ffELMuhlro\"," + - "\"state\":\"ACTIVE\",\"error_message\":null,\"alert_history\":[],\"severity\":\"1\",\"action_execution_results\"" + - ":[{\"action_id\":\"ghe1-XQBySl0wQKDBkOG\",\"last_execution_time\":1601917224583,\"throttled_count\":-1478015168}," + - "{\"action_id\":\"gxe1-XQBySl0wQKDBkOH\",\"last_execution_time\":1601917224583,\"throttled_count\":-768533744}]," + - "\"start_time\":1601917224599,\"last_notification_time\":null,\"end_time\":null,\"acknowledged_time\":null}" + val alertStr = + "{\"id\":\"\",\"version\":-1,\"monitor_id\":\"\",\"schema_version\":0,\"monitor_version\":1,\"monitor_user\":null," + + "\"monitor_name\":\"ARahqfRaJG\",\"trigger_id\":\"fhe1-XQBySl0wQKDBkOG\",\"trigger_name\":\"ffELMuhlro\"," + + "\"state\":\"ACTIVE\",\"error_message\":null,\"alert_history\":[],\"severity\":\"1\",\"action_execution_results\"" + + ":[{\"action_id\":\"ghe1-XQBySl0wQKDBkOG\",\"last_execution_time\":1601917224583,\"throttled_count\":-1478015168}," + + "{\"action_id\":\"gxe1-XQBySl0wQKDBkOH\",\"last_execution_time\":1601917224583,\"throttled_count\":-768533744}]," + + "\"start_time\":1601917224599,\"last_notification_time\":null,\"end_time\":null,\"acknowledged_time\":null}" val parsedAlert = Alert.parse(parser(alertStr)) 
assertNull(parsedAlert.monitorUser) } + @Test fun `test action execution result parsing`() { val actionExecutionResult = randomActionExecutionResult() @@ -62,6 +68,7 @@ class XContentTests : OpenSearchTestCase() { assertEquals("Round tripping alert doesn't work", actionExecutionResult, parsedActionExecutionResultString) } + @Test fun `test email account parsing`() { val emailAccount = randomEmailAccount() @@ -70,6 +77,7 @@ class XContentTests : OpenSearchTestCase() { assertEquals("Round tripping EmailAccount doesn't work", emailAccount, parsedEmailAccount) } + @Test fun `test email group parsing`() { val emailGroup = randomEmailGroup() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/AlertV2Tests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/AlertV2Tests.kt index fade152bc..e57ddf661 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/AlertV2Tests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/AlertV2Tests.kt @@ -15,8 +15,10 @@ import org.opensearch.alerting.randomAlertV2 import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class AlertV2Tests : OpenSearchTestCase() { + @Test fun `test alertv2 as stream`() { val alertV2 = randomAlertV2() val out = BytesStreamOutput() @@ -26,6 +28,7 @@ class AlertV2Tests : OpenSearchTestCase() { assertAlertV2sEqual(alertV2, newAlertV2) } + @Test fun `test alertv2 asTemplateArgs`() { val alertV2 = randomAlertV2() val templateArgs = alertV2.asTemplateArg() @@ -33,27 +36,27 @@ class AlertV2Tests : OpenSearchTestCase() { assertEquals( "Template args field $ALERT_V2_ID_FIELD doesn't match", alertV2.id, - templateArgs[ALERT_V2_ID_FIELD] + templateArgs[ALERT_V2_ID_FIELD], ) assertEquals( "Template args field $ALERT_V2_VERSION_FIELD doesn't match", alertV2.version, - templateArgs[ALERT_V2_VERSION_FIELD] + 
templateArgs[ALERT_V2_VERSION_FIELD], ) assertEquals( "Template args field $ERROR_MESSAGE_FIELD doesn't match", alertV2.errorMessage, - templateArgs[ERROR_MESSAGE_FIELD] + templateArgs[ERROR_MESSAGE_FIELD], ) assertEquals( "Template args field $EXECUTION_ID_FIELD doesn't match", alertV2.executionId, - templateArgs[EXECUTION_ID_FIELD] + templateArgs[EXECUTION_ID_FIELD], ) assertEquals( "Template args field $SEVERITY_FIELD doesn't match", alertV2.severity.value, - templateArgs[SEVERITY_FIELD] + templateArgs[SEVERITY_FIELD], ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/MonitorV2Tests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/MonitorV2Tests.kt index 998131ad2..42e78d29d 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/MonitorV2Tests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/MonitorV2Tests.kt @@ -24,23 +24,28 @@ import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase import java.lang.IllegalArgumentException import java.time.Instant +import kotlin.test.Test class MonitorV2Tests : OpenSearchTestCase() { + @Test fun `test enabled time`() { val pplMonitor = randomPPLMonitor(enabled = true, enabledTime = Instant.now()) try { pplMonitor.makeCopy(enabled = false) fail("Disabling monitor with enabled time set should fail.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } val disabledMonitor = pplMonitor.copy(enabled = false, enabledTime = null) try { disabledMonitor.makeCopy(enabled = true) fail("Enabling monitor without enabled time should fail") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test max triggers`() { val tooManyTriggers = mutableListOf() for (i in 0..10) { // 11 times @@ -50,9 +55,11 @@ class MonitorV2Tests : OpenSearchTestCase() { try { randomPPLMonitor(triggers = tooManyTriggers) fail("Monitor with too many triggers should 
be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test monitor name too long`() { var monitorName = "" for (i in 0 until ALERTING_V2_MAX_NAME_LENGTH + 1) { @@ -62,47 +69,57 @@ class MonitorV2Tests : OpenSearchTestCase() { try { randomPPLMonitor(name = monitorName) fail("Monitor with too long a name should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test monitor min look back window`() { try { randomPPLMonitor( - lookBackWindow = MONITOR_V2_MIN_LOOK_BACK_WINDOW - 1 + lookBackWindow = MONITOR_V2_MIN_LOOK_BACK_WINDOW - 1, ) fail("Monitor with too long a name should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test monitor no triggers`() { try { randomPPLMonitor( - triggers = listOf() + triggers = listOf(), ) fail("Monitor without triggers be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test monitor with look back window without timestamp field`() { try { randomPPLMonitor( lookBackWindow = randomLongBetween(1, 10), - timestampField = null + timestampField = null, ) fail("Monitor with look back window but without timestamp field be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test monitor without look back window with timestamp field`() { try { randomPPLMonitor( lookBackWindow = null, - timestampField = "some_timestamp_field" + timestampField = "some_timestamp_field", ) fail("Monitor without look back window but with timestamp field be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test monitor v2 as stream`() { val pplMonitor = randomPPLMonitor() val monitorV2 = pplMonitor as MonitorV2 @@ -114,6 +131,7 @@ class MonitorV2Tests : OpenSearchTestCase() 
{ assertPplMonitorsEqual(pplMonitor, newPplMonitor) } + @Test fun `test ppl monitor as stream`() { val pplMonitor = randomPPLMonitor() val out = BytesStreamOutput() @@ -123,6 +141,7 @@ class MonitorV2Tests : OpenSearchTestCase() { assertPplMonitorsEqual(pplMonitor, newPplMonitor) } + @Test fun `test ppl monitor asTemplateArgs`() { val pplMonitor = randomPPLMonitor() val templateArgs = pplMonitor.asTemplateArg() @@ -130,43 +149,43 @@ class MonitorV2Tests : OpenSearchTestCase() { assertEquals( "Template args field $_ID doesn't match", pplMonitor.id, - templateArgs[_ID] + templateArgs[_ID], ) assertEquals( "Template args field $_VERSION doesn't match", pplMonitor.version, - templateArgs[_VERSION] + templateArgs[_VERSION], ) assertEquals( "Template args field $NAME_FIELD doesn't match", pplMonitor.name, - templateArgs[NAME_FIELD] + templateArgs[NAME_FIELD], ) assertEquals( "Template args field $ENABLED_FIELD doesn't match", pplMonitor.enabled, - templateArgs[ENABLED_FIELD] + templateArgs[ENABLED_FIELD], ) assertNotNull(templateArgs[SCHEDULE_FIELD]) assertEquals( "Template args field $LOOK_BACK_WINDOW_FIELD doesn't match", pplMonitor.lookBackWindow, - templateArgs[LOOK_BACK_WINDOW_FIELD] + templateArgs[LOOK_BACK_WINDOW_FIELD], ) assertEquals( "Template args field $LAST_UPDATE_TIME_FIELD doesn't match", pplMonitor.lastUpdateTime.toEpochMilli(), - templateArgs[LAST_UPDATE_TIME_FIELD] + templateArgs[LAST_UPDATE_TIME_FIELD], ) assertEquals( "Template args field $ENABLED_TIME_FIELD doesn't match", pplMonitor.enabledTime?.toEpochMilli(), - templateArgs[ENABLED_TIME_FIELD] + templateArgs[ENABLED_TIME_FIELD], ) assertEquals( "Template args field $QUERY_FIELD doesn't match", pplMonitor.query, - templateArgs[QUERY_FIELD] + templateArgs[QUERY_FIELD], ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/RunResultV2Tests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/RunResultV2Tests.kt index 4290af27c..38e64c21c 100644 --- 
a/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/RunResultV2Tests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/RunResultV2Tests.kt @@ -8,14 +8,17 @@ package org.opensearch.alerting.modelv2 import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class RunResultV2Tests : OpenSearchTestCase() { + @Test fun `test ppl sql trigger run result as stream`() { - val runResult = PPLSQLTriggerRunResult( - triggerName = "some-trigger", - triggered = true, - error = IllegalArgumentException("some-error") - ) + val runResult = + PPLSQLTriggerRunResult( + triggerName = "some-trigger", + triggered = true, + error = IllegalArgumentException("some-error"), + ) val out = BytesStreamOutput() runResult.writeTo(out) val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) @@ -23,19 +26,23 @@ class RunResultV2Tests : OpenSearchTestCase() { assertEquals(runResult.triggerName, newRunResult.triggerName) } + @Test fun `test ppl sql monitor run result as monitor v2 run result as stream`() { - val monitorRunResult = PPLSQLMonitorRunResult( - monitorName = "some-monitor", - error = IllegalArgumentException("some-error"), - triggerResults = mapOf( - "some-trigger-id" to PPLSQLTriggerRunResult( - triggerName = "some-trigger", - triggered = true, - error = IllegalArgumentException("some-error") - ) - ), - pplQueryResults = mapOf("some-result" to mapOf("some-field" to 2)) - ) as MonitorV2RunResult + val monitorRunResult = + PPLSQLMonitorRunResult( + monitorName = "some-monitor", + error = IllegalArgumentException("some-error"), + triggerResults = + mapOf( + "some-trigger-id" to + PPLSQLTriggerRunResult( + triggerName = "some-trigger", + triggered = true, + error = IllegalArgumentException("some-error"), + ), + ), + pplQueryResults = mapOf("some-result" to mapOf("some-field" to 2)), + ) as MonitorV2RunResult val out = BytesStreamOutput() 
MonitorV2RunResult.writeTo(out, monitorRunResult) @@ -47,15 +54,15 @@ class RunResultV2Tests : OpenSearchTestCase() { assert(newMonitorRunResult.triggerResults.containsKey("some-trigger-id")) assertEquals( monitorRunResult.triggerResults["some-trigger-id"]!!.triggerName, - newMonitorRunResult.triggerResults["some-trigger-id"]!!.triggerName + newMonitorRunResult.triggerResults["some-trigger-id"]!!.triggerName, ) assertEquals( monitorRunResult.triggerResults["some-trigger-id"]!!.triggered, - newMonitorRunResult.triggerResults["some-trigger-id"]!!.triggered + newMonitorRunResult.triggerResults["some-trigger-id"]!!.triggered, ) assertEquals( monitorRunResult.triggerResults["some-trigger-id"]!!.error?.message, - newMonitorRunResult.triggerResults["some-trigger-id"]!!.error?.message + newMonitorRunResult.triggerResults["some-trigger-id"]!!.error?.message, ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/TriggerV2Tests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/TriggerV2Tests.kt index 430572254..e625fbfda 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/TriggerV2Tests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/modelv2/TriggerV2Tests.kt @@ -29,26 +29,32 @@ import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase import java.lang.IllegalArgumentException +import kotlin.test.Test class TriggerV2Tests : OpenSearchTestCase() { + @Test fun `test min throttle duration`() { try { randomPPLTrigger( - throttleDuration = MONITOR_V2_MIN_THROTTLE_DURATION_MINUTES - 1 + throttleDuration = MONITOR_V2_MIN_THROTTLE_DURATION_MINUTES - 1, ) fail("Trigger with throttle duration less than 1 should be rejected") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test min expire duration`() { try { randomPPLTrigger( - expireDuration = 
MONITOR_V2_MIN_EXPIRE_DURATION_MINUTES - 1 + expireDuration = MONITOR_V2_MIN_EXPIRE_DURATION_MINUTES - 1, ) fail("Trigger with expire duration less than 1 should be rejected") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test trigger name too long`() { var triggerName = "" for (i in 0 until ALERTING_V2_MAX_NAME_LENGTH + 1) { @@ -58,20 +64,24 @@ class TriggerV2Tests : OpenSearchTestCase() { try { randomPPLTrigger(name = triggerName) fail("Trigger with too long a name should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test number of results trigger with negative number of results value`() { try { randomPPLTrigger( conditionType = ConditionType.NUMBER_OF_RESULTS, numResultsValue = -1L, - numResultsCondition = NumResultsCondition.GREATER_THAN + numResultsCondition = NumResultsCondition.GREATER_THAN, ) fail("Number of results trigger with negative number of results value should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test trigger action name too long`() { var actionName = "" for (i in 0 until ALERTING_V2_MAX_NAME_LENGTH + 1) { @@ -80,16 +90,19 @@ class TriggerV2Tests : OpenSearchTestCase() { try { randomPPLTrigger( - actions = listOf( - randomAction( - name = actionName - ) - ) + actions = + listOf( + randomAction( + name = actionName, + ), + ), ) fail("Trigger action with too long a name should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test trigger action channel ID too long`() { var channelId = "" for (i in 0 until NOTIFICATIONS_ID_MAX_LENGTH + 1) { @@ -98,88 +111,103 @@ class TriggerV2Tests : OpenSearchTestCase() { try { randomPPLTrigger( - actions = listOf( - randomAction( - destinationId = channelId - ) - ) + actions = + listOf( + randomAction( + destinationId = channelId, + ), + 
), ) fail("Trigger action with too long a channel ID should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test number_of_results trigger has no number_of_results value field`() { try { randomPPLTrigger( conditionType = ConditionType.NUMBER_OF_RESULTS, numResultsCondition = NumResultsCondition.entries.random(), numResultsValue = null, - customCondition = null + customCondition = null, ) fail("Number of results trigger that has no number of results value should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test number_of_results trigger has no number_of_results condition field`() { try { randomPPLTrigger( conditionType = ConditionType.NUMBER_OF_RESULTS, numResultsCondition = null, numResultsValue = randomLongBetween(1, 10), - customCondition = null + customCondition = null, ) fail("Number of results trigger that has no number of results condition should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test number_of_results trigger has custom_condition value field`() { try { randomPPLTrigger( conditionType = ConditionType.NUMBER_OF_RESULTS, numResultsCondition = null, numResultsValue = null, - customCondition = "eval result = something > 5" + customCondition = "eval result = something > 5", ) fail("Number of results trigger that has custom condition should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test custom trigger has number_of_results value field`() { try { randomPPLTrigger( conditionType = ConditionType.CUSTOM, numResultsCondition = NumResultsCondition.entries.random(), numResultsValue = null, - customCondition = null + customCondition = null, ) fail("Number of results trigger that has no number of results value should be rejected.") - } catch (_: IllegalArgumentException) 
{} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test custom trigger has number_of_results condition field`() { try { randomPPLTrigger( conditionType = ConditionType.CUSTOM, numResultsCondition = null, numResultsValue = randomLongBetween(1, 10), - customCondition = null + customCondition = null, ) fail("Number of results trigger that has no number of results condition should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test custom trigger has no custom_condition value field`() { try { randomPPLTrigger( conditionType = ConditionType.CUSTOM, numResultsCondition = null, numResultsValue = null, - customCondition = null + customCondition = null, ) fail("Number of results trigger that has custom condition should be rejected.") - } catch (_: IllegalArgumentException) {} + } catch (_: IllegalArgumentException) { + } } + @Test fun `test ppl trigger as stream`() { val pplTrigger = randomPPLTrigger() val out = BytesStreamOutput() @@ -189,6 +217,7 @@ class TriggerV2Tests : OpenSearchTestCase() { assertPplTriggersEqual(pplTrigger, newPplTrigger) } + @Test fun `test ppl trigger asTemplateArgs`() { val pplTrigger = randomPPLTrigger() val templateArgs = pplTrigger.asTemplateArg() @@ -196,59 +225,59 @@ class TriggerV2Tests : OpenSearchTestCase() { assertEquals( "Template args field $ID_FIELD doesn't match", pplTrigger.id, - templateArgs[ID_FIELD] + templateArgs[ID_FIELD], ) assertEquals( "Template args field $NAME_FIELD doesn't match", pplTrigger.name, - templateArgs[NAME_FIELD] + templateArgs[NAME_FIELD], ) assertEquals( "Template args field $SEVERITY_FIELD doesn't match", pplTrigger.severity.value, - templateArgs[SEVERITY_FIELD] + templateArgs[SEVERITY_FIELD], ) assertEquals( "Template args field $THROTTLE_FIELD doesn't match", pplTrigger.throttleDuration, - templateArgs[THROTTLE_FIELD] + templateArgs[THROTTLE_FIELD], ) assertEquals( "Template args field $EXPIRE_FIELD doesn't match", 
pplTrigger.expireDuration, - templateArgs[EXPIRE_FIELD] + templateArgs[EXPIRE_FIELD], ) assertEquals( "Template args field $EXPIRE_FIELD doesn't match", pplTrigger.expireDuration, - templateArgs[EXPIRE_FIELD] + templateArgs[EXPIRE_FIELD], ) val actions = templateArgs[ACTIONS_FIELD] as List<*> assertEquals("number of trigger actions doesn't match", pplTrigger.actions.size, actions.size) assertEquals( "Template args field $MODE_FIELD doesn't match", pplTrigger.mode.value, - templateArgs[MODE_FIELD] + templateArgs[MODE_FIELD], ) assertEquals( "Template args field $CONDITION_TYPE_FIELD doesn't match", pplTrigger.conditionType.value, - templateArgs[CONDITION_TYPE_FIELD] + templateArgs[CONDITION_TYPE_FIELD], ) assertEquals( "Template args field $NUM_RESULTS_CONDITION_FIELD doesn't match", pplTrigger.numResultsCondition?.value, - templateArgs[NUM_RESULTS_CONDITION_FIELD] + templateArgs[NUM_RESULTS_CONDITION_FIELD], ) assertEquals( "Template args field $NUM_RESULTS_VALUE_FIELD doesn't match", pplTrigger.numResultsValue, - templateArgs[NUM_RESULTS_VALUE_FIELD] + templateArgs[NUM_RESULTS_VALUE_FIELD], ) assertEquals( "Template args field $CUSTOM_CONDITION_FIELD doesn't match", pplTrigger.customCondition, - templateArgs[CUSTOM_CONDITION_FIELD] + templateArgs[CUSTOM_CONDITION_FIELD], ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/AlertingCommentsRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/AlertingCommentsRestApiIT.kt index b59a7ce07..833b0c3d3 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/AlertingCommentsRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/AlertingCommentsRestApiIT.kt @@ -12,11 +12,12 @@ import org.opensearch.commons.alerting.model.Alert import org.opensearch.index.query.QueryBuilders import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.junit.annotations.TestLogging +import kotlin.test.Test 
@TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class AlertingCommentsRestApiIT : AlertingRestTestCase() { - + @Test fun `test creating comment`() { client().updateSettings(ALERTING_COMMENTS_ENABLED.key, "true") @@ -31,6 +32,7 @@ class AlertingCommentsRestApiIT : AlertingRestTestCase() { assertEquals("Comment does not have correct alert ID", alertId, comment.entityId) } + @Test fun `test updating comment`() { client().updateSettings(ALERTING_COMMENTS_ENABLED.key, "true") @@ -47,6 +49,7 @@ class AlertingCommentsRestApiIT : AlertingRestTestCase() { assertEquals("Comment does not have correct content after update", updateContent, actualContent) } + @Test fun `test searching single comment by alert id`() { client().updateSettings(ALERTING_COMMENTS_ENABLED.key, "true") @@ -72,6 +75,7 @@ class AlertingCommentsRestApiIT : AlertingRestTestCase() { assertEquals("returned Comment does not have expected content", commentContent, commentHit["content"]) } + @Test fun `test deleting comments`() { client().updateSettings(ALERTING_COMMENTS_ENABLED.key, "true") @@ -87,8 +91,8 @@ class AlertingCommentsRestApiIT : AlertingRestTestCase() { assertEquals("Deleted Comment ID does not match Comment ID in delete request", commentId, deletedCommentId) } - // TODO: test list /* + TODO: test list create comment with empty content should fail create without alert id should fail update without comment id should fail diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/DestinationRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/DestinationRestApiIT.kt index 9858b7e38..b15969696 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/DestinationRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/DestinationRestApiIT.kt @@ -17,62 +17,77 @@ import org.opensearch.alerting.randomUser import org.opensearch.alerting.util.DestinationType import 
org.opensearch.test.junit.annotations.TestLogging import java.time.Instant +import kotlin.test.Test @TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class DestinationRestApiIT : AlertingRestTestCase() { - + @Test fun `test creating a chime destination`() { val chime = Chime("http://abc.com") - val destination = Destination( - type = DestinationType.CHIME, - name = "test", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = chime, - slack = null, - customWebhook = null, - email = null - ) + val destination = + Destination( + type = DestinationType.CHIME, + name = "test", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = chime, + slack = null, + customWebhook = null, + email = null, + ) val createdDestination = createDestination(destination = destination) assertEquals("Incorrect destination name", createdDestination.name, "test") assertEquals("Incorrect destination type", createdDestination.type, DestinationType.CHIME) Assert.assertNotNull("chime object should not be null", createdDestination.chime) } + @Test fun `test creating a custom webhook destination with url`() { val customWebhook = CustomWebhook("http://abc.com", null, null, 80, null, "PUT", emptyMap(), emptyMap(), null, null) - val destination = Destination( - type = DestinationType.CUSTOM_WEBHOOK, - name = "test", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = null, - slack = null, - customWebhook = customWebhook, - email = null - ) + val destination = + Destination( + type = DestinationType.CUSTOM_WEBHOOK, + name = "test", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = null, + slack = null, + customWebhook = customWebhook, + email = null, + ) val createdDestination = createDestination(destination = destination) assertEquals("Incorrect destination name", createdDestination.name, "test") assertEquals("Incorrect destination type", createdDestination.type, DestinationType.CUSTOM_WEBHOOK) 
Assert.assertNotNull("custom webhook object should not be null", createdDestination.customWebhook) } + @Test fun `test creating a custom webhook destination with host`() { - val customWebhook = CustomWebhook( - "", "http", "abc.com", 80, "a/b/c", "PATCH", - mapOf("foo" to "1", "bar" to "2"), mapOf("h1" to "1", "h2" to "2"), null, null - ) - val destination = Destination( - type = DestinationType.CUSTOM_WEBHOOK, - name = "test", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = null, - slack = null, - customWebhook = customWebhook, - email = null - ) + val customWebhook = + CustomWebhook( + "", + "http", + "abc.com", + 80, + "a/b/c", + "PATCH", + mapOf("foo" to "1", "bar" to "2"), + mapOf("h1" to "1", "h2" to "2"), + null, + null, + ) + val destination = + Destination( + type = DestinationType.CUSTOM_WEBHOOK, + name = "test", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = null, + slack = null, + customWebhook = customWebhook, + email = null, + ) val createdDestination = createDestination(destination = destination) assertEquals("Incorrect destination name", createdDestination.name, "test") assertEquals("Incorrect destination type", createdDestination.type, DestinationType.CUSTOM_WEBHOOK) @@ -84,34 +99,45 @@ class DestinationRestApiIT : AlertingRestTestCase() { Assert.assertNotNull("custom webhook object should not be null", createdDestination.customWebhook) } + @Test fun `test creating an email destination`() { val recipient = Recipient(type = Recipient.RecipientType.EMAIL, emailGroupID = null, email = "test@email.com") val email = Email("", listOf(recipient)) - val destination = Destination( - type = DestinationType.EMAIL, - name = "test", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = null, - slack = null, - customWebhook = null, - email = email - ) + val destination = + Destination( + type = DestinationType.EMAIL, + name = "test", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = null, + 
slack = null, + customWebhook = null, + email = email, + ) val createdDestination = createDestination(destination = destination) Assert.assertNotNull("Email object should not be null", createdDestination.email) assertEquals("Incorrect destination name", createdDestination.name, "test") assertEquals("Incorrect destination type", createdDestination.type, DestinationType.EMAIL) assertEquals( - "Incorrect email destination recipient type", createdDestination.email?.recipients?.get(0)?.type, - Recipient.RecipientType.EMAIL + "Incorrect email destination recipient type", + createdDestination.email + ?.recipients + ?.get(0) + ?.type, + Recipient.RecipientType.EMAIL, ) assertEquals( - "Incorrect email destination recipient email", createdDestination.email?.recipients?.get(0)?.email, - "test@email.com" + "Incorrect email destination recipient email", + createdDestination.email + ?.recipients + ?.get(0) + ?.email, + "test@email.com", ) } + @Test fun `test get destination`() { val destination = createDestination() val getDestinationResponse = getDestination(destination) @@ -122,18 +148,20 @@ class DestinationRestApiIT : AlertingRestTestCase() { assertEquals(destination.primaryTerm, getDestinationResponse["primary_term"]) } + @Test fun `test get destinations with slack destination type`() { val slack = Slack("url") - val dest = Destination( - type = DestinationType.SLACK, - name = "testSlack", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = null, - slack = slack, - customWebhook = null, - email = null - ) + val dest = + Destination( + type = DestinationType.SLACK, + name = "testSlack", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = null, + slack = slack, + customWebhook = null, + email = null, + ) val inputMap = HashMap() inputMap["missing"] = "_last" @@ -154,18 +182,20 @@ class DestinationRestApiIT : AlertingRestTestCase() { assertEquals(destination.primaryTerm, getDestinationResponse["primary_term"]) } + @Test fun `test get destinations 
matching a given name`() { val slack = Slack("url") - val dest = Destination( - type = DestinationType.SLACK, - name = "testSlack", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = null, - slack = slack, - customWebhook = null, - email = null - ) + val dest = + Destination( + type = DestinationType.SLACK, + name = "testSlack", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = null, + slack = slack, + customWebhook = null, + email = null, + ) val inputMap = HashMap() inputMap["searchString"] = "testSlack" diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/EmailAccountRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/EmailAccountRestApiIT.kt index c86270f5b..3c58eddc4 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/EmailAccountRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/EmailAccountRestApiIT.kt @@ -19,21 +19,23 @@ import org.opensearch.index.query.QueryBuilders import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.OpenSearchTestCase import org.opensearch.test.junit.annotations.TestLogging +import kotlin.test.Test @TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class EmailAccountRestApiIT : AlertingRestTestCase() { - + @Test fun `test creating an email account`() { - val emailAccount = EmailAccount( - name = "test", - email = "test@email.com", - host = "smtp.com", - port = 25, - method = EmailAccount.MethodType.NONE, - username = null, - password = null - ) + val emailAccount = + EmailAccount( + name = "test", + email = "test@email.com", + host = "smtp.com", + port = 25, + method = EmailAccount.MethodType.NONE, + username = null, + password = null, + ) val createdEmailAccount = createEmailAccount(emailAccount = emailAccount) assertEquals("Incorrect email account name", createdEmailAccount.name, "test") assertEquals("Incorrect email account email", 
createdEmailAccount.email, "test@email.com") @@ -42,6 +44,7 @@ class EmailAccountRestApiIT : AlertingRestTestCase() { assertEquals("Incorrect email account method", createdEmailAccount.method, EmailAccount.MethodType.NONE) } + @Test fun `test creating an email account with PUT fails`() { try { val emailAccount = randomEmailAccount() @@ -52,6 +55,7 @@ class EmailAccountRestApiIT : AlertingRestTestCase() { } } + @Test fun `test creating an email account when email destination is disallowed fails`() { try { removeEmailFromAllowList() @@ -62,12 +66,14 @@ class EmailAccountRestApiIT : AlertingRestTestCase() { } } + @Test fun `test getting an email account`() { val emailAccount = createRandomEmailAccount() val storedEmailAccount = getEmailAccount(emailAccount.id) assertEquals("Indexed and retrieved email account differ", emailAccount, storedEmailAccount) } + @Test fun `test getting an email account that doesn't exist`() { try { getEmailAccount(randomAlphaOfLength(20)) @@ -77,6 +83,7 @@ class EmailAccountRestApiIT : AlertingRestTestCase() { } } + @Test fun `test getting an email account when email destination is disallowed fails`() { val emailAccount = createRandomEmailAccount() @@ -89,6 +96,7 @@ class EmailAccountRestApiIT : AlertingRestTestCase() { } } + @Test fun `test checking if an email account exists`() { val emailAccount = createRandomEmailAccount() @@ -97,21 +105,24 @@ class EmailAccountRestApiIT : AlertingRestTestCase() { assertNull("Response contains unexpected body", headResponse.entity) } + @Test fun `test checking if a non-existent email account exists`() { val headResponse = client().makeRequest("HEAD", "$EMAIL_ACCOUNT_BASE_URI/foobar") assertEquals("Unexpected status", RestStatus.NOT_FOUND, headResponse.restStatus()) } + @Test fun `test querying an email account that exists`() { val emailAccount = createRandomEmailAccount() val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", emailAccount.id)).toString() - val searchResponse = 
client().makeRequest( - "GET", - "$EMAIL_ACCOUNT_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "GET", + "$EMAIL_ACCOUNT_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search email account failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! as Map> @@ -119,16 +130,18 @@ class EmailAccountRestApiIT : AlertingRestTestCase() { assertEquals("Email account not found during search", 1, numberOfDocsFound) } + @Test fun `test querying an email account that exists with POST`() { val emailAccount = createRandomEmailAccount() val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", emailAccount.id)).toString() - val searchResponse = client().makeRequest( - "POST", - "$EMAIL_ACCOUNT_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "POST", + "$EMAIL_ACCOUNT_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search email account failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! as Map> @@ -136,23 +149,26 @@ class EmailAccountRestApiIT : AlertingRestTestCase() { assertEquals("Email account not found during search", 1, numberOfDocsFound) } + @Test fun `test querying an email account that doesn't exist`() { // Create a random email account to create the ScheduledJob index. Otherwise the test will fail with a 404 index not found error. 
createRandomEmailAccount() - val search = SearchSourceBuilder() - .query( - QueryBuilders.termQuery( - OpenSearchTestCase.randomAlphaOfLength(5), - OpenSearchTestCase.randomAlphaOfLength(5) - ) - ).toString() - - val searchResponse = client().makeRequest( - "GET", - "$EMAIL_ACCOUNT_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val search = + SearchSourceBuilder() + .query( + QueryBuilders.termQuery( + OpenSearchTestCase.randomAlphaOfLength(5), + OpenSearchTestCase.randomAlphaOfLength(5), + ), + ).toString() + + val searchResponse = + client().makeRequest( + "GET", + "$EMAIL_ACCOUNT_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search email account failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! as Map> @@ -160,6 +176,7 @@ class EmailAccountRestApiIT : AlertingRestTestCase() { assertEquals("Email account found during search when no document was present", 0, numberOfDocsFound) } + @Test fun `test querying an email account when email destination is disallowed fails`() { val emailAccount = createRandomEmailAccount() @@ -170,7 +187,7 @@ class EmailAccountRestApiIT : AlertingRestTestCase() { "GET", "$EMAIL_ACCOUNT_BASE_URI/_search", emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) + StringEntity(search, ContentType.APPLICATION_JSON), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/EmailGroupRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/EmailGroupRestApiIT.kt index c60a09d6b..51b2d8427 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/EmailGroupRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/EmailGroupRestApiIT.kt @@ -20,21 +20,24 @@ import 
org.opensearch.index.query.QueryBuilders import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.OpenSearchTestCase import org.opensearch.test.junit.annotations.TestLogging +import kotlin.test.Test @TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class EmailGroupRestApiIT : AlertingRestTestCase() { - + @Test fun `test creating an email group`() { - val emailGroup = EmailGroup( - name = "test", - emails = listOf(EmailEntry("test@email.com")) - ) + val emailGroup = + EmailGroup( + name = "test", + emails = listOf(EmailEntry("test@email.com")), + ) val createdEmailGroup = createEmailGroup(emailGroup = emailGroup) assertEquals("Incorrect email group name", createdEmailGroup.name, "test") assertEquals("Incorrect email group email entry", createdEmailGroup.emails[0].email, "test@email.com") } + @Test fun `test creating an email group with PUT fails`() { try { val emailGroup = randomEmailGroup() @@ -45,6 +48,7 @@ class EmailGroupRestApiIT : AlertingRestTestCase() { } } + @Test fun `test creating an email group when email destination is disallowed fails`() { try { removeEmailFromAllowList() @@ -55,12 +59,14 @@ class EmailGroupRestApiIT : AlertingRestTestCase() { } } + @Test fun `test getting an email group`() { val emailGroup = createRandomEmailGroup() val storedEmailGroup = getEmailGroup(emailGroup.id) assertEquals("Indexed and retrieved email group differ", emailGroup, storedEmailGroup) } + @Test fun `test getting an email group that doesn't exist`() { try { getEmailGroup(randomAlphaOfLength(20)) @@ -70,6 +76,7 @@ class EmailGroupRestApiIT : AlertingRestTestCase() { } } + @Test fun `test getting an email group when email destination is disallowed fails`() { val emailGroup = createRandomEmailGroup() @@ -82,6 +89,7 @@ class EmailGroupRestApiIT : AlertingRestTestCase() { } } + @Test fun `test checking if an email group exists`() { val emailGroup = createRandomEmailGroup() @@ -90,21 +98,24 @@ class 
EmailGroupRestApiIT : AlertingRestTestCase() { assertNull("Response contains unexpected body", headResponse.entity) } + @Test fun `test checking if a non-existent email group exists`() { val headResponse = client().makeRequest("HEAD", "$EMAIL_GROUP_BASE_URI/foobar") assertEquals("Unexpected status", RestStatus.NOT_FOUND, headResponse.restStatus()) } + @Test fun `test querying an email group that exists`() { val emailGroup = createRandomEmailGroup() val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", emailGroup.id)).toString() - val searchResponse = client().makeRequest( - "GET", - "$EMAIL_GROUP_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "GET", + "$EMAIL_GROUP_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search email group failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! as Map> @@ -112,16 +123,18 @@ class EmailGroupRestApiIT : AlertingRestTestCase() { assertEquals("Email group not found during search", 1, numberOfDocsFound) } + @Test fun `test querying an email group that exists with POST`() { val emailGroup = createRandomEmailGroup() val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", emailGroup.id)).toString() - val searchResponse = client().makeRequest( - "POST", - "$EMAIL_GROUP_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "POST", + "$EMAIL_GROUP_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search email group failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! 
as Map> @@ -129,23 +142,26 @@ class EmailGroupRestApiIT : AlertingRestTestCase() { assertEquals("Email group not found during search", 1, numberOfDocsFound) } + @Test fun `test querying an email group that doesn't exist`() { // Create a random email group to create the ScheduledJob index. Otherwise the test will fail with a 404 index not found error. createRandomEmailGroup() - val search = SearchSourceBuilder() - .query( - QueryBuilders.termQuery( - OpenSearchTestCase.randomAlphaOfLength(5), - OpenSearchTestCase.randomAlphaOfLength(5) - ) - ).toString() - - val searchResponse = client().makeRequest( - "GET", - "$EMAIL_GROUP_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val search = + SearchSourceBuilder() + .query( + QueryBuilders.termQuery( + OpenSearchTestCase.randomAlphaOfLength(5), + OpenSearchTestCase.randomAlphaOfLength(5), + ), + ).toString() + + val searchResponse = + client().makeRequest( + "GET", + "$EMAIL_GROUP_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search email group failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! 
as Map> @@ -153,6 +169,7 @@ class EmailGroupRestApiIT : AlertingRestTestCase() { assertEquals("Email group found during search when no document was present", 0, numberOfDocsFound) } + @Test fun `test querying an email group when email destination is disallowed fails`() { val emailGroup = createRandomEmailGroup() @@ -163,7 +180,7 @@ class EmailGroupRestApiIT : AlertingRestTestCase() { "GET", "$EMAIL_GROUP_BASE_URI/_search", emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) + StringEntity(search, ContentType.APPLICATION_JSON), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/FindingsRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/FindingsRestApiIT.kt index bc9f1261c..c8550c013 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/FindingsRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/FindingsRestApiIT.kt @@ -12,11 +12,12 @@ import org.opensearch.alerting.randomDocumentLevelTrigger import org.opensearch.commons.alerting.model.DocLevelMonitorInput import org.opensearch.commons.alerting.model.DocLevelQuery import org.opensearch.test.junit.annotations.TestLogging +import kotlin.test.Test @TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class FindingsRestApiIT : AlertingRestTestCase() { - + @Test fun `test find Finding where doc is not retrieved`() { val testIndex = createTestIndex() val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) @@ -31,6 +32,7 @@ class FindingsRestApiIT : AlertingRestTestCase() { assertFalse(response.findings[0].documents[0].found) } + @Test fun `test find Finding where source docData is null`() { val testIndex = createTestIndex() val testDoc = """{ @@ -58,6 +60,7 @@ class FindingsRestApiIT : AlertingRestTestCase() { 
assertFalse(responseAfterDelete.findings[0].documents[0].found) } + @Test fun `test find Finding where doc is retrieved`() { val testIndex = createTestIndex() val testDoc = """{ @@ -98,6 +101,7 @@ class FindingsRestApiIT : AlertingRestTestCase() { } } + @Test fun `test find Finding for specific finding by id`() { val testIndex = createTestIndex() val testDoc = """{ @@ -129,6 +133,7 @@ class FindingsRestApiIT : AlertingRestTestCase() { assertEquals(testDoc2, response.findings[0].documents[1].document) } + @Test fun `test find Finding by tag`() { val testIndex = createTestIndex() val testDoc = """{ @@ -149,11 +154,12 @@ class FindingsRestApiIT : AlertingRestTestCase() { executeMonitor(trueMonitor.id, mapOf(Pair("dryrun", "true"))) createFinding(matchingDocIds = listOf("someId"), index = testIndex) - val findingId = createFinding( - matchingDocIds = listOf("someId", "someId2"), - index = testIndex, - docLevelQueries = listOf(docLevelQuery) - ) + val findingId = + createFinding( + matchingDocIds = listOf("someId", "someId2"), + index = testIndex, + docLevelQueries = listOf(docLevelQuery), + ) val response = searchFindings(mapOf(Pair("searchString", "sigma"))) assertEquals(1, response.totalFindings) assertEquals(findingId, response.findings[0].finding.id) @@ -164,6 +170,7 @@ class FindingsRestApiIT : AlertingRestTestCase() { assertEquals(testDoc2, response.findings[0].documents[1].document) } + @Test fun `test find Finding by name`() { val testIndex = createTestIndex() val testDoc = """{ @@ -184,11 +191,12 @@ class FindingsRestApiIT : AlertingRestTestCase() { executeMonitor(trueMonitor.id, mapOf(Pair("dryrun", "true"))) createFinding(matchingDocIds = listOf("someId"), index = testIndex) - val findingId = createFinding( - matchingDocIds = listOf("someId", "someId2"), - index = testIndex, - docLevelQueries = listOf(docLevelQuery) - ) + val findingId = + createFinding( + matchingDocIds = listOf("someId", "someId2"), + index = testIndex, + docLevelQueries = 
listOf(docLevelQuery), + ) val response = searchFindings(mapOf(Pair("searchString", "realQuery"))) assertEquals(1, response.totalFindings) assertEquals(findingId, response.findings[0].finding.id) @@ -199,6 +207,7 @@ class FindingsRestApiIT : AlertingRestTestCase() { assertEquals(testDoc2, response.findings[0].documents[1].document) } + @Test fun `test find Finding by monitor id`() { val testIndex = createTestIndex() val testDoc = """{ @@ -219,12 +228,13 @@ class FindingsRestApiIT : AlertingRestTestCase() { executeMonitor(trueMonitor.id, mapOf(Pair("dryrun", "true"))) createFinding(matchingDocIds = listOf("someId"), index = testIndex) - val findingId = createFinding( - monitorId = "monitorToFind", - matchingDocIds = listOf("someId", "someId2"), - index = testIndex, - docLevelQueries = listOf(docLevelQuery) - ) + val findingId = + createFinding( + monitorId = "monitorToFind", + matchingDocIds = listOf("someId", "someId2"), + index = testIndex, + docLevelQueries = listOf(docLevelQuery), + ) val response = searchFindings(mapOf(Pair("searchString", "monitorToFind"))) assertEquals(1, response.totalFindings) assertEquals(findingId, response.findings[0].finding.id) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorRestApiIT.kt index 64a5b9c40..4a63b9ae9 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorRestApiIT.kt @@ -67,16 +67,17 @@ import java.time.Instant import java.time.ZoneId import java.time.temporal.ChronoUnit import java.util.concurrent.TimeUnit +import kotlin.test.Test @TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class MonitorRestApiIT : AlertingRestTestCase() { - companion object { val USE_TYPED_KEYS = ToXContent.MapParams(mapOf("with_type" to "true")) } @Throws(Exception::class) + @Test fun 
`test plugin is loaded`() { val response = entityAsMap(OpenSearchRestTestCase.client().makeRequest("GET", "_nodes/plugins")) val nodesInfo = response["nodes"] as Map> @@ -91,6 +92,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { fail("Plugin not installed") } + @Test fun `test parsing monitor as a scheduled job`() { val monitor = createRandomMonitor() @@ -102,6 +104,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test creating a monitor`() { val monitor = randomQueryLevelMonitor() @@ -117,6 +120,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test creating a bucket monitor`() { val monitor = randomBucketLevelMonitor() @@ -131,6 +135,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals("Incorrect Location header", "$ALERTING_BASE_URI/$createdId", createResponse.getHeader("Location")) } + @Test fun `test creating a monitor with legacy ODFE`() { val monitor = randomQueryLevelMonitor() val createResponse = client().makeRequest("POST", LEGACY_OPENDISTRO_ALERTING_BASE_URI, emptyMap(), monitor.toHttpEntity()) @@ -142,6 +147,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertTrue("incorrect version", createdVersion > 0) } + @Test fun `test creating a monitor with action threshold greater than max threshold`() { val monitor = randomMonitorWithThrottle(100000, ChronoUnit.MINUTES) @@ -152,6 +158,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test creating a monitor with action threshold less than min threshold`() { val monitor = randomMonitorWithThrottle(-1) @@ -162,6 +169,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test creating a monitor with updating action threshold`() { adminClient().updateSettings("plugins.alerting.action_throttle_max_value", TimeValue.timeValueHours(1)) @@ -175,6 +183,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { 
adminClient().updateSettings("plugins.alerting.action_throttle_max_value", TimeValue.timeValueHours(24)) } + @Test fun `test creating a monitor with PUT fails`() { try { val monitor = randomQueryLevelMonitor() @@ -185,6 +194,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test creating a monitor with illegal index name`() { try { val si = SearchInput(listOf("_#*IllegalIndexCharacters"), SearchSourceBuilder().query(QueryBuilders.matchAllQuery())) @@ -196,11 +206,12 @@ class MonitorRestApiIT : AlertingRestTestCase() { // Without security plugin we get BAD_REQUEST correctly. With security_plugin we get INTERNAL_SERVER_ERROR, till above issue is fixed. assertTrue( "Unexpected status", - listOf(RestStatus.BAD_REQUEST, RestStatus.FORBIDDEN).contains(e.response.restStatus()) + listOf(RestStatus.BAD_REQUEST, RestStatus.FORBIDDEN).contains(e.response.restStatus()), ) } } + @Test fun `test creating an AD monitor without detector index`() { try { val monitor = randomADMonitor() @@ -211,11 +222,12 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertTrue("Unexpected error", e.message!!.contains("Configured indices are not found")) assertTrue( "Unexpected status", - listOf(RestStatus.NOT_FOUND).contains(e.response.restStatus()) + listOf(RestStatus.NOT_FOUND).contains(e.response.restStatus()), ) } } + @Test fun `test creating an AD monitor with detector index created but no detectors`() { createAnomalyDetectorIndex() try { @@ -226,11 +238,12 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertTrue("Unexpected error", e.message!!.contains("User has no available detectors")) assertTrue( "Unexpected status", - listOf(RestStatus.NOT_FOUND).contains(e.response.restStatus()) + listOf(RestStatus.NOT_FOUND).contains(e.response.restStatus()), ) } } + @Test fun `test creating an AD monitor with no detector has monitor backend role`() { if (!securityEnabled()) { createAnomalyDetectorIndex() @@ -245,7 +258,7 @@ class MonitorRestApiIT : 
AlertingRestTestCase() { assertTrue("Unexpected error", e.message!!.contains("User has no available detectors")) assertTrue( "Unexpected status", - listOf(RestStatus.NOT_FOUND).contains(e.response.restStatus()) + listOf(RestStatus.NOT_FOUND).contains(e.response.restStatus()), ) } } @@ -312,17 +325,22 @@ class MonitorRestApiIT : AlertingRestTestCase() { */ @Throws(Exception::class) + @Test fun `test updating search for a monitor`() { val monitor = createRandomMonitor() - val updatedSearch = SearchInput( - emptyList(), - SearchSourceBuilder().query(QueryBuilders.termQuery("foo", "bar")) - ) - val updateResponse = client().makeRequest( - "PUT", monitor.relativeUrl(), - emptyMap(), monitor.copy(inputs = listOf(updatedSearch)).toHttpEntity() - ) + val updatedSearch = + SearchInput( + emptyList(), + SearchSourceBuilder().query(QueryBuilders.termQuery("foo", "bar")), + ) + val updateResponse = + client().makeRequest( + "PUT", + monitor.relativeUrl(), + emptyMap(), + monitor.copy(inputs = listOf(updatedSearch)).toHttpEntity(), + ) assertEquals("Update monitor failed", RestStatus.OK, updateResponse.restStatus()) val responseBody = updateResponse.asMap() @@ -334,21 +352,26 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test updating conditions for a monitor`() { val monitor = createRandomMonitor() - val updatedTriggers = listOf( - QueryLevelTrigger( - name = "foo", - severity = "1", - condition = Script("return true"), - actions = emptyList() + val updatedTriggers = + listOf( + QueryLevelTrigger( + name = "foo", + severity = "1", + condition = Script("return true"), + actions = emptyList(), + ), + ) + val updateResponse = + client().makeRequest( + "PUT", + monitor.relativeUrl(), + emptyMap(), + monitor.copy(triggers = updatedTriggers).toHttpEntity(), ) - ) - val updateResponse = client().makeRequest( - "PUT", monitor.relativeUrl(), - emptyMap(), monitor.copy(triggers = updatedTriggers).toHttpEntity() - ) assertEquals("Update 
monitor failed", RestStatus.OK, updateResponse.restStatus()) val responseBody = updateResponse.asMap() @@ -360,14 +383,18 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test updating schedule for a monitor`() { val monitor = createRandomMonitor() val updatedSchedule = CronSchedule(expression = "0 9 * * *", timezone = ZoneId.of("UTC")) - val updateResponse = client().makeRequest( - "PUT", monitor.relativeUrl(), - emptyMap(), monitor.copy(schedule = updatedSchedule).toHttpEntity() - ) + val updateResponse = + client().makeRequest( + "PUT", + monitor.relativeUrl(), + emptyMap(), + monitor.copy(schedule = updatedSchedule).toHttpEntity(), + ) assertEquals("Update monitor failed", RestStatus.OK, updateResponse.restStatus()) val responseBody = updateResponse.asMap() @@ -379,6 +406,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test getting a monitor`() { val monitor = createRandomMonitor() @@ -388,6 +416,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test getting a monitor that doesn't exist`() { try { getMonitor(randomAlphaOfLength(20)) @@ -398,6 +427,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test get monitor returns 404 when alerting config index is missing`() { try { deleteIndex(".opendistro-alerting-config") @@ -417,12 +447,13 @@ class MonitorRestApiIT : AlertingRestTestCase() { errorMessage.contains("Monitor not found") || errorMessage.contains("index not found") || errorMessage.contains("no such index") || - errorMessage.contains("Configured indices are not found") + errorMessage.contains("Configured indices are not found"), ) } } @Throws(Exception::class) + @Test fun `test checking if a monitor exists`() { val monitor = createRandomMonitor() @@ -431,12 +462,14 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertNull("Response contains unexpected body", 
headResponse.entity) } + @Test fun `test checking if a non-existent monitor exists`() { val headResponse = client().makeRequest("HEAD", "$ALERTING_BASE_URI/foobarbaz") assertEquals("Unexpected status", RestStatus.NOT_FOUND, headResponse.restStatus()) } @Throws(Exception::class) + @Test fun `test deleting a monitor`() { val monitor = createRandomMonitor() @@ -448,6 +481,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test deleting a monitor that doesn't exist`() { try { client().makeRequest("DELETE", "$ALERTING_BASE_URI/foobarbaz") @@ -457,15 +491,18 @@ class MonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test getting UI metadata monitor not from OpenSearch Dashboards`() { val monitor = createRandomMonitor(withMetadata = true) val getMonitor = getMonitor(monitorId = monitor.id) assertEquals( "UI Metadata returned but request did not come from OpenSearch Dashboards.", - getMonitor.uiMetadata, mapOf() + getMonitor.uiMetadata, + mapOf(), ) } + @Test fun `test getting UI metadata monitor from OpenSearch Dashboards`() { val monitor = createRandomMonitor(refresh = true, withMetadata = true) val header = BasicHeader(HttpHeaders.USER_AGENT, "OpenSearch-Dashboards") @@ -473,15 +510,18 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals("", monitor.uiMetadata, getMonitor.uiMetadata) } + @Test fun `test query a monitor that exists`() { val monitor = createRandomMonitor(true) val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", monitor.id)).toString() - val searchResponse = client().makeRequest( - "GET", "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "GET", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = 
createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! as Map> @@ -489,15 +529,18 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals("Monitor not found during search", 1, numberDocsFound) } + @Test fun `test query a monitor that exists POST`() { val monitor = createRandomMonitor(true) val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", monitor.id)).toString() - val searchResponse = client().makeRequest( - "POST", "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "POST", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! as Map> @@ -505,22 +548,26 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals("Monitor not found during search", 1, numberDocsFound) } + @Test fun `test query a monitor that doesn't exist`() { // Create a random monitor to create the ScheduledJob index. Otherwise we test will fail with 404 index not found. 
createRandomMonitor(refresh = true) - val search = SearchSourceBuilder().query( - QueryBuilders.termQuery( - OpenSearchTestCase.randomAlphaOfLength(5), - OpenSearchTestCase.randomAlphaOfLength(5) + val search = + SearchSourceBuilder() + .query( + QueryBuilders.termQuery( + OpenSearchTestCase.randomAlphaOfLength(5), + OpenSearchTestCase.randomAlphaOfLength(5), + ), + ).toString() + + val searchResponse = + client().makeRequest( + "GET", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), ) - ).toString() - - val searchResponse = client().makeRequest( - "GET", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! as Map> @@ -529,6 +576,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test search monitor returns empty response when index is missing`() { try { deleteIndex(".opendistro-alerting-config") @@ -537,67 +585,76 @@ class MonitorRestApiIT : AlertingRestTestCase() { throw e } } - val searchBody = """ + val searchBody = + """ { "query": { "match_all": {} } } - """.trimIndent() - val response = client().makeRequest( - "POST", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(searchBody, ContentType.APPLICATION_JSON) - ) + """.trimIndent() + val response = + client().makeRequest( + "POST", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(searchBody, ContentType.APPLICATION_JSON), + ) val responseBody = response.asMap() val total = ((responseBody["hits"] as? Map<*, *>)?.get("total") as? Map<*, *>)?.get("value") as? 
Int ?: 0 assertEquals("Expected no search results when config index is missing", 0, total) } @Throws(Exception::class) + @Test fun `test search monitor fails with unexpected error`() { - val invalidSearchBody = """ + val invalidSearchBody = + """ { "query": { "bad_query_type": {} } } - """.trimIndent() + """.trimIndent() try { client().makeRequest( "POST", "$ALERTING_BASE_URI/_search", emptyMap(), - StringEntity(invalidSearchBody, ContentType.APPLICATION_JSON) + StringEntity(invalidSearchBody, ContentType.APPLICATION_JSON), ) fail("Expected failure due to bad query") } catch (e: ResponseException) { - val responseBody = e.response.entity.content.bufferedReader().use { it.readText() } + val responseBody = + e.response.entity.content + .bufferedReader() + .use { it.readText() } assertTrue( "Should receive an error from unexpected query type", e.response.restStatus() === RestStatus.BAD_REQUEST || - e.response.restStatus() === RestStatus.INTERNAL_SERVER_ERROR + e.response.restStatus() === RestStatus.INTERNAL_SERVER_ERROR, ) assertTrue( "Response body should indicate query parsing error", responseBody.contains("parsing_exception") || - responseBody.contains("failed to parse") + responseBody.contains("failed to parse"), ) } } + @Test fun `test query a monitor with UI metadata from OpenSearch Dashboards`() { val monitor = createRandomMonitor(refresh = true, withMetadata = true) val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", monitor.id)).toString() val header = BasicHeader(HttpHeaders.USER_AGENT, "OpenSearch-Dashboards") - val searchResponse = client().makeRequest( - "GET", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON), - header - ) + val searchResponse = + client().makeRequest( + "GET", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + header, + ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = 
createParser(XContentType.JSON.xContent(), searchResponse.entity.content) @@ -610,19 +667,21 @@ class MonitorRestApiIT : AlertingRestTestCase() { val monitorHit = hit["_source"] as Map assertNotNull( "UI Metadata returned from search but request did not come from OpenSearchDashboards", - monitorHit[Monitor.UI_METADATA_FIELD] + monitorHit[Monitor.UI_METADATA_FIELD], ) } + @Test fun `test query a monitor with UI metadata as user`() { val monitor = createRandomMonitor(refresh = true, withMetadata = true) val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", monitor.id)).toString() - val searchResponse = client().makeRequest( - "GET", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "GET", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) @@ -635,10 +694,11 @@ class MonitorRestApiIT : AlertingRestTestCase() { val monitorHit = hit["_source"] as Map assertNull( "UI Metadata returned from search but request did not come from OpenSearchDashboards", - monitorHit[Monitor.UI_METADATA_FIELD] + monitorHit[Monitor.UI_METADATA_FIELD], ) } + @Test fun `test acknowledge all alert states`() { putAlertMappings() // Required as we do not have a create alert API. val monitor = createRandomMonitor(refresh = true) @@ -665,6 +725,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertFalse("Alert in state ${activeAlert.state} found in failed list", failedResponseList.contains(activeAlert.id)) } + @Test fun `test acknowledging more than 10 alerts at once`() { // GIVEN putAlertMappings() // Required as we do not have a create alert API. 
@@ -681,7 +742,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { val acknowledgedAlerts = responseMap["success"] as List assertTrue( "Expected $expectedAcknowledgedCount alerts to be acknowledged successfully.", - acknowledgedAlerts.size == expectedAcknowledgedCount + acknowledgedAlerts.size == expectedAcknowledgedCount, ) val acknowledgedAlertsList = acknowledgedAlerts.toString() @@ -693,6 +754,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertTrue("Expected 0 alerts to fail acknowledgment.", failedResponse.isEmpty()) } + @Test fun `test acknowledging more than 10 alerts at once, including acknowledged alerts`() { // GIVEN putAlertMappings() // Required as we do not have a create alert API. @@ -715,7 +777,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { val acknowledgedAlerts = responseMap["success"] as List assertTrue( "Expected $expectedAcknowledgedCount alerts to be acknowledged successfully.", - acknowledgedAlerts.size == expectedAcknowledgedCount + acknowledgedAlerts.size == expectedAcknowledgedCount, ) val acknowledgedAlertsList = acknowledgedAlerts.toString() @@ -739,6 +801,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test acknowledging 0 alerts`() { // GIVEN putAlertMappings() // Required as we do not have a create alert API. @@ -754,6 +817,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test get all alerts in all states`() { putAlertMappings() // Required as we do not have a create alert API. val monitor = createRandomMonitor(refresh = true) @@ -777,6 +841,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertFalse("Invalid alert with id, ${invalidAlert.id}, found in alert list", alerts.contains(invalidAlert.id)) } + @Test fun `test get all alerts with active states`() { putAlertMappings() // Required as we do not have a create alert API. 
val monitor = createRandomMonitor(refresh = true) @@ -800,6 +865,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertFalse("Invalid alert with id, ${invalidAlert.id}, found in alert list", alerts.contains(invalidAlert.id)) } + @Test fun `test get all alerts with severity 1`() { putAlertMappings() // Required as we do not have a create alert API. val monitor = createRandomMonitor(refresh = true) @@ -817,13 +883,14 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals(2, responseMap["totalAlerts"]) assertTrue( "Acknowledged sev 1 alert with id, ${acknowledgedAlert.id}, not found in alert list", - alerts.contains(acknowledgedAlert.id) + alerts.contains(acknowledgedAlert.id), ) assertFalse("Completed sev 3 alert with id, ${completedAlert.id}, found in alert list", alerts.contains(completedAlert.id)) assertTrue("Error sev 1 alert with id, ${errorAlert.id}, not found in alert list", alerts.contains(errorAlert.id)) assertFalse("Active sev 2 alert with id, ${activeAlert.id}, found in alert list", alerts.contains(activeAlert.id)) } + @Test fun `test get all alerts for a specific monitor by id`() { putAlertMappings() // Required as we do not have a create alert API. 
val monitor = createRandomMonitor(refresh = true) @@ -842,13 +909,14 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals(2, responseMap["totalAlerts"]) assertTrue( "Acknowledged alert for chosen monitor with id, ${acknowledgedAlert.id}, not found in alert list", - alerts.contains(acknowledgedAlert.id) + alerts.contains(acknowledgedAlert.id), ) assertFalse("Completed sev 3 alert with id, ${completedAlert.id}, found in alert list", alerts.contains(completedAlert.id)) assertTrue("Error alert for chosen monitor with id, ${errorAlert.id}, not found in alert list", alerts.contains(errorAlert.id)) assertFalse("Active alert sev 2 with id, ${activeAlert.id}, found in alert list", alerts.contains(activeAlert.id)) } + @Test fun `test get alerts by searching monitor name`() { putAlertMappings() // Required as we do not have a create alert API. @@ -868,28 +936,31 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals(2, responseMap["totalAlerts"]) assertTrue( "Acknowledged alert for matching monitor with id, ${acknowledgedAlert.id}, not found in alert list", - alerts.contains(acknowledgedAlert.id) + alerts.contains(acknowledgedAlert.id), ) assertFalse("Completed sev 3 alert with id, ${completedAlert.id}, found in alert list", alerts.contains(completedAlert.id)) assertTrue("Error alert for matching monitor with id, ${errorAlert.id}, not found in alert list", alerts.contains(errorAlert.id)) assertFalse("Active alert sev 2 with id, ${activeAlert.id}, found in alert list", alerts.contains(activeAlert.id)) } + @Test fun `test mappings after monitor creation`() { createRandomMonitor(refresh = true) val response = client().makeRequest("GET", "/${ScheduledJob.SCHEDULED_JOBS_INDEX}/_mapping") val parserMap = createParser(XContentType.JSON.xContent(), response.entity.content).map() as Map> val mappingsMap = parserMap[ScheduledJob.SCHEDULED_JOBS_INDEX]!!["mappings"] as Map - val expected = createParser( - XContentType.JSON.xContent(), - 
javaClass.classLoader.getResource("mappings/scheduled-jobs.json").readText() - ) + val expected = + createParser( + XContentType.JSON.xContent(), + javaClass.classLoader.getResource("mappings/scheduled-jobs.json").readText(), + ) val expectedMap = expected.map() assertEquals("Mappings are different", expectedMap, mappingsMap) } + @Test fun `test delete monitor moves alerts`() { client().updateSettings(ScheduledJobSettings.SWEEPER_ENABLED.key, true) putAlertMappings() @@ -914,10 +985,11 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals( "Alert data incorrect", alert.copy(state = Alert.State.DELETED).toJsonString(), - historyAlerts.single().toJsonString() + historyAlerts.single().toJsonString(), ) } + @Test fun `test delete trigger moves alerts then try to search alert by monitorId to find alert in history index`() { client().updateSettings(ScheduledJobSettings.SWEEPER_ENABLED.key, true) putAlertMappings() @@ -926,12 +998,13 @@ class MonitorRestApiIT : AlertingRestTestCase() { val alert = createAlert(randomAlert(monitor).copy(triggerId = trigger.id, state = Alert.State.ACTIVE)) refreshIndex("*") val updatedMonitor = monitor.copy(triggers = emptyList()) - val updateResponse = client().makeRequest( - "PUT", - "$ALERTING_BASE_URI/${monitor.id}", - emptyMap(), - updatedMonitor.toHttpEntity() - ) + val updateResponse = + client().makeRequest( + "PUT", + "$ALERTING_BASE_URI/${monitor.id}", + emptyMap(), + updatedMonitor.toHttpEntity(), + ) assertEquals("Update request not successful", RestStatus.OK, updateResponse.restStatus()) // Wait 5 seconds for event to be processed and alerts moved @@ -950,6 +1023,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals(1, responseMap["totalAlerts"]) } + @Test fun `test delete trigger moves alerts`() { client().updateSettings(ScheduledJobSettings.SWEEPER_ENABLED.key, true) putAlertMappings() @@ -958,10 +1032,13 @@ class MonitorRestApiIT : AlertingRestTestCase() { val alert = 
createAlert(randomAlert(monitor).copy(triggerId = trigger.id, state = Alert.State.ACTIVE)) refreshIndex("*") val updatedMonitor = monitor.copy(triggers = emptyList()) - val updateResponse = client().makeRequest( - "PUT", "$ALERTING_BASE_URI/${monitor.id}", emptyMap(), - updatedMonitor.toHttpEntity() - ) + val updateResponse = + client().makeRequest( + "PUT", + "$ALERTING_BASE_URI/${monitor.id}", + emptyMap(), + updatedMonitor.toHttpEntity(), + ) assertEquals("Update request not successful", RestStatus.OK, updateResponse.restStatus()) // Wait 5 seconds for event to be processed and alerts moved @@ -979,10 +1056,11 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals( "Alert data incorrect", alert.copy(state = Alert.State.DELETED).toJsonString(), - historyAlerts.single().toJsonString() + historyAlerts.single().toJsonString(), ) } + @Test fun `test delete trigger moves alerts only for deleted trigger`() { client().updateSettings(ScheduledJobSettings.SWEEPER_ENABLED.key, true) putAlertMappings() @@ -993,10 +1071,13 @@ class MonitorRestApiIT : AlertingRestTestCase() { val alertDelete = createAlert(randomAlert(monitor).copy(triggerId = triggerToDelete.id, state = Alert.State.ACTIVE)) refreshIndex("*") val updatedMonitor = monitor.copy(triggers = listOf(triggerToKeep)) - val updateResponse = client().makeRequest( - "PUT", "$ALERTING_BASE_URI/${monitor.id}", emptyMap(), - updatedMonitor.toHttpEntity() - ) + val updateResponse = + client().makeRequest( + "PUT", + "$ALERTING_BASE_URI/${monitor.id}", + emptyMap(), + updatedMonitor.toHttpEntity(), + ) assertEquals("Update request not successful", RestStatus.OK, updateResponse.restStatus()) // Wait until postIndex hook is executed due to monitor update waitUntil({ @@ -1025,16 +1106,19 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals( "Alert data incorrect", alertDelete.copy(state = Alert.State.DELETED).toJsonString(), - historyAlerts.single().toJsonString() + historyAlerts.single().toJsonString(), 
) } + @Test fun `test update monitor with wrong version`() { val monitor = createRandomMonitor(refresh = true) try { client().makeRequest( - "PUT", "${monitor.relativeUrl()}?refresh=true&if_seq_no=1234&if_primary_term=1234", - emptyMap(), monitor.toHttpEntity() + "PUT", + "${monitor.relativeUrl()}?refresh=true&if_seq_no=1234&if_primary_term=1234", + emptyMap(), + monitor.toHttpEntity(), ) fail("expected 409 ResponseException") } catch (e: ResponseException) { @@ -1042,6 +1126,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test monitor stats disable plugin`() { // Disable the Monitor plugin. disableScheduledJob() @@ -1049,19 +1134,22 @@ class MonitorRestApiIT : AlertingRestTestCase() { val responseMap = getAlertingStats() assertAlertingStatsSweeperEnabled(responseMap, false) assertEquals("Scheduled job index exists but there are no scheduled jobs.", false, responseMap["scheduled_job_index_exists"]) - val _nodes = responseMap["_nodes"] as Map - validateAlertingStatsNodeResponse(_nodes) + val nodes = responseMap["_nodes"] as Map + validateAlertingStatsNodeResponse(nodes) } + @Test fun `test monitor stats when disabling and re-enabling scheduled jobs with existing monitor`() { // Enable Monitor jobs enableScheduledJob() val monitorId = createMonitor(randomQueryLevelMonitor(enabled = true), refresh = true).id - if (isMultiNode) OpenSearchTestCase.waitUntil({ - return@waitUntil false - }, 2, TimeUnit.SECONDS) + if (isMultiNode) { + OpenSearchTestCase.waitUntil({ + return@waitUntil false + }, 2, TimeUnit.SECONDS) + } var alertingStats = getAlertingStats() assertAlertingStatsSweeperEnabled(alertingStats, true) assertEquals("Scheduled job index does not exist", true, alertingStats["scheduled_job_index_exists"]) @@ -1072,12 +1160,12 @@ class MonitorRestApiIT : AlertingRestTestCase() { } assertEquals("Nodes are not on schedule", numberOfNodes, alertingStats["nodes_on_schedule"]) - val _nodes = alertingStats["_nodes"] as Map - 
validateAlertingStatsNodeResponse(_nodes) + val nodes = alertingStats["_nodes"] as Map + validateAlertingStatsNodeResponse(nodes) assertTrue( "Monitor [$monitorId] was not found scheduled based on the alerting stats response: $alertingStats", - isMonitorScheduled(monitorId, alertingStats) + isMonitorScheduled(monitorId, alertingStats), ) // Disable Monitor jobs @@ -1087,7 +1175,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertAlertingStatsSweeperEnabled(alertingStats, false) assertFalse( "Monitor [$monitorId] was still scheduled based on the alerting stats response: $alertingStats", - isMonitorScheduled(monitorId, alertingStats) + isMonitorScheduled(monitorId, alertingStats), ) // Re-enable Monitor jobs @@ -1102,10 +1190,11 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertAlertingStatsSweeperEnabled(alertingStats, true) assertTrue( "Monitor [$monitorId] was not re-scheduled based on the alerting stats response: $alertingStats", - isMonitorScheduled(monitorId, alertingStats) + isMonitorScheduled(monitorId, alertingStats), ) } + @Test fun `test monitor stats no jobs`() { // Enable the Monitor plugin. enableScheduledJob() @@ -1113,19 +1202,22 @@ class MonitorRestApiIT : AlertingRestTestCase() { val responseMap = getAlertingStats() assertAlertingStatsSweeperEnabled(responseMap, true) assertEquals("Scheduled job index exists but there are no scheduled jobs.", false, responseMap["scheduled_job_index_exists"]) - val _nodes = responseMap["_nodes"] as Map - validateAlertingStatsNodeResponse(_nodes) + val nodes = responseMap["_nodes"] as Map + validateAlertingStatsNodeResponse(nodes) } + @Test fun `test monitor stats jobs`() { // Enable the Monitor plugin. 
enableScheduledJob() createRandomMonitor(refresh = true) - if (isMultiNode) OpenSearchTestCase.waitUntil({ - return@waitUntil false - }, 2, TimeUnit.SECONDS) + if (isMultiNode) { + OpenSearchTestCase.waitUntil({ + return@waitUntil false + }, 2, TimeUnit.SECONDS) + } val responseMap = getAlertingStats() assertAlertingStatsSweeperEnabled(responseMap, true) assertEquals("Scheduled job index does not exist", true, responseMap["scheduled_job_index_exists"]) @@ -1136,11 +1228,12 @@ class MonitorRestApiIT : AlertingRestTestCase() { } assertEquals("Nodes are not on schedule", numberOfNodes, responseMap["nodes_on_schedule"]) - val _nodes = responseMap["_nodes"] as Map - validateAlertingStatsNodeResponse(_nodes) + val nodes = responseMap["_nodes"] as Map + validateAlertingStatsNodeResponse(nodes) } @Throws(Exception::class) + @Test fun `test max number of monitors`() { client().updateSettings(AlertingSettings.ALERTING_MAX_MONITORS.key, "1") @@ -1153,14 +1246,17 @@ class MonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test monitor specific metric`() { // Enable the Monitor plugin. 
enableScheduledJob() createRandomMonitor(refresh = true) - if (isMultiNode) OpenSearchTestCase.waitUntil({ - return@waitUntil false - }, 2, TimeUnit.SECONDS) + if (isMultiNode) { + OpenSearchTestCase.waitUntil({ + return@waitUntil false + }, 2, TimeUnit.SECONDS) + } val responseMap = getAlertingStats("/jobs_info") assertAlertingStatsSweeperEnabled(responseMap, true) assertEquals("Scheduled job index does not exist", true, responseMap["scheduled_job_index_exists"]) @@ -1171,10 +1267,11 @@ class MonitorRestApiIT : AlertingRestTestCase() { } assertEquals("Nodes not on schedule", numberOfNodes, responseMap["nodes_on_schedule"]) - val _nodes = responseMap["_nodes"] as Map - validateAlertingStatsNodeResponse(_nodes) + val nodes = responseMap["_nodes"] as Map + validateAlertingStatsNodeResponse(nodes) } + @Test fun `test monitor stats incorrect metric`() { try { getAlertingStats("/foobarzzz") @@ -1184,6 +1281,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test monitor stats _all and other metric`() { try { getAlertingStats("/_all,jobs_info") @@ -1193,7 +1291,10 @@ class MonitorRestApiIT : AlertingRestTestCase() { } } - private fun randomMonitorWithThrottle(value: Int, unit: ChronoUnit = ChronoUnit.MINUTES): Monitor { + private fun randomMonitorWithThrottle( + value: Int, + unit: ChronoUnit = ChronoUnit.MINUTES, + ): Monitor { val throttle = randomThrottle(value, unit) val action = randomAction().copy(throttle = throttle) val trigger = randomQueryLevelTrigger(actions = listOf(action)) @@ -1201,8 +1302,8 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test search monitors only`() { - // 1. create monitor val monitor = randomQueryLevelMonitor() val createResponse = client().makeRequest("POST", ALERTING_BASE_URI, emptyMap(), monitor.toHttpEntity()) @@ -1210,26 +1311,28 @@ class MonitorRestApiIT : AlertingRestTestCase() { // 2. 
create destination val chime = Chime("http://abc.com") - val destination = Destination( - type = DestinationType.CHIME, - name = "test", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = chime, - slack = null, - customWebhook = null, - email = null - ) + val destination = + Destination( + type = DestinationType.CHIME, + name = "test", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = chime, + slack = null, + customWebhook = null, + email = null, + ) createDestination(destination) // 3. search - must return only monitors. val search = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).toString() - val searchResponse = client().makeRequest( - "GET", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "GET", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! as Map> @@ -1243,6 +1346,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test search monitor with alerting indices only`() { // 1. 
search - must return error as invalid index is passed val search = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).toString() @@ -1253,7 +1357,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { "GET", "$ALERTING_BASE_URI/_search", params, - StringEntity(search, ContentType.APPLICATION_JSON) + StringEntity(search, ContentType.APPLICATION_JSON), ) } catch (e: ResponseException) { assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) @@ -1261,6 +1365,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test creating a document monitor`() { val testIndex = createTestIndex() val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) @@ -1282,15 +1387,17 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test getting a document level monitor`() { val testIndex = createTestIndex() val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = createMonitor( - randomDocumentLevelMonitor(inputs = listOf(docLevelInput), triggers = listOf(trigger), user = null) - ) + val monitor = + createMonitor( + randomDocumentLevelMonitor(inputs = listOf(docLevelInput), triggers = listOf(trigger), user = null), + ) val storedMonitor = getMonitor(monitor.id) @@ -1298,6 +1405,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test updating conditions for a doc-level monitor`() { val testIndex = createTestIndex() val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) @@ -1306,18 +1414,22 @@ class MonitorRestApiIT : AlertingRestTestCase() { val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) val monitor = 
createMonitor(randomDocumentLevelMonitor(inputs = listOf(docLevelInput), triggers = listOf(trigger))) - val updatedTriggers = listOf( - DocumentLevelTrigger( - name = "foo", - severity = "1", - condition = Script("return true"), - actions = emptyList() + val updatedTriggers = + listOf( + DocumentLevelTrigger( + name = "foo", + severity = "1", + condition = Script("return true"), + actions = emptyList(), + ), + ) + val updateResponse = + client().makeRequest( + "PUT", + monitor.relativeUrl(), + emptyMap(), + monitor.copy(triggers = updatedTriggers).toHttpEntity(), ) - ) - val updateResponse = client().makeRequest( - "PUT", monitor.relativeUrl(), - emptyMap(), monitor.copy(triggers = updatedTriggers).toHttpEntity() - ) assertEquals("Update monitor failed", RestStatus.OK, updateResponse.restStatus()) val responseBody = updateResponse.asMap() @@ -1329,6 +1441,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test deleting a document level monitor`() { val testIndex = createTestIndex() val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) @@ -1344,6 +1457,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals("Deleted monitor still exists", RestStatus.NOT_FOUND, getResponse.restStatus()) } + @Test fun `test creating a document monitor with error trigger`() { val trigger = randomQueryLevelTrigger() try { @@ -1354,11 +1468,12 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals( "a document monitor with error trigger", "Incompatible trigger [${trigger.id}] for monitor type [${Monitor.MonitorType.DOC_LEVEL_MONITOR}]", - e.message + e.message, ) } } + @Test fun `test creating a query monitor with error trigger`() { val trigger = randomBucketLevelTrigger() try { @@ -1369,11 +1484,12 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals( "a query monitor with error trigger", "Incompatible trigger [${trigger.id}] for monitor type 
[${Monitor.MonitorType.QUERY_LEVEL_MONITOR}]", - e.message + e.message, ) } } + @Test fun `test creating and updating a document monitor with invalid query name`() { // creating a monitor with an invalid query name val invalidQueryName = "_Invalid .. query ! n>ame" @@ -1388,8 +1504,9 @@ class MonitorRestApiIT : AlertingRestTestCase() { fail("Doc level monitor with invalid query name should be rejected") } catch (e: ResponseException) { assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) - val expectedMessage = "Doc level query name may not start with [_, +, -], contain '..', or contain: " + - getInvalidNameChars().replace("\\", "") + val expectedMessage = + "Doc level query name may not start with [_, +, -], contain '..', or contain: " + + getInvalidNameChars().replace("\\", "") e.message?.let { assertTrue(it.contains(expectedMessage)) } } @@ -1406,14 +1523,17 @@ class MonitorRestApiIT : AlertingRestTestCase() { try { client().makeRequest( - "PUT", monitor.relativeUrl(), - emptyMap(), monitor.copy(inputs = listOf(updatedDocLevelInput)).toHttpEntity() + "PUT", + monitor.relativeUrl(), + emptyMap(), + monitor.copy(inputs = listOf(updatedDocLevelInput)).toHttpEntity(), ) fail("Doc level monitor with invalid query name should be rejected") } catch (e: ResponseException) { assertEquals("Unexpected status", RestStatus.BAD_REQUEST, e.response.restStatus()) - val expectedMessage = "Doc level query name may not start with [_, +, -], contain '..', or contain: " + - getInvalidNameChars().replace("\\", "") + val expectedMessage = + "Doc level query name may not start with [_, +, -], contain '..', or contain: " + + getInvalidNameChars().replace("\\", "") e.message?.let { assertTrue(it.contains(expectedMessage)) } } } @@ -1422,6 +1542,7 @@ class MonitorRestApiIT : AlertingRestTestCase() { * This use case is needed by the frontend plugin for displaying alert counts on the Monitors list page. 
* https://github.com/opensearch-project/alerting-dashboards-plugin/blob/main/server/services/MonitorService.js#L235 */ + @Test fun `test get acknowledged, active, error, and ignored alerts counts`() { putAlertMappings() val monitorAlertCounts = hashMapOf>() @@ -1436,13 +1557,14 @@ class MonitorRestApiIT : AlertingRestTestCase() { val numIgnoredAlerts = randomIntBetween(1, numCompletedAlerts) numCompletedAlerts -= numIgnoredAlerts - val alertCounts = hashMapOf( - Alert.State.ACKNOWLEDGED.name to numAcknowledgedAlerts, - Alert.State.ACTIVE.name to numActiveAlerts, - Alert.State.COMPLETED.name to numCompletedAlerts, - Alert.State.ERROR.name to numErrorAlerts, - "IGNORED" to numIgnoredAlerts - ) + val alertCounts = + hashMapOf( + Alert.State.ACKNOWLEDGED.name to numAcknowledgedAlerts, + Alert.State.ACTIVE.name to numActiveAlerts, + Alert.State.COMPLETED.name to numCompletedAlerts, + Alert.State.ERROR.name to numErrorAlerts, + "IGNORED" to numIgnoredAlerts, + ) monitorAlertCounts[monitor.id] = alertCounts repeat(numAcknowledgedAlerts) { @@ -1462,38 +1584,42 @@ class MonitorRestApiIT : AlertingRestTestCase() { } } - val sourceBuilder = SearchSourceBuilder() - .size(0) - .query(QueryBuilders.termsQuery("monitor_id", monitorAlertCounts.keys)) - .aggregation( - AggregationBuilders - .terms("uniq_monitor_ids").field("monitor_id") - .subAggregation(AggregationBuilders.filter("active", QueryBuilders.termQuery("state", "ACTIVE"))) - .subAggregation(AggregationBuilders.filter("acknowledged", QueryBuilders.termQuery("state", "ACKNOWLEDGED"))) - .subAggregation(AggregationBuilders.filter("errors", QueryBuilders.termQuery("state", "ERROR"))) - .subAggregation( - AggregationBuilders.filter( - "ignored", - QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery("state", "COMPLETED")) - .mustNot(QueryBuilders.existsQuery("acknowledged_time")) - ) - ) - .subAggregation(AggregationBuilders.max("last_notification_time").field("last_notification_time")) - .subAggregation( - 
AggregationBuilders.topHits("latest_alert") - .size(1) - .sort("start_time", SortOrder.DESC) - .fetchSource(arrayOf("last_notification_time", "trigger_name"), null) - ) - ) + val sourceBuilder = + SearchSourceBuilder() + .size(0) + .query(QueryBuilders.termsQuery("monitor_id", monitorAlertCounts.keys)) + .aggregation( + AggregationBuilders + .terms("uniq_monitor_ids") + .field("monitor_id") + .subAggregation(AggregationBuilders.filter("active", QueryBuilders.termQuery("state", "ACTIVE"))) + .subAggregation(AggregationBuilders.filter("acknowledged", QueryBuilders.termQuery("state", "ACKNOWLEDGED"))) + .subAggregation(AggregationBuilders.filter("errors", QueryBuilders.termQuery("state", "ERROR"))) + .subAggregation( + AggregationBuilders.filter( + "ignored", + QueryBuilders + .boolQuery() + .filter(QueryBuilders.termQuery("state", "COMPLETED")) + .mustNot(QueryBuilders.existsQuery("acknowledged_time")), + ), + ).subAggregation(AggregationBuilders.max("last_notification_time").field("last_notification_time")) + .subAggregation( + AggregationBuilders + .topHits("latest_alert") + .size(1) + .sort("start_time", SortOrder.DESC) + .fetchSource(arrayOf("last_notification_time", "trigger_name"), null), + ), + ) - val searchResponse = client().makeRequest( - "GET", - "$ALERTING_BASE_URI/_search", - hashMapOf("index" to AlertIndices.ALL_ALERT_INDEX_PATTERN), - StringEntity(sourceBuilder.toString(), ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "GET", + "$ALERTING_BASE_URI/_search", + hashMapOf("index" to AlertIndices.ALL_ALERT_INDEX_PATTERN), + StringEntity(sourceBuilder.toString(), ContentType.APPLICATION_JSON), + ) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content).map() val aggregations = (xcp["aggregations"]!! as Map>) val uniqMonitorIds = aggregations["uniq_monitor_ids"]!! @@ -1507,25 +1633,29 @@ class MonitorRestApiIT : AlertingRestTestCase() { val acknowledged = (bucket["acknowledged"]!! 
as Map)["doc_count"]!! assertEquals( "Incorrect ${Alert.State.ACKNOWLEDGED} count returned for monitor $id", - monitorCounts[Alert.State.ACKNOWLEDGED.name], acknowledged + monitorCounts[Alert.State.ACKNOWLEDGED.name], + acknowledged, ) val active = (bucket["active"]!! as Map)["doc_count"]!! assertEquals( "Incorrect ${Alert.State.ACTIVE} count returned for monitor $id", - monitorCounts[Alert.State.ACTIVE.name], active + monitorCounts[Alert.State.ACTIVE.name], + active, ) val errors = (bucket["errors"]!! as Map)["doc_count"]!! assertEquals( "Incorrect ${Alert.State.ERROR} count returned for monitor $id", - monitorCounts[Alert.State.ERROR.name], errors + monitorCounts[Alert.State.ERROR.name], + errors, ) val ignored = (bucket["ignored"]!! as Map)["doc_count"]!! assertEquals( "Incorrect IGNORED count returned for monitor $id", - monitorCounts["IGNORED"], ignored + monitorCounts["IGNORED"], + ignored, ) } } @@ -1536,16 +1666,19 @@ class MonitorRestApiIT : AlertingRestTestCase() { assertEquals("More than $numberOfNodes successful node", numberOfNodes, nodesResponse["successful"]) } - private fun assertAlertingStatsSweeperEnabled(alertingStatsResponse: Map, expected: Boolean) { + private fun assertAlertingStatsSweeperEnabled( + alertingStatsResponse: Map, + expected: Boolean, + ) { assertEquals( "Legacy scheduled job enabled field is not set to $expected", expected, - alertingStatsResponse[statsResponseOpendistroSweeperEnabledField] + alertingStatsResponse[statsResponseOpendistroSweeperEnabledField], ) assertEquals( "Scheduled job is not ${if (expected) "enabled" else "disabled"}", expected, - alertingStatsResponse[statsResponseOpenSearchSweeperEnabledField] + alertingStatsResponse[statsResponseOpenSearchSweeperEnabledField], ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorV2RestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorV2RestApiIT.kt index dbf379e91..307f2f1c3 100644 --- 
a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorV2RestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/MonitorV2RestApiIT.kt @@ -45,6 +45,7 @@ import org.opensearch.index.query.QueryBuilders import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.junit.annotations.TestLogging import java.time.temporal.ChronoUnit.MINUTES +import kotlin.test.Test /*** * Tests Alerting V2 CRUD and validations @@ -61,7 +62,8 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { client().updateSettings(AlertingV2Settings.ALERTING_V2_ENABLED.key, "true") } - /* Simple Case Tests */ + // Simple Case Tests + @Test fun `test create ppl monitor`() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) val pplMonitor = randomPPLMonitor() @@ -76,16 +78,19 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { assertEquals("incorrect version", 1, createdVersion) } + @Test fun `test update ppl monitor`() { val originalMonitor = createRandomPPLMonitor() val newMonitorConfig = randomPPLMonitor() - val updateResponse = client().makeRequest( - "PUT", - "$MONITOR_V2_BASE_URI/${originalMonitor.id}", - emptyMap(), newMonitorConfig.toHttpEntity() - ) + val updateResponse = + client().makeRequest( + "PUT", + "$MONITOR_V2_BASE_URI/${originalMonitor.id}", + emptyMap(), + newMonitorConfig.toHttpEntity(), + ) assertEquals("Update monitor failed", RestStatus.OK, updateResponse.restStatus()) val responseBody = updateResponse.asMap() @@ -96,6 +101,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { assertPplMonitorsEqual(newMonitorConfig, updatedMonitor) } + @Test fun `test get ppl monitor`() { // first create the monitor createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) @@ -131,29 +137,36 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { assertEquals( "Monitor V2 ID from Get Monitor doesn't match one from Create Monitor response", - pplMonitorId, id + pplMonitorId, + id, ) assertEquals( "Monitor 
V2 version from Get Monitor doesn't match one from Create Monitor response", - pplMonitorVersion, version + pplMonitorVersion, + version, ) assertPplMonitorsEqual(pplMonitor, storedPplMonitor) } + @Test fun `test head ppl monitor`() { val submittedPplMonitor = createRandomPPLMonitor() val response = client().makeRequest("HEAD", "$MONITOR_V2_BASE_URI/${submittedPplMonitor.id}") assertEquals("Unable to get monitorV2 ${submittedPplMonitor.id}", RestStatus.NO_CONTENT, response.restStatus()) } + @Test fun `test search ppl monitor with GET and match_all`() { createRandomPPLMonitor() val search = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).toString() - val searchResponse = client().makeRequest( - "GET", "$MONITOR_V2_BASE_URI/_search", - emptyMap(), StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "GET", + "$MONITOR_V2_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) @@ -162,14 +175,18 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { assertEquals("PPL Monitor not found during search", 1, numberDocsFound) } + @Test fun `test search ppl monitor with POST and term query on ID`() { val pplMonitor = createRandomPPLMonitor() val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", pplMonitor.id)).toString() - val searchResponse = client().makeRequest( - "POST", "$MONITOR_V2_BASE_URI/_search", - emptyMap(), StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "POST", + "$MONITOR_V2_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) @@ 
-178,6 +195,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { assertEquals("PPL Monitor not found during search", 1, numberDocsFound) } + @Test fun `test delete ppl monitor`() { val pplMonitor = createRandomPPLMonitor() @@ -188,6 +206,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { assertEquals("Deleted monitor still exists", RestStatus.NOT_FOUND, getResponse.restStatus()) } + @Test fun `test parsing ppl monitor as a scheduled job`() { val monitorV2 = createRandomPPLMonitor() @@ -198,6 +217,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { assertEquals(monitorV2, scheduledJob) } + @Test fun `test monitor stats v1 and v2 only return stats for their respective monitors`() { enableScheduledJob() @@ -222,11 +242,13 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { assertFalse("V2 stats contains V1 Monitor", isMonitorScheduled(monitorV1Id, statsV2Response)) } - /* Validation Tests */ + // Validation Tests + @Test fun `test create ppl monitor that queries nonexistent index fails`() { - val pplMonitorConfig = randomPPLMonitor( - query = "source = nonexistent_index | head 10" - ) + val pplMonitorConfig = + randomPPLMonitor( + query = "source = nonexistent_index | head 10", + ) // ensure the request fails try { @@ -240,6 +262,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test create ppl monitor with more than max allowed monitors fails`() { adminClient().updateSettings(ALERTING_V2_MAX_MONITORS.key, 1) @@ -257,6 +280,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(1) } + @Test fun `test create ppl monitor with throttle greater than max fails`() { val maxThrottleDuration = 60L client().updateSettings(ALERTING_V2_MAX_THROTTLE_DURATION.key, maxThrottleDuration) @@ -265,10 +289,11 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { try { createRandomPPLMonitor( randomPPLMonitor( - triggers = listOf( - randomPPLTrigger(throttleDuration = maxThrottleDuration + 10) - ) - ) + 
triggers = + listOf( + randomPPLTrigger(throttleDuration = maxThrottleDuration + 10), + ), + ), ) fail("Expected request to fail with BAD_REQUEST but it succeeded") } catch (e: ResponseException) { @@ -279,6 +304,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test create ppl monitor with expire greater than max fails`() { val maxExpireDuration = 60L client().updateSettings(ALERTING_V2_MAX_EXPIRE_DURATION.key, maxExpireDuration) @@ -287,10 +313,11 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { try { createRandomPPLMonitor( randomPPLMonitor( - triggers = listOf( - randomPPLTrigger(expireDuration = maxExpireDuration + 10) - ) - ) + triggers = + listOf( + randomPPLTrigger(expireDuration = maxExpireDuration + 10), + ), + ), ) fail("Expected request to fail with BAD_REQUEST but it succeeded") } catch (e: ResponseException) { @@ -301,6 +328,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test create ppl monitor with look back window greater than max fails`() { val maxLookBackWindow = 60L client().updateSettings(ALERTING_V2_MAX_LOOK_BACK_WINDOW.key, maxLookBackWindow) @@ -309,8 +337,8 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { try { createRandomPPLMonitor( randomPPLMonitor( - lookBackWindow = maxLookBackWindow + 10 - ) + lookBackWindow = maxLookBackWindow + 10, + ), ) fail("Expected request to fail with BAD_REQUEST but it succeeded") } catch (e: ResponseException) { @@ -321,13 +349,14 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test create ppl monitor with invalid query fails`() { // ensure the request fails try { createRandomPPLMonitor( randomPPLMonitor( - query = "source = $TEST_INDEX_NAME | not valid ppl" - ) + query = "source = $TEST_INDEX_NAME | not valid ppl", + ), ) fail("Expected request to fail with BAD_REQUEST but it succeeded") } catch (e: ResponseException) { @@ -338,6 +367,7 @@ class 
MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test create ppl monitor with query that's too long fails`() { adminClient().updateSettings(ALERTING_V2_MAX_QUERY_LENGTH.key, 1) @@ -345,8 +375,8 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { try { createRandomPPLMonitor( randomPPLMonitor( - query = "source = $TEST_INDEX_NAME | head 10" - ) + query = "source = $TEST_INDEX_NAME | head 10", + ), ) fail("Expected request to fail with BAD_REQUEST but it succeeded") } catch (e: ResponseException) { @@ -357,21 +387,23 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test create ppl monitor with invalid custom condition fails`() { // ensure the request fails try { createRandomPPLMonitor( randomPPLMonitor( - triggers = listOf( - randomPPLTrigger( - conditionType = ConditionType.CUSTOM, - customCondition = "not a valid PPL custom condition", - numResultsCondition = null, - numResultsValue = null - ) - ), - query = "source = $TEST_INDEX_NAME | head 10" - ) + triggers = + listOf( + randomPPLTrigger( + conditionType = ConditionType.CUSTOM, + customCondition = "not a valid PPL custom condition", + numResultsCondition = null, + numResultsValue = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", + ), ) fail("Expected request to fail with BAD_REQUEST but it succeeded") } catch (e: ResponseException) { @@ -382,6 +414,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test create ppl monitor with custom condition that evals to num not bool fails`() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(1, MINUTES, "abc", 1) @@ -391,16 +424,17 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { try { createRandomPPLMonitor( randomPPLMonitor( - triggers = listOf( - randomPPLTrigger( - conditionType = ConditionType.CUSTOM, - customCondition = "eval something = sum * 2", - numResultsCondition = null, - 
numResultsValue = null - ) - ), - query = "source = $TEST_INDEX_NAME | stats sum(number) as sum by abc" - ) + triggers = + listOf( + randomPPLTrigger( + conditionType = ConditionType.CUSTOM, + customCondition = "eval something = sum * 2", + numResultsCondition = null, + numResultsValue = null, + ), + ), + query = "source = $TEST_INDEX_NAME | stats sum(number) as sum by abc", + ), ) fail("Expected request to fail with BAD_REQUEST but it succeeded") } catch (e: ResponseException) { @@ -411,6 +445,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test create ppl monitor with notification subject source too long fails`() { adminClient().updateSettings(NOTIFICATION_SUBJECT_SOURCE_MAX_LENGTH.key, 100) @@ -423,21 +458,25 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { try { createRandomPPLMonitor( randomPPLMonitor( - triggers = listOf( - randomPPLTrigger( - actions = listOf( - randomAction( - template = randomTemplateScript( - source = "some message" + triggers = + listOf( + randomPPLTrigger( + actions = + listOf( + randomAction( + template = + randomTemplateScript( + source = "some message", + ), + subjectTemplate = + randomTemplateScript( + source = subjectTooLong, + ), + ), ), - subjectTemplate = randomTemplateScript( - source = subjectTooLong - ) - ) - ) - ) - ) - ) + ), + ), + ), ) fail("Expected request to fail with BAD_REQUEST but it succeeded") } catch (e: ResponseException) { @@ -448,6 +487,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test create ppl monitor with notification message source too long fails`() { adminClient().updateSettings(NOTIFICATION_MESSAGE_SOURCE_MAX_LENGTH.key, 1000) @@ -460,21 +500,25 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { try { createRandomPPLMonitor( randomPPLMonitor( - triggers = listOf( - randomPPLTrigger( - actions = listOf( - randomAction( - template = randomTemplateScript( - source = messageTooLong + triggers = + 
listOf( + randomPPLTrigger( + actions = + listOf( + randomAction( + template = + randomTemplateScript( + source = messageTooLong, + ), + subjectTemplate = + randomTemplateScript( + source = "some subject", + ), + ), ), - subjectTemplate = randomTemplateScript( - source = "some subject" - ) - ) - ) - ) - ) - ) + ), + ), + ), ) fail("Expected request to fail with BAD_REQUEST but it succeeded") } catch (e: ResponseException) { @@ -485,6 +529,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test get ppl monitor with invalid monitor ID length`() { val badId = UUIDs.base64UUID() + "extra" try { @@ -495,6 +540,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { } } + @Test fun `test update nonexistent ppl monitor fails`() { // the random monitor query searches index TEST_INDEX_NAME, // so we need to create that first to ensure at least the request body is valid @@ -511,6 +557,7 @@ class MonitorV2RestApiIT : AlertingRestTestCase() { } } + @Test fun `test delete nonexistent ppl monitor fails`() { val randomId = UUIDs.base64UUID() diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureAlertingCommentsRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureAlertingCommentsRestApiIT.kt index 9a92ddec0..1741a0cf8 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureAlertingCommentsRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureAlertingCommentsRestApiIT.kt @@ -21,9 +21,9 @@ import org.opensearch.commons.rest.SecureRestClientBuilder import org.opensearch.core.rest.RestStatus import org.opensearch.index.query.QueryBuilders import org.opensearch.search.builder.SearchSourceBuilder +import kotlin.test.Test class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { - companion object { @BeforeClass @JvmStatic fun setup() { @@ -41,15 +41,17 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { fun 
create() { if (userAClient == null) { createUser(userA, arrayOf()) - userAClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), userA, password) - .setSocketTimeout(60000) - .build() + userAClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), userA, password) + .setSocketTimeout(60000) + .build() } if (userBClient == null) { createUser(userB, arrayOf()) - userBClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), userB, password) - .setSocketTimeout(6000) - .build() + userBClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), userB, password) + .setSocketTimeout(6000) + .build() } client().updateSettings(ALERTING_COMMENTS_ENABLED.key, "true") } @@ -62,12 +64,13 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { deleteUser(userB) } + @Test fun `test user with alerting full access can create comment`() { createUserWithRoles( userA, listOf(ALERTING_FULL_ACCESS_ROLE), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) val alert = createAlert(randomAlert(monitor).copy(state = Alert.State.ACTIVE)) @@ -79,12 +82,13 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { deleteRoleMapping(ALERTING_FULL_ACCESS_ROLE) } + @Test fun `test user with alerting full access can view comments`() { createUserWithRoles( userA, listOf(ALERTING_FULL_ACCESS_ROLE), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) val alert = createAlert(randomAlert(monitor).copy(state = Alert.State.ACTIVE)) @@ -105,12 +109,13 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { deleteRoleMapping(ALERTING_FULL_ACCESS_ROLE) } + @Test fun `test user with alerting full access can edit comment`() { createUserWithRoles( userA, listOf(ALERTING_FULL_ACCESS_ROLE), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) val alert = createAlert(randomAlert(monitor).copy(state = Alert.State.ACTIVE)) @@ -125,12 
+130,13 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { deleteRoleMapping(ALERTING_FULL_ACCESS_ROLE) } + @Test fun `test user with alerting full access can delete comment`() { createUserWithRoles( userA, listOf(ALERTING_FULL_ACCESS_ROLE), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) val alert = createAlert(randomAlert(monitor).copy(state = Alert.State.ACTIVE)) @@ -144,12 +150,13 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { deleteRoleMapping(ALERTING_FULL_ACCESS_ROLE) } + @Test fun `test user with alerting ack alerts can create comment`() { createUserWithRoles( userA, listOf(ALERTING_ACK_ALERTS_ROLE), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) val alert = createAlert(randomAlert(monitor).copy(state = Alert.State.ACTIVE)) @@ -161,12 +168,13 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { deleteRoleMapping(ALERTING_ACK_ALERTS_ROLE) } + @Test fun `test user with alerting ack alerts can view comments`() { createUserWithRoles( userA, listOf(ALERTING_ACK_ALERTS_ROLE), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) val alert = createAlert(randomAlert(monitor).copy(state = Alert.State.ACTIVE)) @@ -187,12 +195,13 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { deleteRoleMapping(ALERTING_ACK_ALERTS_ROLE) } + @Test fun `test user with alerting ack alerts can edit comment`() { createUserWithRoles( userA, listOf(ALERTING_ACK_ALERTS_ROLE), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) val alert = createAlert(randomAlert(monitor).copy(state = Alert.State.ACTIVE)) @@ -207,12 +216,13 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { deleteRoleMapping(ALERTING_ACK_ALERTS_ROLE) } + @Test fun `test user with alerting ack alerts can delete comment`() { createUserWithRoles( userA, listOf(ALERTING_ACK_ALERTS_ROLE), listOf(), - false + false, ) val monitor = 
createRandomMonitor(refresh = true) val alert = createAlert(randomAlert(monitor).copy(state = Alert.State.ACTIVE)) @@ -226,12 +236,13 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { deleteRoleMapping(ALERTING_ACK_ALERTS_ROLE) } + @Test fun `test user with alerting read access cannot create comment`() { createUserWithRoles( userA, listOf(ALERTING_READ_ONLY_ACCESS), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) @@ -249,18 +260,19 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { } } + @Test fun `test user with alerting read access can view comments`() { createUserWithRoles( userA, listOf(ALERTING_READ_ONLY_ACCESS), listOf(), - false + false, ) createUserWithRoles( userB, listOf(ALERTING_FULL_ACCESS_ROLE), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) val alert = createAlert(randomAlert(monitor).copy(state = Alert.State.ACTIVE)) @@ -282,12 +294,13 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { deleteRoleMapping(ALERTING_FULL_ACCESS_ROLE) } + @Test fun `test user with no roles cannot create comment`() { createUserWithRoles( userA, listOf(), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) @@ -303,18 +316,19 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { } } + @Test fun `test user with no roles cannot view comments`() { createUserWithRoles( userA, listOf(), listOf(), - false + false, ) createUserWithRoles( userB, listOf(ALERTING_FULL_ACCESS_ROLE), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) val alert = createAlert(randomAlert(monitor).copy(state = Alert.State.ACTIVE)) @@ -337,6 +351,7 @@ class SecureAlertingCommentsRestApiIT : AlertingRestTestCase() { } } + @Test fun `test user cannot edit someone else's comment`() { createUser(userA, listOf().toTypedArray()) createUser(userB, listOf().toTypedArray()) @@ -359,12 +374,13 @@ class SecureAlertingCommentsRestApiIT : 
AlertingRestTestCase() { } } + @Test fun `test admin can edit someone else's comment`() { createUserWithRoles( userA, listOf(ALERTING_FULL_ACCESS_ROLE), listOf(), - false + false, ) val monitor = createRandomMonitor(refresh = true) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureDestinationRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureDestinationRestApiIT.kt index 1cf20bf47..2fff161a8 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureDestinationRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureDestinationRestApiIT.kt @@ -27,13 +27,12 @@ import org.opensearch.commons.rest.SecureRestClientBuilder import org.opensearch.core.rest.RestStatus import org.opensearch.test.junit.annotations.TestLogging import java.time.Instant +import kotlin.test.Test @TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class SecureDestinationRestApiIT : AlertingRestTestCase() { - companion object { - @BeforeClass @JvmStatic fun setup() { // things to execute once and keep around for the class @@ -46,55 +45,58 @@ class SecureDestinationRestApiIT : AlertingRestTestCase() { @Before fun create() { - if (userClient == null) { createUser(user, arrayOf()) - userClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), user, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + userClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), user, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() } } @After fun cleanup() { - userClient?.close() deleteUser(user) } + @Test fun `test create destination with disable filter by`() { disableFilterBy() val chime = Chime("http://abc.com") - val destination = Destination( - type = DestinationType.CHIME, - name = "test", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = chime, - 
slack = null, - customWebhook = null, - email = null - ) + val destination = + Destination( + type = DestinationType.CHIME, + name = "test", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = chime, + slack = null, + customWebhook = null, + email = null, + ) val createdDestination = createDestination(destination = destination) assertEquals("Incorrect destination name", createdDestination.name, "test") assertEquals("Incorrect destination type", createdDestination.type, DestinationType.CHIME) } + @Test fun `test get destinations with a destination type and disable filter by`() { disableFilterBy() val slack = Slack("url") - val destination = Destination( - type = DestinationType.SLACK, - name = "testSlack", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = null, - slack = slack, - customWebhook = null, - email = null - ) + val destination = + Destination( + type = DestinationType.SLACK, + name = "testSlack", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = null, + slack = slack, + customWebhook = null, + email = null, + ) // 1. create a destination as admin user createDestination(destination, true) @@ -108,20 +110,22 @@ class SecureDestinationRestApiIT : AlertingRestTestCase() { assertEquals(1, adminResponse.size) } + @Test fun `test get destinations with a destination type and filter by`() { enableFilterBy() val slack = Slack("url") - val destination = Destination( - type = DestinationType.SLACK, - name = "testSlack", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = null, - slack = slack, - customWebhook = null, - email = null - ) + val destination = + Destination( + type = DestinationType.SLACK, + name = "testSlack", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = null, + slack = slack, + customWebhook = null, + email = null, + ) // 1. 
create a destination as admin user createDestination(destination, true) @@ -136,25 +140,26 @@ class SecureDestinationRestApiIT : AlertingRestTestCase() { } // Destination related tests - + @Test fun `test get destination with an user with get destination role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_GET_DESTINATION_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_GET_DESTINATION_ACCESS), ) createDestination(getTestDestination()) try { - val getDestinationResponse = userClient?.makeRequest( - "GET", - AlertingPlugin.DESTINATION_BASE_URI, - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getDestinationResponse = + userClient?.makeRequest( + "GET", + AlertingPlugin.DESTINATION_BASE_URI, + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Index Email Group failed", RestStatus.OK, getDestinationResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureEmailAccountRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureEmailAccountRestApiIT.kt index 25c27b861..b168bd300 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureEmailAccountRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureEmailAccountRestApiIT.kt @@ -25,26 +25,26 @@ import org.opensearch.client.ResponseException import org.opensearch.client.RestClient import org.opensearch.commons.rest.SecureRestClientBuilder import org.opensearch.core.rest.RestStatus - -val SEARCH_EMAIL_ACCOUNT_DSL = """ - { - "from": 0, - "size": 20, - "sort": { "email_group.name.keyword": "desc" }, - "query": { - "bool": { - "must": { - "match_all": {} - } - } - } - } -""".trimIndent() +import kotlin.test.Test + +val SEARCH_EMAIL_ACCOUNT_DSL = + """ + { + "from": 0, + "size": 20, 
+ "sort": { "email_group.name.keyword": "desc" }, + "query": { + "bool": { + "must": { + "match_all": {} + } + } + } + } + """.trimIndent() class SecureEmailAccountRestApiIT : AlertingRestTestCase() { - companion object { - @BeforeClass @JvmStatic fun setup() { // things to execute once and keep around for the class @@ -57,46 +57,46 @@ class SecureEmailAccountRestApiIT : AlertingRestTestCase() { @Before fun create() { - if (userClient == null) { createUser(user, arrayOf()) - userClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), user, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + userClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), user, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() } } @After fun cleanup() { - userClient?.close() deleteUser(user) } // Email account related tests. - + @Test fun `test get email accounts with an user with get email account role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_GET_EMAIL_ACCOUNT_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_GET_EMAIL_ACCOUNT_ACCESS), ) val emailAccount = createRandomEmailAccountWithGivenName(true, randomAlphaOfLength(5)) try { - val emailAccountResponse = userClient?.makeRequest( - "GET", - "${AlertingPlugin.EMAIL_ACCOUNT_BASE_URI}/${emailAccount.id}", - StringEntity( - emailAccount.toJsonString(), - ContentType.APPLICATION_JSON - ), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val emailAccountResponse = + userClient?.makeRequest( + "GET", + "${AlertingPlugin.EMAIL_ACCOUNT_BASE_URI}/${emailAccount.id}", + StringEntity( + emailAccount.toJsonString(), + ContentType.APPLICATION_JSON, + ), + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get Email failed", RestStatus.OK, emailAccountResponse?.restStatus()) } 
finally { @@ -104,25 +104,26 @@ class SecureEmailAccountRestApiIT : AlertingRestTestCase() { } } + @Test fun `test search email accounts with an user with search email account role`() { - createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_SEARCH_EMAIL_ACCOUNT_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_SEARCH_EMAIL_ACCOUNT_ACCESS), ) createRandomEmailAccountWithGivenName(true, randomAlphaOfLength(10)) try { - val searchEmailAccountResponse = userClient?.makeRequest( - "POST", - "${AlertingPlugin.EMAIL_ACCOUNT_BASE_URI}/_search", - StringEntity(SEARCH_EMAIL_ACCOUNT_DSL, ContentType.APPLICATION_JSON), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val searchEmailAccountResponse = + userClient?.makeRequest( + "POST", + "${AlertingPlugin.EMAIL_ACCOUNT_BASE_URI}/_search", + StringEntity(SEARCH_EMAIL_ACCOUNT_DSL, ContentType.APPLICATION_JSON), + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Search Email failed", RestStatus.OK, searchEmailAccountResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) @@ -132,13 +133,14 @@ class SecureEmailAccountRestApiIT : AlertingRestTestCase() { /* TODO: https://github.com/opensearch-project/alerting/issues/300 */ + @Test fun `test get email accounts with an user without get email account role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) val emailAccount = createRandomEmailAccountWithGivenName(true, randomAlphaOfLength(5)) try { @@ -147,9 +149,9 @@ class SecureEmailAccountRestApiIT : AlertingRestTestCase() { "${AlertingPlugin.EMAIL_ACCOUNT_BASE_URI}/${emailAccount.id}", StringEntity( emailAccount.toJsonString(), - ContentType.APPLICATION_JSON + ContentType.APPLICATION_JSON, 
), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -158,13 +160,15 @@ class SecureEmailAccountRestApiIT : AlertingRestTestCase() { deleteRoleAndRoleMapping(TEST_HR_ROLE) } } + + @Test fun `test search email accounts with an user without search email account role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) createRandomEmailAccountWithGivenName(true, randomAlphaOfLength(5)) try { @@ -172,7 +176,7 @@ class SecureEmailAccountRestApiIT : AlertingRestTestCase() { "POST", "${AlertingPlugin.EMAIL_ACCOUNT_BASE_URI}/_search", StringEntity(SEARCH_EMAIL_ACCOUNT_DSL, ContentType.APPLICATION_JSON), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureEmailGroupsRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureEmailGroupsRestApiIT.kt index 614b9d5a6..7d3264133 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureEmailGroupsRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureEmailGroupsRestApiIT.kt @@ -24,27 +24,28 @@ import org.opensearch.client.RestClient import org.opensearch.commons.rest.SecureRestClientBuilder import org.opensearch.core.rest.RestStatus import org.opensearch.test.junit.annotations.TestLogging - -val SEARCH_EMAIL_GROUP_DSL = """ - { - "from": 0, - "size": 20, - "sort": { "email_group.name.keyword": "desc" }, - "query": { - "bool": { - "must": { - "match_all": {} - } - } +import kotlin.test.Test + 
+val SEARCH_EMAIL_GROUP_DSL = + """ + { + "from": 0, + "size": 20, + "sort": { "email_group.name.keyword": "desc" }, + "query": { + "bool": { + "must": { + "match_all": {} } } -""".trimIndent() + } + } + """.trimIndent() @TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class SecureEmailGroupsRestApiIT : AlertingRestTestCase() { companion object { - @BeforeClass @JvmStatic fun setup() { // things to execute once and keep around for the class @@ -57,73 +58,75 @@ class SecureEmailGroupsRestApiIT : AlertingRestTestCase() { @Before fun create() { - if (userClient == null) { createUser(user, arrayOf()) - userClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), user, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + userClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), user, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() } } @After fun cleanup() { - userClient?.close() deleteUser(user) } // Email groups related tests. 
- + @Test fun `test get email groups with an user with get email group role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_GET_EMAIL_GROUP_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_GET_EMAIL_GROUP_ACCESS), ) val emailGroup = createRandomEmailGroupWithGivenName(true, randomAlphaOfLength(5)) try { - val getEmailGroupResponse = userClient?.makeRequest( - "GET", - "${AlertingPlugin.EMAIL_GROUP_BASE_URI}/${emailGroup.id}", - StringEntity( - emailGroup.toJsonString(), - ContentType.APPLICATION_JSON - ), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getEmailGroupResponse = + userClient?.makeRequest( + "GET", + "${AlertingPlugin.EMAIL_GROUP_BASE_URI}/${emailGroup.id}", + StringEntity( + emailGroup.toJsonString(), + ContentType.APPLICATION_JSON, + ), + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get Email Group failed", RestStatus.OK, getEmailGroupResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) } } + @Test fun `test search email groups with an user with search email group role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_SEARCH_EMAIL_GROUP_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_SEARCH_EMAIL_GROUP_ACCESS), ) createRandomEmailGroupWithGivenName(true, randomAlphaOfLength(10)) try { - val searchEmailGroupsResponse = userClient?.makeRequest( - "POST", - "${AlertingPlugin.EMAIL_GROUP_BASE_URI}/_search", - StringEntity( - SEARCH_EMAIL_GROUP_DSL, - ContentType.APPLICATION_JSON - ), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val searchEmailGroupsResponse = + userClient?.makeRequest( + "POST", + "${AlertingPlugin.EMAIL_GROUP_BASE_URI}/_search", + StringEntity( + SEARCH_EMAIL_GROUP_DSL, + ContentType.APPLICATION_JSON, + ), + 
BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Search Email Group failed", RestStatus.OK, searchEmailGroupsResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorRestApiIT.kt index 2f7fca92e..5024a9a03 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorRestApiIT.kt @@ -64,13 +64,12 @@ import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationB import org.opensearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.junit.annotations.TestLogging +import kotlin.test.Test @TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class SecureMonitorRestApiIT : AlertingRestTestCase() { - companion object { - @BeforeClass @JvmStatic fun setup() { // things to execute once and keep around for the class @@ -83,42 +82,44 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { @Before fun create() { - if (userClient == null) { createUser(user, arrayOf()) - userClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), user, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + userClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), user, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() } } @After fun cleanup() { - userClient?.close() deleteUser(user) } // Create Monitor related security tests + @Test fun `test create monitor with an user with alerting role`() { - createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - 
getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS), ) try { // randomMonitor has a dummy user, api ignores the User passed as part of monitor, it picks user info from the logged-in user. - val monitor = randomQueryLevelMonitor().copy( - inputs = listOf( - SearchInput( - indices = listOf(TEST_HR_INDEX), query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - ) + val monitor = + randomQueryLevelMonitor().copy( + inputs = + listOf( + SearchInput( + indices = listOf(TEST_HR_INDEX), + query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()), + ), + ), ) - ) val createResponse = userClient?.makeRequest("POST", ALERTING_BASE_URI, emptyMap(), monitor.toHttpEntity()) assertEquals("Create monitor failed", RestStatus.CREATED, createResponse?.restStatus()) @@ -130,24 +131,27 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { /* TODO: https://github.com/opensearch-project/alerting/issues/300 - */ + */ + @Test fun `test create monitor with an user without alerting role`() { - createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) try { - val monitor = randomQueryLevelMonitor().copy( - inputs = listOf( - SearchInput( - indices = listOf(TEST_HR_INDEX), query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - ) + val monitor = + randomQueryLevelMonitor().copy( + inputs = + listOf( + SearchInput( + indices = listOf(TEST_HR_INDEX), + query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()), + ), + ), ) - ) userClient?.makeRequest("POST", ALERTING_BASE_URI, emptyMap(), monitor.toHttpEntity()) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -157,19 +161,22 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create 
monitor with an user with read-only role`() { - createUserWithTestData(user, TEST_HR_INDEX, TEST_HR_ROLE, TEST_HR_BACKEND_ROLE) createUserRolesMapping(ALERTING_READ_ONLY_ACCESS, arrayOf(user)) try { - val monitor = randomQueryLevelMonitor().copy( - inputs = listOf( - SearchInput( - indices = listOf(TEST_HR_INDEX), query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - ) + val monitor = + randomQueryLevelMonitor().copy( + inputs = + listOf( + SearchInput( + indices = listOf(TEST_HR_INDEX), + query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()), + ), + ), ) - ) userClient?.makeRequest("POST", ALERTING_BASE_URI, emptyMap(), monitor.toHttpEntity()) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -180,23 +187,25 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test query monitors with an user with only search monitor cluster permission`() { - createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_SEARCH_MONITOR_ONLY_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_SEARCH_MONITOR_ONLY_ACCESS), ) val monitor = createRandomMonitor(true) val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", monitor.id)).toString() - val searchResponse = client().makeRequest( - "GET", "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "GET", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, searchResponse.restStatus()) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) @@ -208,24 +217,27 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { /* TODO: https://github.com/opensearch-project/alerting/issues/300 - */ + */ + @Test fun `test query monitors 
with an user without search monitor cluster permission`() { - createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) try { - val monitor = randomQueryLevelMonitor().copy( - inputs = listOf( - SearchInput( - indices = listOf(TEST_HR_INDEX), query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - ) + val monitor = + randomQueryLevelMonitor().copy( + inputs = + listOf( + SearchInput( + indices = listOf(TEST_HR_INDEX), + query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()), + ), + ), ) - ) userClient?.makeRequest("POST", ALERTING_BASE_URI, emptyMap(), monitor.toHttpEntity()) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -235,23 +247,26 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create monitor with an user without index read role`() { - createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS), ) try { - val monitor = randomQueryLevelMonitor().copy( - inputs = listOf( - SearchInput( - indices = listOf(TEST_NON_HR_INDEX), query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - ) + val monitor = + randomQueryLevelMonitor().copy( + inputs = + listOf( + SearchInput( + indices = listOf(TEST_NON_HR_INDEX), + query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()), + ), + ), ) - ) val createResponse = userClient?.makeRequest("POST", ALERTING_BASE_URI, emptyMap(), monitor.toHttpEntity()) assertEquals("Create monitor failed", RestStatus.CREATED, createResponse?.restStatus()) fail("Expected 403 Method FORBIDDEN response") @@ -262,6 +277,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + 
@Test fun `test create monitor with disable filter by`() { disableFilterBy() val monitor = randomQueryLevelMonitor() @@ -270,24 +286,26 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { assertUserNull(createResponse.asMap()["monitor"] as HashMap) } + @Test fun `test get monitor with an user with get monitor role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS), ) val monitor = createRandomMonitor(true) try { - val getMonitorResponse = userClient?.makeRequest( - "GET", - "$ALERTING_BASE_URI/${monitor.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getMonitorResponse = + userClient?.makeRequest( + "GET", + "$ALERTING_BASE_URI/${monitor.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get monitor failed", RestStatus.OK, getMonitorResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) @@ -297,13 +315,14 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { /* TODO: https://github.com/opensearch-project/alerting/issues/300 */ + @Test fun `test get monitor with an user without get monitor role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) val monitor = createRandomMonitor(true) @@ -313,7 +332,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { "GET", "$ALERTING_BASE_URI/${monitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -324,14 +343,16 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } fun 
getDocs(response: Response?): Any? { - val hits = createParser( - XContentType.JSON.xContent(), - response?.entity?.content - ).map()["hits"]!! as Map> + val hits = + createParser( + XContentType.JSON.xContent(), + response?.entity?.content, + ).map()["hits"]!! as Map> return hits["total"]?.get("value") } // Query Monitors related security tests + @Test fun `test update monitor with disable filter by`() { disableFilterBy() val monitor = randomQueryLevelMonitor(enabled = true) @@ -347,6 +368,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { assertFalse("The monitor was not disabled", updatedMonitor.enabled) } + @Test fun `test update monitor with enable filter by`() { enableFilterBy() if (!isHttps()) { @@ -367,6 +389,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { assertFalse("The monitor was not disabled", updatedMonitor.enabled) } + @Test fun `test create monitor with enable filter by with a user have access and without role has no access`() { enableFilterBy() if (!isHttps()) { @@ -380,7 +403,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, monitor = monitor, listOf(TEST_HR_BACKEND_ROLE, "role2")) @@ -396,19 +419,21 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf("role2"), - getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS) - ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() - - val getMonitorResponse = getUserClient?.makeRequest( - "GET", - "$ALERTING_BASE_URI/${createdMonitor.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS), ) + val getUserClient = + 
SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() + + val getMonitorResponse = + getUserClient?.makeRequest( + "GET", + "$ALERTING_BASE_URI/${createdMonitor.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get monitor failed", RestStatus.OK, getMonitorResponse?.restStatus()) // Remove backend role and ensure no access is granted after @@ -418,7 +443,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { "GET", "$ALERTING_BASE_URI/${createdMonitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -430,6 +455,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create monitor with enable filter by with no backend roles`() { enableFilterBy() if (!isHttps()) { @@ -443,7 +469,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) try { @@ -457,6 +483,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create monitor as admin with enable filter by with no backend roles`() { enableFilterBy() if (!isHttps()) { @@ -470,7 +497,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(client(), monitor = monitor, listOf()) @@ -481,7 +508,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { "GET", "$ALERTING_BASE_URI/${createdMonitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden 
exception") } catch (e: ResponseException) { @@ -492,6 +519,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create monitor with enable filter by with roles user has no access and throw exception`() { enableFilterBy() if (!isHttps()) { @@ -505,7 +533,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) try { @@ -519,6 +547,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create monitor as admin with enable filter by with a user have access and without role has no access`() { enableFilterBy() if (!isHttps()) { @@ -537,15 +566,16 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS), ) - val getMonitorResponse = userClient?.makeRequest( - "GET", - "$ALERTING_BASE_URI/${createdMonitor.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getMonitorResponse = + userClient?.makeRequest( + "GET", + "$ALERTING_BASE_URI/${createdMonitor.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get monitor failed", RestStatus.OK, getMonitorResponse?.restStatus()) // Remove good backend role and ensure no access is granted after @@ -555,7 +585,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { "GET", "$ALERTING_BASE_URI/${createdMonitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -565,6 +595,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update monitor with enable filter by with removing a permission`() { 
enableFilterBy() if (!isHttps()) { @@ -578,7 +609,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, monitor = monitor, listOf(TEST_HR_BACKEND_ROLE, "role2")) @@ -591,19 +622,21 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf("role2"), - getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS) - ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() - - val getMonitorResponse = getUserClient?.makeRequest( - "GET", - "$ALERTING_BASE_URI/${createdMonitor.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS), ) + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() + + val getMonitorResponse = + getUserClient?.makeRequest( + "GET", + "$ALERTING_BASE_URI/${createdMonitor.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get monitor failed", RestStatus.OK, getMonitorResponse?.restStatus()) // Remove backend role from monitor @@ -615,7 +648,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { "GET", "$ALERTING_BASE_URI/${updatedMonitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -629,6 +662,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update monitor with enable filter by with no backend roles`() { enableFilterBy() if (!isHttps()) { @@ 
-642,7 +676,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, monitor = monitor, listOf("role2")) @@ -658,6 +692,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update monitor as admin with enable filter by with no backend roles`() { enableFilterBy() if (!isHttps()) { @@ -671,18 +706,19 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(client(), monitor = monitor, listOf(TEST_HR_BACKEND_ROLE)) assertNotNull("The monitor was not created", createdMonitor) - val getMonitorResponse = userClient?.makeRequest( - "GET", - "$ALERTING_BASE_URI/${createdMonitor.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getMonitorResponse = + userClient?.makeRequest( + "GET", + "$ALERTING_BASE_URI/${createdMonitor.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get monitor failed", RestStatus.OK, getMonitorResponse?.restStatus()) val updatedMonitor = updateMonitorWithClient(client(), createdMonitor, listOf()) @@ -692,7 +728,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { "GET", "$ALERTING_BASE_URI/${updatedMonitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -703,6 +739,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update monitor with enable filter by with updating with a permission user has no access to and throw exception`() { enableFilterBy() if (!isHttps()) { @@ -716,7 +753,7 @@ class 
SecureMonitorRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, monitor = monitor, listOf(TEST_HR_BACKEND_ROLE, "role2")) @@ -729,19 +766,21 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf("role2"), - getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS) - ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() - - val getMonitorResponse = getUserClient?.makeRequest( - "GET", - "$ALERTING_BASE_URI/${createdMonitor.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS), ) + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() + + val getMonitorResponse = + getUserClient?.makeRequest( + "GET", + "$ALERTING_BASE_URI/${createdMonitor.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get monitor failed", RestStatus.OK, getMonitorResponse?.restStatus()) try { @@ -758,6 +797,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update monitor as another user with enable filter by with removing a permission and adding permission`() { enableFilterBy() if (!isHttps()) { @@ -771,7 +811,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, monitor = monitor, listOf(TEST_HR_BACKEND_ROLE)) @@ -783,13 +823,14 @@ class SecureMonitorRestApiIT : 
AlertingRestTestCase() { updateUser, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role5"), - false + false, ) - val updateUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), updateUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val updateUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), updateUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() val updatedMonitor = updateMonitorWithClient(updateUserClient, createdMonitor, listOf("role5")) // old user should no longer have access @@ -798,7 +839,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { "GET", "$ALERTING_BASE_URI/${updatedMonitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -811,6 +852,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update monitor as admin with enable filter by with removing a permission`() { enableFilterBy() if (!isHttps()) { @@ -824,7 +866,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, monitor = monitor, listOf(TEST_HR_BACKEND_ROLE, "role2")) @@ -837,19 +879,21 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf("role1", "role2"), - getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS) - ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() - - val getMonitorResponse = getUserClient?.makeRequest( - "GET", - 
"$ALERTING_BASE_URI/${createdMonitor.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + getClusterPermissionsFromCustomRole(ALERTING_GET_MONITOR_ACCESS), ) + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() + + val getMonitorResponse = + getUserClient?.makeRequest( + "GET", + "$ALERTING_BASE_URI/${createdMonitor.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get monitor failed", RestStatus.OK, getMonitorResponse?.restStatus()) // Remove backend role from monitor @@ -861,7 +905,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { "GET", "$ALERTING_BASE_URI/${updatedMonitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -877,7 +921,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { "GET", "$ALERTING_BASE_URI/${updatedMonitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -889,6 +933,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test delete monitor with disable filter by`() { disableFilterBy() val monitor = randomQueryLevelMonitor(enabled = true) @@ -902,22 +947,25 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", createdMonitor.id)).toString() // search as "admin" - must get 0 docs - val adminSearchResponse = client().makeRequest( - "POST", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val adminSearchResponse = + client().makeRequest( + "POST", + 
"$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, adminSearchResponse.restStatus()) - val adminHits = createParser( - XContentType.JSON.xContent(), - adminSearchResponse.entity.content - ).map()["hits"]!! as Map> + val adminHits = + createParser( + XContentType.JSON.xContent(), + adminSearchResponse.entity.content, + ).map()["hits"]!! as Map> val adminDocsFound = adminHits["total"]?.get("value") assertEquals("Monitor found during search", 0, adminDocsFound) } + @Test fun `test delete monitor with enable filter by`() { enableFilterBy() if (!isHttps()) { @@ -936,18 +984,20 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", createdMonitor.id)).toString() // search as "admin" - must get 0 docs - val adminSearchResponse = client().makeRequest( - "POST", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val adminSearchResponse = + client().makeRequest( + "POST", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, adminSearchResponse.restStatus()) - val adminHits = createParser( - XContentType.JSON.xContent(), - adminSearchResponse.entity.content - ).map()["hits"]!! as Map> + val adminHits = + createParser( + XContentType.JSON.xContent(), + adminSearchResponse.entity.content, + ).map()["hits"]!! 
as Map> val adminDocsFound = adminHits["total"]?.get("value") assertEquals("Monitor found during search", 0, adminDocsFound) } @@ -955,6 +1005,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { /* TODO: https://github.com/opensearch-project/alerting/issues/300 */ + @Test fun `test query monitors with disable filter by`() { disableFilterBy() @@ -963,21 +1014,23 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", monitor.id)).toString() // search as "admin" - must get 1 docs - val adminSearchResponse = client().makeRequest( - "POST", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val adminSearchResponse = + client().makeRequest( + "POST", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, adminSearchResponse.restStatus()) assertEquals("Monitor not found during search", 1, getDocs(adminSearchResponse)) // search as userOne without alerting roles - must return 403 Forbidden try { userClient?.makeRequest( - "POST", "$ALERTING_BASE_URI/_search", + "POST", + "$ALERTING_BASE_URI/_search", emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) + StringEntity(search, ContentType.APPLICATION_JSON), ) fail("Expected 403 FORBIDDEN response") } catch (e: ResponseException) { @@ -989,15 +1042,16 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_SEARCH_MONITOR_ONLY_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_SEARCH_MONITOR_ONLY_ACCESS), ) try { - val userOneSearchResponse = userClient?.makeRequest( - "POST", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val userOneSearchResponse = + userClient?.makeRequest( + "POST", + 
"$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, userOneSearchResponse?.restStatus()) assertEquals("Monitor not found during search", 1, getDocs(userOneSearchResponse)) } finally { @@ -1005,8 +1059,8 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test query monitors with enable filter by`() { - enableFilterBy() // creates monitor as "admin" user. @@ -1014,21 +1068,23 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", monitor.id)).toString() // search as "admin" - must get 1 docs - val adminSearchResponse = client().makeRequest( - "POST", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val adminSearchResponse = + client().makeRequest( + "POST", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, adminSearchResponse.restStatus()) assertEquals("Monitor not found during search", 1, getDocs(adminSearchResponse)) // search as userOne without alerting roles - must return 403 Forbidden try { userClient?.makeRequest( - "POST", "$ALERTING_BASE_URI/_search", + "POST", + "$ALERTING_BASE_URI/_search", emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) + StringEntity(search, ContentType.APPLICATION_JSON), ) fail("Expected 403 FORBIDDEN response") } catch (e: ResponseException) { @@ -1038,12 +1094,13 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { // add alerting roles and search as userOne - must return 0 docs createUserRolesMapping(ALERTING_FULL_ACCESS_ROLE, arrayOf(user)) try { - val userOneSearchResponse = userClient?.makeRequest( - "POST", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val userOneSearchResponse = + 
userClient?.makeRequest( + "POST", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, userOneSearchResponse?.restStatus()) assertEquals("Monitor not found during search", 0, getDocs(userOneSearchResponse)) } finally { @@ -1051,23 +1108,25 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test execute monitor with an user with execute monitor access`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_EXECUTE_MONITOR_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_EXECUTE_MONITOR_ACCESS), ) val monitor = createRandomMonitor(true) try { - val executeMonitorResponse = userClient?.makeRequest( - "POST", - "$ALERTING_BASE_URI/${monitor.id}/_execute", - mutableMapOf() - ) + val executeMonitorResponse = + userClient?.makeRequest( + "POST", + "$ALERTING_BASE_URI/${monitor.id}/_execute", + mutableMapOf(), + ) assertEquals("Get monitor failed", RestStatus.OK, executeMonitorResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) @@ -1076,24 +1135,24 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { /* TODO: https://github.com/opensearch-project/alerting/issues/300 - */ + */ + @Test fun `test execute monitor with an user without execute monitor access`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) val monitor = createRandomMonitor(true) try { - userClient?.makeRequest( "POST", "$ALERTING_BASE_URI/${monitor.id}/_execute", - mutableMapOf() + mutableMapOf(), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -1103,25 +1162,27 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test 
fun `test delete monitor with an user with delete monitor access`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_DELETE_MONITOR_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_DELETE_MONITOR_ACCESS), ) val monitor = createRandomMonitor(true) val refresh = true try { - val deleteMonitorResponse = userClient?.makeRequest( - "DELETE", - "$ALERTING_BASE_URI/${monitor.id}?refresh=$refresh", - emptyMap(), - monitor.toHttpEntity() - ) + val deleteMonitorResponse = + userClient?.makeRequest( + "DELETE", + "$ALERTING_BASE_URI/${monitor.id}?refresh=$refresh", + emptyMap(), + monitor.toHttpEntity(), + ) assertEquals("Get monitor failed", RestStatus.OK, deleteMonitorResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) @@ -1131,13 +1192,14 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { /* TODO: https://github.com/opensearch-project/alerting/issues/300 */ + @Test fun `test delete monitor with an user without delete monitor access`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) val monitor = createRandomMonitor(true) @@ -1148,7 +1210,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { "DELETE", "$ALERTING_BASE_URI/${monitor.id}?refresh=$refresh", emptyMap(), - monitor.toHttpEntity() + monitor.toHttpEntity(), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -1158,8 +1220,8 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test query all alerts in all states with disabled filter by`() { - disableFilterBy() putAlertMappings() val monitor = createRandomMonitor(refresh = true) @@ -1194,8 +1256,8 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test query all 
alerts in all states with filter by`() { - enableFilterBy() putAlertMappings() val adminUser = User(ADMIN, listOf(ADMIN), listOf(ALL_ACCESS_ROLE), listOf()) @@ -1230,6 +1292,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test query all alerts in all states with filter by1`() { enableFilterBy() putAlertMappings() @@ -1274,8 +1337,8 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test get alerts with an user with get alerts role`() { - putAlertMappings() val ackAlertsUser = User(ADMIN, listOf(ADMIN), listOf(ALERTING_GET_ALERTS_ACCESS), listOf()) var monitor = createRandomMonitor(refresh = true).copy(user = ackAlertsUser) @@ -1298,7 +1361,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_GET_ALERTS_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_GET_ALERTS_ACCESS), ) try { val responseMap = getAlerts(userClient as RestClient, inputMap).asMap() @@ -1309,20 +1372,21 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } // Execute Monitor related security tests - + @Test fun `test execute monitor with elevate permissions`() { - val action = randomAction(template = randomTemplateScript("Hello {{ctx.monitor.name}}"), destinationId = createDestination().id) - val inputs = listOf( - SearchInput( - indices = kotlin.collections.listOf(TEST_NON_HR_INDEX), - query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) + val inputs = + listOf( + SearchInput( + indices = kotlin.collections.listOf(TEST_NON_HR_INDEX), + query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()), + ), + ) + val monitor = + randomQueryLevelMonitor( + triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), + inputs = inputs, ) - ) - val monitor = randomQueryLevelMonitor( - triggers = listOf(randomQueryLevelTrigger(condition = ALWAYS_RUN, actions = listOf(action))), 
- inputs = inputs - ) // Make sure the elevating the permissions fails execute. val adminUser = User(ADMIN, listOf(ADMIN), listOf(ALL_ACCESS_ROLE), listOf()) @@ -1340,57 +1404,66 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test admin all access with enable filter by`() { - enableFilterBy() createUserWithTestData(user, TEST_HR_INDEX, TEST_HR_ROLE, TEST_HR_BACKEND_ROLE) createUserRolesMapping(ALERTING_FULL_ACCESS_ROLE, arrayOf(user)) try { // randomMonitor has a dummy user, api ignores the User passed as part of monitor, it picks user info from the logged-in user. - val monitor = randomQueryLevelMonitor().copy( - inputs = listOf( - SearchInput( - indices = listOf(TEST_HR_INDEX), query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - ) + val monitor = + randomQueryLevelMonitor().copy( + inputs = + listOf( + SearchInput( + indices = listOf(TEST_HR_INDEX), + query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()), + ), + ), ) - ) val createResponse = userClient?.makeRequest("POST", ALERTING_BASE_URI, emptyMap(), monitor.toHttpEntity()) assertEquals("Create monitor failed", RestStatus.CREATED, createResponse?.restStatus()) - val monitorJson = JsonXContent.jsonXContent.createParser( - NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, - createResponse?.entity?.content - ).map() + val monitorJson = + JsonXContent.jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + createResponse?.entity?.content, + ).map() val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", monitorJson["_id"])).toString() // search as "admin" - must get 1 docs - val adminSearchResponse = client().makeRequest( - "POST", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val adminSearchResponse = + client().makeRequest( + "POST", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, 
ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, adminSearchResponse.restStatus()) assertEquals("Monitor not found during search", 1, getDocs(adminSearchResponse)) // get as "admin" - must get 1 docs val id: String = monitorJson["_id"] as String - val adminGetResponse = client().makeRequest( - "GET", - "$ALERTING_BASE_URI/$id", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val adminGetResponse = + client().makeRequest( + "GET", + "$ALERTING_BASE_URI/$id", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Get monitor failed", RestStatus.OK, adminGetResponse.restStatus()) // delete as "admin" - val adminDeleteResponse = client().makeRequest( - "DELETE", - "$ALERTING_BASE_URI/$id", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val adminDeleteResponse = + client().makeRequest( + "DELETE", + "$ALERTING_BASE_URI/$id", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Delete monitor failed", RestStatus.OK, adminDeleteResponse.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) @@ -1401,6 +1474,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { /* TODO: https://github.com/opensearch-project/alerting/issues/300 */ + @Test fun `test execute query-level monitor with user role using static DLS`() { createUser(user, arrayOf(TEST_HR_BACKEND_ROLE)) createTestIndex(TEST_HR_INDEX) @@ -1408,43 +1482,47 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { TEST_HR_ROLE, TEST_HR_INDEX, TERM_DLS_QUERY, - getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS), ) createUserRolesMapping(TEST_HR_ROLE, arrayOf(user)) // Add a doc that is accessible to the user indexDoc( - TEST_HR_INDEX, "1", + TEST_HR_INDEX, + "1", """ { "test_field": "a", "accessible": true } - """.trimIndent() + """.trimIndent(), ) // Add a second doc 
that is not accessible to the user indexDoc( - TEST_HR_INDEX, "2", + TEST_HR_INDEX, + "2", """ { "test_field": "b", "accessible": false } - """.trimIndent() + """.trimIndent(), ) val input = SearchInput(indices = listOf(TEST_HR_INDEX), query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery())) - val triggerScript = """ + val triggerScript = + """ // make sure there is exactly one hit return ctx.results[0].hits.hits.size() == 1 - """.trimIndent() + """.trimIndent() val trigger = randomQueryLevelTrigger(condition = Script(triggerScript)).copy(actions = listOf()) - val monitor = createMonitorWithClient( - userClient!!, - randomQueryLevelMonitor(inputs = listOf(input), triggers = listOf(trigger)) - ) + val monitor = + createMonitorWithClient( + userClient!!, + randomQueryLevelMonitor(inputs = listOf(input), triggers = listOf(trigger)), + ) try { executeMonitor(monitor.id) @@ -1455,6 +1533,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test execute query-level monitor with user role using dynamic DLS with parameter substitution`() { createUserWithAttributes(user, arrayOf(TEST_HR_BACKEND_ROLE), mapOf("team" to "red")) createTestIndex(TEST_HR_INDEX) @@ -1465,43 +1544,47 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { TEST_HR_ROLE, TEST_HR_INDEX, dlsQuery, - getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS), ) createUserRolesMapping(TEST_HR_ROLE, arrayOf(user)) // Add a doc that is accessible to the user indexDoc( - TEST_HR_INDEX, "1", + TEST_HR_INDEX, + "1", """ { "test_field": "a", "team": "red" } - """.trimIndent() + """.trimIndent(), ) // Add a second doc that is not accessible to the user indexDoc( - TEST_HR_INDEX, "2", + TEST_HR_INDEX, + "2", """ { "test_field": "b", "team": "blue" } - """.trimIndent() + """.trimIndent(), ) val input = SearchInput(indices = listOf(TEST_HR_INDEX), query = 
SearchSourceBuilder().query(QueryBuilders.matchAllQuery())) - val triggerScript = """ + val triggerScript = + """ // make sure there is exactly one hit return ctx.results[0].hits.hits.size() == 1 - """.trimIndent() + """.trimIndent() val trigger = randomQueryLevelTrigger(condition = Script(triggerScript)).copy(actions = listOf()) - val monitor = createMonitorWithClient( - userClient!!, - randomQueryLevelMonitor(inputs = listOf(input), triggers = listOf(trigger)) - ) + val monitor = + createMonitorWithClient( + userClient!!, + randomQueryLevelMonitor(inputs = listOf(input), triggers = listOf(trigger)), + ) try { executeMonitor(monitor.id) @@ -1512,6 +1595,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { } } + @Test fun `test execute bucket-level monitor with user having partial index permissions`() { createUser(user, arrayOf(TEST_HR_BACKEND_ROLE)) createTestIndex(TEST_HR_INDEX) @@ -1519,59 +1603,66 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { TEST_HR_ROLE, TEST_HR_INDEX, TERM_DLS_QUERY, - getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS), ) createUserRolesMapping(TEST_HR_ROLE, arrayOf(user)) // Add a doc that is accessible to the user indexDoc( - TEST_HR_INDEX, "1", + TEST_HR_INDEX, + "1", """ { "test_field": "a", "accessible": true } - """.trimIndent() + """.trimIndent(), ) // Add a second doc that is not accessible to the user indexDoc( - TEST_HR_INDEX, "2", + TEST_HR_INDEX, + "2", """ { "test_field": "b", "accessible": false } - """.trimIndent() - ) - - val compositeSources = listOf( - TermsValuesSourceBuilder("test_field").field("test_field") + """.trimIndent(), ) + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) - val input = SearchInput( - indices = listOf(TEST_HR_INDEX), - query = 
SearchSourceBuilder().size(0).query(QueryBuilders.matchAllQuery()).aggregation(compositeAgg) - ) - val triggerScript = """ + val input = + SearchInput( + indices = listOf(TEST_HR_INDEX), + query = SearchSourceBuilder().size(0).query(QueryBuilders.matchAllQuery()).aggregation(compositeAgg), + ) + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null - ), - actions = listOf() - ) - val monitor = createMonitorWithClient( - userClient!!, - randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger)) - ) + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), + actions = listOf(), + ) + val monitor = + createMonitorWithClient( + userClient!!, + randomBucketLevelMonitor(inputs = listOf(input), enabled = false, triggers = listOf(trigger)), + ) try { executeMonitor(monitor.id) @@ -1585,6 +1676,7 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { /** * We want to verify that user roles/permissions do not affect clean up of monitors during partial monitor creation failure */ + @Test fun `test create monitor failure clean up with a user without delete monitor access`() { enableFilterBy() createUser(user, listOf(TEST_HR_BACKEND_ROLE, "role2").toTypedArray()) @@ -1592,49 +1684,53 @@ class SecureMonitorRestApiIT : AlertingRestTestCase() { createCustomIndexRole( ALERTING_INDEX_MONITOR_ACCESS, TEST_HR_INDEX, - getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS) + 
getClusterPermissionsFromCustomRole(ALERTING_INDEX_MONITOR_ACCESS), ) createUserWithRoles( user, listOf(ALERTING_INDEX_MONITOR_ACCESS, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val docLevelQueryIndex = ".opensearch-alerting-queries-000001" createIndex( - docLevelQueryIndex, Settings.EMPTY, + docLevelQueryIndex, + Settings.EMPTY, """ - "properties" : { - "query": { - "type": "percolator_ext" - }, - "monitor_id": { - "type": "text" - }, - "index": { - "type": "text" - } - } - } + "properties" : { + "query": { + "type": "percolator_ext" + }, + "monitor_id": { + "type": "text" + }, + "index": { + "type": "text" + } + } + } """.trimIndent(), - ".opensearch-alerting-queries" + ".opensearch-alerting-queries", ) closeIndex(docLevelQueryIndex) // close index to simulate doc level query indexing failure try { - val monitor = randomDocumentLevelMonitor( - withMetadata = false, - triggers = listOf(), - inputs = listOf(DocLevelMonitorInput("description", listOf(TEST_HR_INDEX), emptyList())) - ) + val monitor = + randomDocumentLevelMonitor( + withMetadata = false, + triggers = listOf(), + inputs = listOf(DocLevelMonitorInput("description", listOf(TEST_HR_INDEX), emptyList())), + ) userClient?.makeRequest("POST", ALERTING_BASE_URI, emptyMap(), monitor.toHttpEntity()) fail("Monitor creation should have failed due to error in indexing doc level queries") } catch (e: ResponseException) { val search = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).size(10).toString() - val searchResponse = client().makeRequest( - "GET", "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val searchResponse = + client().makeRequest( + "GET", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) val xcp = createParser(XContentType.JSON.xContent(), searchResponse.entity.content) val hits = xcp.map()["hits"]!! 
as Map> val numberDocsFound = hits["total"]?.get("value") diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorV2RestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorV2RestApiIT.kt index ef82094bb..ceac35c59 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorV2RestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureMonitorV2RestApiIT.kt @@ -38,6 +38,7 @@ import org.opensearch.core.rest.RestStatus import org.opensearch.index.query.QueryBuilders import org.opensearch.search.builder.SearchSourceBuilder import java.time.temporal.ChronoUnit.MINUTES +import kotlin.test.Test /*** * Tests Alerting V2 CRUD with role-based access control @@ -47,7 +48,6 @@ import java.time.temporal.ChronoUnit.MINUTES * --tests "org.opensearch.alerting.resthandler.SecureMonitorV2RestApiIT" */ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { - companion object { @BeforeClass @JvmStatic fun setup() { @@ -64,10 +64,11 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { client().updateSettings(AlertingV2Settings.ALERTING_V2_ENABLED.key, "true") if (userClient == null) { createUser(user, arrayOf()) - userClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), user, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + userClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), user, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() } } @@ -77,6 +78,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { deleteUser(user) } + @Test fun `test create monitor as user without alerting access fails`() { if (!isHttps()) { return @@ -89,13 +91,13 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { TEST_INDEX_NAME, "custom_role", listOf(), - null + null, ) try { createMonitorV2WithClient( userClient!!, - monitorV2 = pplMonitorConfig + 
monitorV2 = pplMonitorConfig, ) fail("Expected create monitor to fail as user does not have permissions to call alerting APIs") } catch (e: ResponseException) { @@ -105,6 +107,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test create monitor that queries index user doesn't have access to fails`() { if (!isHttps()) { return @@ -112,22 +115,23 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { createIndex("some_index", Settings.EMPTY) - val pplMonitorConfig = randomPPLMonitor( - query = "source = some_index | head 10" - ) + val pplMonitorConfig = + randomPPLMonitor( + query = "source = some_index | head 10", + ) createUserWithTestDataAndCustomRole( user, "other_index", "custom_role", listOf(), - getClusterPermissionsFromCustomRole(ALL_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALL_ACCESS_ROLE), ) try { createMonitorV2WithClient( userClient!!, - monitorV2 = pplMonitorConfig + monitorV2 = pplMonitorConfig, ) fail("Expected create monitor to fail as user does not have permissions to index that monitor queries") } catch (e: ResponseException) { @@ -137,6 +141,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test update monitor that queries index user doesn't have access to fails`() { if (!isHttps()) { return @@ -153,7 +158,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf(backendRole), - false + false, ) // this function automatically creates index TEST_INDEX_NAME, then a monitor that queries it @@ -175,13 +180,14 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { "unrelated_index", "unrelated_role", listOf(backendRole), - listOf(ROLE_TO_PERMISSION_MAPPING[ALL_ACCESS_ROLE]) + listOf(ROLE_TO_PERMISSION_MAPPING[ALL_ACCESS_ROLE]), ) - val noIndicesUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), noIndicesUser, password) - 
.setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val noIndicesUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), noIndicesUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() // update some field that isn't the PPL query and the index it's querying val newMonitor = pplMonitorConfig.makeCopy(name = "some_random_name") @@ -194,7 +200,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { "PUT", "$MONITOR_V2_BASE_URI/${pplMonitor.id}", newMonitor.toHttpEntity(), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected update monitor to fail as user does not have permissions to index that monitor queries") } catch (e: ResponseException) { @@ -205,6 +211,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { noIndicesUserClient.close() } + @Test fun `test RBAC create monitor with backend roles user has access to succeeds`() { enableFilterBy() if (!isHttps()) { @@ -217,7 +224,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) createMonitorV2WithClient(userClient!!, monitorV2 = pplMonitorConfig, listOf("backend_role_a")) @@ -225,6 +232,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(1) } + @Test fun `test RBAC create monitor with backend roles user has no access to fails`() { enableFilterBy() if (!isHttps()) { @@ -237,14 +245,14 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) try { createMonitorV2WithClient( userClient!!, monitorV2 = pplMonitorConfig, - listOf("backend_role_a", "backend_role_b", "backend_role_c") + listOf("backend_role_a", "backend_role_b", "backend_role_c"), 
) fail("Expected create monitor to fail as user does not have backend_role_c backend role") } catch (e: ResponseException) { @@ -254,6 +262,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { ensureNumMonitorV2s(0) } + @Test fun `test RBAC update monitorV2 as user with correct backend roles succeeds`() { enableFilterBy() if (!isHttps()) { @@ -265,7 +274,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) val pplMonitor = createMonitorV2WithClient(userClient!!, pplMonitorConfig, listOf("backend_role_a", "backend_role_b")) @@ -277,27 +286,30 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { updateUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a"), - true + true, ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), updateUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), updateUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() val newMonitor = randomPPLMonitor() - val updateMonitorResponse = getUserClient!!.makeRequest( - "PUT", - "$MONITOR_V2_BASE_URI/${pplMonitor.id}", - newMonitor.toHttpEntity(), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val updateMonitorResponse = + getUserClient!!.makeRequest( + "PUT", + "$MONITOR_V2_BASE_URI/${pplMonitor.id}", + newMonitor.toHttpEntity(), + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Update monitorV2 failed", RestStatus.OK, updateMonitorResponse.restStatus()) // cleanup getUserClient.close() } + @Test fun `test RBAC update monitorV2 as user without correct backend roles fails`() { enableFilterBy() if (!isHttps()) { @@ -309,7 +321,7 @@ class SecureMonitorV2RestApiIT : 
AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) val pplMonitor = createMonitorV2WithClient(userClient!!, pplMonitorConfig, listOf("backend_role_a", "backend_role_b")) @@ -321,13 +333,14 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { updateUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_c"), - true + true, ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), updateUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), updateUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() val newMonitor = randomPPLMonitor() @@ -336,7 +349,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { "PUT", "$MONITOR_V2_BASE_URI/${pplMonitor.id}", newMonitor.toHttpEntity(), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected update monitor to fail as user does not have the correct backend roles") } catch (e: ResponseException) { @@ -347,6 +360,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { getUserClient.close() } + @Test fun `test RBAC get monitorV2 as user with correct backend roles succeeds`() { enableFilterBy() if (!isHttps()) { @@ -358,7 +372,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) val pplMonitor = createMonitorV2WithClient(userClient!!, pplMonitorConfig, listOf("backend_role_a", "backend_role_b")) @@ -370,26 +384,29 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { getUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a"), - true + true, 
) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() - val getMonitorResponse = getUserClient!!.makeRequest( - "GET", - "$MONITOR_V2_BASE_URI/${pplMonitor.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getMonitorResponse = + getUserClient!!.makeRequest( + "GET", + "$MONITOR_V2_BASE_URI/${pplMonitor.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get monitorV2 failed", RestStatus.OK, getMonitorResponse.restStatus()) // cleanup getUserClient.close() } + @Test fun `test RBAC get monitorV2 as user without correct backend roles fails`() { enableFilterBy() if (!isHttps()) { @@ -401,7 +418,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) val pplMonitor = createMonitorV2WithClient(userClient!!, pplMonitorConfig, listOf("backend_role_a", "backend_role_b")) @@ -413,20 +430,21 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { getUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_c"), - true + true, ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() try { getUserClient!!.makeRequest( "GET", "$MONITOR_V2_BASE_URI/${pplMonitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") 
+ BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -436,6 +454,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { } } + @Test fun `test RBAC search monitorV2 as user with correct backend roles returns results`() { enableFilterBy() if (!isHttps()) { @@ -447,7 +466,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) createMonitorV2WithClient(userClient!!, pplMonitorConfig, listOf("backend_role_a", "backend_role_b")) @@ -459,21 +478,23 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { searchUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a"), - true + true, ) - val searchUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), searchUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val searchUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), searchUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() val search = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).toString() - val searchMonitorResponse = searchUserClient!!.makeRequest( - "POST", - "$MONITOR_V2_BASE_URI/_search", - StringEntity(search, ContentType.APPLICATION_JSON), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val searchMonitorResponse = + searchUserClient!!.makeRequest( + "POST", + "$MONITOR_V2_BASE_URI/_search", + StringEntity(search, ContentType.APPLICATION_JSON), + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Search monitorV2 failed", RestStatus.OK, searchMonitorResponse.restStatus()) createParser(XContentType.JSON.xContent(), searchMonitorResponse.entity.content).use { xcp -> @@ -487,6 +508,7 @@ class SecureMonitorV2RestApiIT : 
AlertingRestTestCase() { searchUserClient.close() } + @Test fun `test RBAC search monitorV2 as user without correct backend roles returns no results`() { enableFilterBy() if (!isHttps()) { @@ -498,7 +520,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) createMonitorV2WithClient(userClient!!, pplMonitorConfig, listOf("backend_role_a", "backend_role_b")) @@ -510,21 +532,23 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { searchUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_c"), - true + true, ) - val searchUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), searchUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val searchUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), searchUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() val search = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).toString() - val searchMonitorResponse = searchUserClient!!.makeRequest( - "POST", - "$MONITOR_V2_BASE_URI/_search", - StringEntity(search, ContentType.APPLICATION_JSON), - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val searchMonitorResponse = + searchUserClient!!.makeRequest( + "POST", + "$MONITOR_V2_BASE_URI/_search", + StringEntity(search, ContentType.APPLICATION_JSON), + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Search monitorV2 failed", RestStatus.OK, searchMonitorResponse.restStatus()) createParser(XContentType.JSON.xContent(), searchMonitorResponse.entity.content).use { xcp -> @@ -537,6 +561,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { searchUserClient.close() } + @Test fun `test RBAC execute monitorV2 as user with correct backend roles succeeds`() { enableFilterBy() if 
(!isHttps()) { @@ -548,7 +573,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) val pplMonitor = createMonitorV2WithClient(userClient!!, pplMonitorConfig, listOf("backend_role_a", "backend_role_b")) @@ -560,26 +585,29 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { executeUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a"), - true + true, ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), executeUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), executeUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() - val getMonitorResponse = getUserClient!!.makeRequest( - "POST", - "$MONITOR_V2_BASE_URI/${pplMonitor.id}/_execute", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getMonitorResponse = + getUserClient!!.makeRequest( + "POST", + "$MONITOR_V2_BASE_URI/${pplMonitor.id}/_execute", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get monitorV2 failed", RestStatus.OK, getMonitorResponse.restStatus()) // cleanup getUserClient.close() } + @Test fun `test RBAC execute monitorV2 as user without correct backend roles fails`() { enableFilterBy() if (!isHttps()) { @@ -591,7 +619,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) val pplMonitor = createMonitorV2WithClient(userClient!!, pplMonitorConfig, listOf("backend_role_a", "backend_role_b")) @@ -603,20 +631,21 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { executeUser, listOf(ALERTING_FULL_ACCESS_ROLE, 
PPL_FULL_ACCESS_ROLE), listOf("backend_role_c"), - true + true, ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), executeUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), executeUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() try { getUserClient!!.makeRequest( "POST", "$MONITOR_V2_BASE_URI/${pplMonitor.id}/_execute", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -626,6 +655,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { } } + @Test fun `test RBAC get alerts v2 as user with correct backend roles succeeds`() { enableFilterBy() if (!isHttps()) { @@ -635,38 +665,41 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(2, MINUTES, "abc", 5) - val pplMonitorConfig = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 1, unit = MINUTES), - lookBackWindow = null, - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - expireDuration = 5, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitorConfig = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + lookBackWindow = null, + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + expireDuration = 5, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + 
numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) createUserWithRoles( user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) - val pplMonitor = createMonitorV2WithClient( - userClient!!, - pplMonitorConfig, - null - ) as PPLSQLMonitor + val pplMonitor = + createMonitorV2WithClient( + userClient!!, + pplMonitorConfig, + null, + ) as PPLSQLMonitor val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -682,20 +715,22 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { getAlertsUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a"), - true + true, ) - val getAlertsUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getAlertsUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val getAlertsUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getAlertsUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() - val getAlertsResponse = getAlertsUserClient!!.makeRequest( - "GET", - "$MONITOR_V2_BASE_URI/alerts", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getAlertsResponse = + getAlertsUserClient!!.makeRequest( + "GET", + "$MONITOR_V2_BASE_URI/alerts", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get alerts v2 failed", RestStatus.OK, getAlertsResponse.restStatus()) val alertsGenerated = numAlerts(getAlertsResponse) > 0 @@ -705,6 +740,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { getAlertsUserClient.close() } + @Test fun `test RBAC get alerts v2 as user without correct backend roles fails`() { enableFilterBy() if (!isHttps()) { @@ -714,38 +750,41 @@ class 
SecureMonitorV2RestApiIT : AlertingRestTestCase() { createIndex(TEST_INDEX_NAME, Settings.EMPTY, TEST_INDEX_MAPPINGS) indexDocFromSomeTimeAgo(2, MINUTES, "abc", 5) - val pplMonitorConfig = createRandomPPLMonitor( - randomPPLMonitor( - enabled = true, - schedule = IntervalSchedule(interval = 1, unit = MINUTES), - lookBackWindow = null, - triggers = listOf( - randomPPLTrigger( - throttleDuration = null, - expireDuration = 5, - mode = TriggerMode.RESULT_SET, - conditionType = ConditionType.NUMBER_OF_RESULTS, - numResultsCondition = NumResultsCondition.GREATER_THAN, - numResultsValue = 0L, - customCondition = null - ) + val pplMonitorConfig = + createRandomPPLMonitor( + randomPPLMonitor( + enabled = true, + schedule = IntervalSchedule(interval = 1, unit = MINUTES), + lookBackWindow = null, + triggers = + listOf( + randomPPLTrigger( + throttleDuration = null, + expireDuration = 5, + mode = TriggerMode.RESULT_SET, + conditionType = ConditionType.NUMBER_OF_RESULTS, + numResultsCondition = NumResultsCondition.GREATER_THAN, + numResultsValue = 0L, + customCondition = null, + ), + ), + query = "source = $TEST_INDEX_NAME | head 10", ), - query = "source = $TEST_INDEX_NAME | head 10" ) - ) createUserWithRoles( user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) - val pplMonitor = createMonitorV2WithClient( - userClient!!, - pplMonitorConfig, - null - ) as PPLSQLMonitor + val pplMonitor = + createMonitorV2WithClient( + userClient!!, + pplMonitorConfig, + null, + ) as PPLSQLMonitor val executeResponse = executeMonitorV2(pplMonitor.id) val triggered = isTriggered(pplMonitor, executeResponse) @@ -757,20 +796,22 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { getAlertsUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_c"), - true + true, ) - val getAlertsUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getAlertsUser, password) - 
.setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val getAlertsUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getAlertsUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() - val getAlertsResponse = getAlertsUserClient!!.makeRequest( - "GET", - "$MONITOR_V2_BASE_URI/alerts", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getAlertsResponse = + getAlertsUserClient!!.makeRequest( + "GET", + "$MONITOR_V2_BASE_URI/alerts", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get alerts v2 failed", RestStatus.OK, getAlertsResponse.restStatus()) val alertsGenerated = numAlerts(getAlertsResponse) > 0 @@ -780,6 +821,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { getAlertsUserClient.close() } + @Test fun `test RBAC delete monitorV2 as user with correct backend roles succeeds`() { enableFilterBy() if (!isHttps()) { @@ -791,7 +833,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) val pplMonitor = createMonitorV2WithClient(userClient!!, pplMonitorConfig, listOf("backend_role_a", "backend_role_b")) @@ -803,20 +845,22 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { deleteUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a"), - true + true, ) - val deleteUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), deleteUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val deleteUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), deleteUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() - val getMonitorResponse = deleteUserClient!!.makeRequest( - "DELETE", - 
"$MONITOR_V2_BASE_URI/${pplMonitor.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getMonitorResponse = + deleteUserClient!!.makeRequest( + "DELETE", + "$MONITOR_V2_BASE_URI/${pplMonitor.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get monitorV2 failed", RestStatus.OK, getMonitorResponse.restStatus()) ensureNumMonitorV2s(0) @@ -825,6 +869,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { deleteUserClient.close() } + @Test fun `test RBAC delete monitorV2 as user without correct backend roles fails`() { enableFilterBy() if (!isHttps()) { @@ -836,7 +881,7 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_a", "backend_role_b"), - false + false, ) val pplMonitor = createMonitorV2WithClient(userClient!!, pplMonitorConfig, listOf("backend_role_a", "backend_role_b")) @@ -848,20 +893,21 @@ class SecureMonitorV2RestApiIT : AlertingRestTestCase() { deleteUser, listOf(ALERTING_FULL_ACCESS_ROLE, PPL_FULL_ACCESS_ROLE), listOf("backend_role_c"), - true + true, ) - val deleteUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), deleteUser, password) - .setSocketTimeout(60000) - .setConnectionRequestTimeout(180000) - .build() + val deleteUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), deleteUser, password) + .setSocketTimeout(60000) + .setConnectionRequestTimeout(180000) + .build() try { deleteUserClient!!.makeRequest( "DELETE", "$MONITOR_V2_BASE_URI/${pplMonitor.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureWorkflowRestApiIT.kt 
b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureWorkflowRestApiIT.kt index 25b49d3dd..b4bd70d2e 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureWorkflowRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/SecureWorkflowRestApiIT.kt @@ -61,15 +61,14 @@ import org.opensearch.search.aggregations.bucket.composite.TermsValuesSourceBuil import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.junit.annotations.TestLogging import java.time.Instant +import kotlin.test.Test // TODO investigate flaky nature of tests. not reproducible in local but fails in jenkins CI @AwaitsFix(bugUrl = "https://github.com/opensearch-project/alerting/issues/1246") @TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class SecureWorkflowRestApiIT : AlertingRestTestCase() { - companion object { - @BeforeClass @JvmStatic fun setup() { @@ -96,29 +95,33 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } // Create Workflow related security tests + @Test fun `test create workflow with an user with alerting role`() { - val clusterPermissions = listOf( - getClusterPermissionsFromCustomRole(ALERTING_INDEX_WORKFLOW_ACCESS) - ) + val clusterPermissions = + listOf( + getClusterPermissionsFromCustomRole(ALERTING_INDEX_WORKFLOW_ACCESS), + ) createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - clusterPermissions + clusterPermissions, ) try { - val monitor = createMonitor( - randomQueryLevelMonitor( - inputs = listOf(SearchInput(listOf(TEST_HR_INDEX), SearchSourceBuilder().query(QueryBuilders.matchAllQuery()))), - ), - true - ) + val monitor = + createMonitor( + randomQueryLevelMonitor( + inputs = listOf(SearchInput(listOf(TEST_HR_INDEX), SearchSourceBuilder().query(QueryBuilders.matchAllQuery()))), + ), + true, + ) - val workflow = randomWorkflow( - monitorIds = listOf(monitor.id) - ) + val workflow = + 
randomWorkflow( + monitorIds = listOf(monitor.id), + ) val createResponse = userClient?.makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), workflow.toHttpEntity()) assertEquals("Create workflow failed", RestStatus.CREATED, createResponse?.restStatus()) @@ -129,20 +132,22 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create workflow with an user without alerting role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) try { val monitor = createRandomMonitor(true) - val workflow = randomWorkflow( - monitorIds = listOf(monitor.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitor.id), + ) userClient?.makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), workflow.toHttpEntity()) fail("Expected 403 Method FORBIDDEN response") @@ -153,15 +158,17 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create workflow with an user with read-only role`() { createUserWithTestData(user, TEST_HR_INDEX, TEST_HR_ROLE, TEST_HR_BACKEND_ROLE) createUserRolesMapping(ALERTING_READ_ONLY_ACCESS, arrayOf(user)) try { val monitor = createRandomMonitor(true) - val workflow = randomWorkflow( - monitorIds = listOf(monitor.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitor.id), + ) userClient?.makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), workflow.toHttpEntity()) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -172,38 +179,43 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create workflow with delegate with an user without index read role`() { createTestIndex(TEST_NON_HR_INDEX) - val clusterPermissions = listOf( - getClusterPermissionsFromCustomRole(ALERTING_INDEX_WORKFLOW_ACCESS) - ) + val clusterPermissions = + 
listOf( + getClusterPermissionsFromCustomRole(ALERTING_INDEX_WORKFLOW_ACCESS), + ) createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - clusterPermissions + clusterPermissions, ) try { val query = randomDocLevelQuery(tags = listOf()) val triggers = listOf(randomDocumentLevelTrigger(condition = Script("query[id=\"${query.id}\"]"))) - val monitor = createMonitor( - randomDocumentLevelMonitor( - inputs = listOf( - DocLevelMonitorInput( - indices = listOf(TEST_NON_HR_INDEX), - queries = listOf(query) - ) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + inputs = + listOf( + DocLevelMonitorInput( + indices = listOf(TEST_NON_HR_INDEX), + queries = listOf(query), + ), + ), + triggers = triggers, ), - triggers = triggers - ), - true - ) + true, + ) - val workflow = randomWorkflow( - monitorIds = listOf(monitor.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitor.id), + ) userClient?.makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), workflow.toHttpEntity()) } catch (e: ResponseException) { @@ -214,36 +226,40 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create workflow with disable filter by`() { disableFilterBy() val monitor = createRandomMonitor(true) - val workflow = randomWorkflow( - monitorIds = listOf(monitor.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitor.id), + ) val createResponse = client().makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), workflow.toHttpEntity()) assertEquals("Create workflow failed", RestStatus.CREATED, createResponse.restStatus()) assertUserNull(createResponse.asMap()["workflow"] as HashMap) } + @Test fun `test get workflow with an user with get workflow role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS) + 
getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS), ) val monitor = createRandomMonitor(true) val workflow = createWorkflow(randomWorkflow(monitorIds = listOf(monitor.id))) try { - val getWorkflowResponse = userClient?.makeRequest( - "GET", - "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getWorkflowResponse = + userClient?.makeRequest( + "GET", + "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get workflow failed", RestStatus.OK, getWorkflowResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) @@ -253,13 +269,14 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { /* TODO: https://github.com/opensearch-project/alerting/issues/300 */ + @Test fun `test get workflow with an user without get monitor role`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) val monitor = createRandomMonitor(true) @@ -270,7 +287,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "GET", "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -281,21 +298,24 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } fun getDocs(response: Response?): Any? { - val hits = createParser( - XContentType.JSON.xContent(), - response?.entity?.content - ).map()["hits"]!! as Map> + val hits = + createParser( + XContentType.JSON.xContent(), + response?.entity?.content, + ).map()["hits"]!! 
as Map> return hits["total"]?.get("value") } // Query Monitors related security tests + @Test fun `test update workflow with disable filter by`() { disableFilterBy() val createdMonitor = createMonitor(monitor = randomQueryLevelMonitor(enabled = true)) - val createdWorkflow = createWorkflow( - randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true, enabledTime = Instant.now()) - ) + val createdWorkflow = + createWorkflow( + randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true, enabledTime = Instant.now()), + ) assertNotNull("The workflow was not created", createdWorkflow) assertTrue("The workflow was not enabled", createdWorkflow.enabled) @@ -306,6 +326,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { assertFalse("The monitor was not disabled", updatedWorkflow.enabled) } + @Test fun `test update workflow with enable filter by`() { enableFilterBy() if (!isHttps()) { @@ -314,14 +335,16 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { return } - val createdMonitor = createMonitorWithClient( - client = client(), - monitor = randomQueryLevelMonitor(enabled = true), - rbacRoles = listOf("admin") - ) - val createdWorkflow = createWorkflow( - randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true, enabledTime = Instant.now()) - ) + val createdMonitor = + createMonitorWithClient( + client = client(), + monitor = randomQueryLevelMonitor(enabled = true), + rbacRoles = listOf("admin"), + ) + val createdWorkflow = + createWorkflow( + randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true, enabledTime = Instant.now()), + ) assertNotNull("The workflow was not created", createdWorkflow) assertTrue("The workflow was not enabled", createdWorkflow.enabled) @@ -332,6 +355,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { assertFalse("The monitor was not disabled", updatedWorkflow.enabled) } + @Test fun `test create workflow with enable filter by with a user have access and without role has 
no access`() { enableFilterBy() if (!isHttps()) { @@ -342,22 +366,24 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) - val createdMonitor = createMonitorWithClient( - userClient!!, - monitor = randomQueryLevelMonitor(enabled = true), - listOf(TEST_HR_BACKEND_ROLE, "role2") - ) + val createdMonitor = + createMonitorWithClient( + userClient!!, + monitor = randomQueryLevelMonitor(enabled = true), + listOf(TEST_HR_BACKEND_ROLE, "role2"), + ) assertNotNull("The monitor was not created", createdMonitor) - val createdWorkflow = createWorkflowWithClient( - userClient!!, - workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true), - listOf(TEST_HR_BACKEND_ROLE, "role2") - ) + val createdWorkflow = + createWorkflowWithClient( + userClient!!, + workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true), + listOf(TEST_HR_BACKEND_ROLE, "role2"), + ) assertNotNull("The workflow was not created", createdWorkflow) createUserRolesMapping(ALERTING_FULL_ACCESS_ROLE, arrayOf()) @@ -370,17 +396,20 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf("role2"), - getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS) - ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) - .setSocketTimeout(60000).build() - - val getWorkflowResponse = getUserClient?.makeRequest( - "GET", - "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS), ) + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) + .setSocketTimeout(60000) + .build() + + val getWorkflowResponse = + getUserClient?.makeRequest( + "GET", + 
"$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get workflow failed", RestStatus.OK, getWorkflowResponse?.restStatus()) // Remove backend role and ensure no access is granted after @@ -390,7 +419,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "GET", "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -403,6 +432,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create workflow with enable filter by with a user with a backend role doesn't have access to monitor`() { enableFilterBy() if (!isHttps()) { @@ -413,14 +443,15 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) - val createdMonitor = createMonitorWithClient( - userClient!!, - monitor = randomQueryLevelMonitor(enabled = true), - listOf("role2") - ) + val createdMonitor = + createMonitorWithClient( + userClient!!, + monitor = randomQueryLevelMonitor(enabled = true), + listOf("role2"), + ) assertNotNull("The monitor was not created", createdMonitor) @@ -430,19 +461,23 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { userWithDifferentRole, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role3"), - false + false, ) - val userWithDifferentRoleClient = SecureRestClientBuilder( - clusterHosts.toTypedArray(), isHttps(), userWithDifferentRole, password - ) - .setSocketTimeout(60000).build() + val userWithDifferentRoleClient = + SecureRestClientBuilder( + clusterHosts.toTypedArray(), + isHttps(), + userWithDifferentRole, + password, + ).setSocketTimeout(60000) + .build() try { 
createWorkflowWithClient( userWithDifferentRoleClient!!, workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true), - listOf("role3") + listOf("role3"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -454,6 +489,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create workflow with enable filter by with no backend roles`() { enableFilterBy() if (!isHttps()) { @@ -469,7 +505,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) try { @@ -483,6 +519,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create workflow as admin with enable filter by with no backend roles`() { enableFilterBy() if (!isHttps()) { @@ -496,7 +533,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitor(monitor = monitor) @@ -504,12 +541,11 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { assertNotNull("The workflow was not created", createdWorkflow) try { - userClient?.makeRequest( "GET", "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -520,6 +556,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create workflow with enable filter by with roles user has no access and throw exception`() { enableFilterBy() if (!isHttps()) { @@ -534,7 +571,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) try { @@ -548,6 
+585,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test create workflow as admin with enable filter by with a user have access and without role has no access`() { enableFilterBy() if (!isHttps()) { @@ -558,11 +596,12 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { val monitor = randomQueryLevelMonitor(enabled = true) val createdMonitor = createMonitorWithClient(client(), monitor = monitor, listOf(TEST_HR_BACKEND_ROLE, "role1", "role2")) - val createdWorkflow = createWorkflowWithClient( - client(), - randomWorkflow(monitorIds = listOf(createdMonitor.id)), - listOf(TEST_HR_BACKEND_ROLE, "role1", "role2") - ) + val createdWorkflow = + createWorkflowWithClient( + client(), + randomWorkflow(monitorIds = listOf(createdMonitor.id)), + listOf(TEST_HR_BACKEND_ROLE, "role1", "role2"), + ) assertNotNull("The workflow was not created", createdWorkflow) // user should have access to the admin monitor @@ -571,15 +610,16 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS), ) - val getWorkflowResponse = userClient?.makeRequest( - "GET", - "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getWorkflowResponse = + userClient?.makeRequest( + "GET", + "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get workflow failed", RestStatus.OK, getWorkflowResponse?.restStatus()) // Remove good backend role and ensure no access is granted after @@ -589,7 +629,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "GET", "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, 
"application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -599,6 +639,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update workflow with enable filter by with removing a permission`() { enableFilterBy() if (!isHttps()) { @@ -611,14 +652,16 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, randomQueryLevelMonitor(), listOf(TEST_HR_BACKEND_ROLE, "role2")) - val createdWorkflow = createWorkflowWithClient( - client = userClient!!, workflow = randomWorkflow(enabled = true, monitorIds = listOf(createdMonitor.id)), - rbacRoles = listOf(TEST_HR_BACKEND_ROLE, "role2") - ) + val createdWorkflow = + createWorkflowWithClient( + client = userClient!!, + workflow = randomWorkflow(enabled = true, monitorIds = listOf(createdMonitor.id)), + rbacRoles = listOf(TEST_HR_BACKEND_ROLE, "role2"), + ) assertNotNull("The workflow was not created", createdWorkflow) // getUser should have access to the monitor @@ -628,17 +671,20 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf("role2"), - getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS) - ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) - .setSocketTimeout(60000).build() - - val getWorkflowResponse = getUserClient?.makeRequest( - "GET", - "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS), ) + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) + .setSocketTimeout(60000) + .build() + + val getWorkflowResponse = + getUserClient?.makeRequest( + "GET", + 
"$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get workflow failed", RestStatus.OK, getWorkflowResponse?.restStatus()) // Remove backend role from monitor @@ -650,7 +696,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "GET", "$WORKFLOW_ALERTING_BASE_URI/${updatedWorkflow.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -664,6 +710,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update workflow with enable filter by with no backend roles`() { enableFilterBy() if (!isHttps()) { @@ -677,17 +724,18 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, monitor = monitor, listOf("role2")) assertNotNull("The monitor was not created", createdMonitor) - val createdWorkflow = createWorkflowWithClient( - userClient!!, - randomWorkflow(monitorIds = listOf(createdMonitor.id)), - listOf("role2") - ) + val createdWorkflow = + createWorkflowWithClient( + userClient!!, + randomWorkflow(monitorIds = listOf(createdMonitor.id)), + listOf("role2"), + ) assertNotNull("The workflow was not created", createdWorkflow) @@ -701,6 +749,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update workflow as admin with enable filter by with no backend roles`() { enableFilterBy() if (!isHttps()) { @@ -716,26 +765,29 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false - ) - val workflow = randomWorkflow( - monitorIds = listOf(createdMonitorResponse.id) + false, ) 
+ val workflow = + randomWorkflow( + monitorIds = listOf(createdMonitorResponse.id), + ) - val createdWorkflow = createWorkflowWithClient( - client(), - workflow = workflow, - rbacRoles = listOf(TEST_HR_BACKEND_ROLE) - ) + val createdWorkflow = + createWorkflowWithClient( + client(), + workflow = workflow, + rbacRoles = listOf(TEST_HR_BACKEND_ROLE), + ) assertNotNull("The workflow was not created", createdWorkflow) - val getWorkflowResponse = userClient?.makeRequest( - "GET", - "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") - ) + val getWorkflowResponse = + userClient?.makeRequest( + "GET", + "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get workflow failed", RestStatus.OK, getWorkflowResponse?.restStatus()) val updatedWorkflow = updateWorkflowWithClient(client(), createdWorkflow, listOf()) @@ -745,7 +797,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "GET", "$WORKFLOW_ALERTING_BASE_URI/${updatedWorkflow.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -756,6 +808,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update workflow with enable filter by with updating with a permission user has no access to and throw exception`() { enableFilterBy() if (!isHttps()) { @@ -769,16 +822,18 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, monitor = monitor, listOf(TEST_HR_BACKEND_ROLE, "role2")) assertNotNull("The monitor was not created", createdMonitor) - val createdWorkflow = createWorkflowWithClient( - 
userClient!!, - workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id)), listOf(TEST_HR_BACKEND_ROLE, "role2") - ) + val createdWorkflow = + createWorkflowWithClient( + userClient!!, + workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id)), + listOf(TEST_HR_BACKEND_ROLE, "role2"), + ) assertNotNull("The workflow was not created", createdWorkflow) @@ -789,17 +844,20 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf("role2"), - getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS) - ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) - .setSocketTimeout(60000).build() - - val getWorkflowResponse = getUserClient?.makeRequest( - "GET", - "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS), ) + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) + .setSocketTimeout(60000) + .build() + + val getWorkflowResponse = + getUserClient?.makeRequest( + "GET", + "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get workflow failed", RestStatus.OK, getWorkflowResponse?.restStatus()) try { @@ -816,6 +874,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update workflow as another user with enable filter by with removing a permission and adding permission`() { enableFilterBy() if (!isHttps()) { @@ -829,16 +888,17 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, monitor = monitor, listOf(TEST_HR_BACKEND_ROLE)) assertNotNull("The monitor was not 
created", createdMonitor) - val createdWorkflow = createWorkflowWithClient( - userClient!!, - workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true) - ) + val createdWorkflow = + createWorkflowWithClient( + userClient!!, + workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true), + ) assertNotNull("The workflow was not created", createdWorkflow) @@ -848,11 +908,13 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { updateUser, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role5"), - false + false, ) - val updateUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), updateUser, password) - .setSocketTimeout(60000).build() + val updateUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), updateUser, password) + .setSocketTimeout(60000) + .build() val updatedWorkflow = updateWorkflowWithClient(updateUserClient, createdWorkflow, listOf("role5")) // old user should no longer have access @@ -861,7 +923,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "GET", "$WORKFLOW_ALERTING_BASE_URI/${updatedWorkflow.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -874,6 +936,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test update workflow as admin with enable filter by with removing a permission`() { enableFilterBy() if (!isHttps()) { @@ -887,17 +950,18 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val createdMonitor = createMonitorWithClient(userClient!!, monitor = monitor, listOf(TEST_HR_BACKEND_ROLE, "role2")) assertNotNull("The monitor was not created", createdMonitor) - val createdWorkflow 
= createWorkflowWithClient( - userClient!!, - workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id)), - listOf(TEST_HR_BACKEND_ROLE, "role2") - ) + val createdWorkflow = + createWorkflowWithClient( + userClient!!, + workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id)), + listOf(TEST_HR_BACKEND_ROLE, "role2"), + ) assertNotNull("The workflow was not created", createdWorkflow) // getUser should have access to the monitor @@ -907,17 +971,20 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { TEST_HR_INDEX, TEST_HR_ROLE, listOf("role1", "role2"), - getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS) - ) - val getUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) - .setSocketTimeout(60000).build() - - val getWorkflowResponse = getUserClient?.makeRequest( - "GET", - "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", - null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS), ) + val getUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), getUser, password) + .setSocketTimeout(60000) + .build() + + val getWorkflowResponse = + getUserClient?.makeRequest( + "GET", + "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", + null, + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), + ) assertEquals("Get workflow failed", RestStatus.OK, getWorkflowResponse?.restStatus()) // Remove backend role from monitor @@ -929,7 +996,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "GET", "$WORKFLOW_ALERTING_BASE_URI/${updatedWorkflow.id}", null, - BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -945,7 +1012,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "GET", "$WORKFLOW_ALERTING_BASE_URI/${updatedWorkflow.id}", null, - 
BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json") + BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), ) fail("Expected Forbidden exception") } catch (e: ResponseException) { @@ -957,6 +1024,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test delete workflow with disable filter by`() { disableFilterBy() val monitor = randomQueryLevelMonitor(enabled = true) @@ -972,18 +1040,20 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { val searchMonitor = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", createdMonitor.id)).toString() // Verify if the delegate monitors are deleted // search as "admin" - must get 0 docs - val adminMonitorSearchResponse = client().makeRequest( - "POST", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(searchMonitor, ContentType.APPLICATION_JSON) - ) + val adminMonitorSearchResponse = + client().makeRequest( + "POST", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(searchMonitor, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, adminMonitorSearchResponse.restStatus()) - val adminMonitorHits = createParser( - XContentType.JSON.xContent(), - adminMonitorSearchResponse.entity.content - ).map()["hits"]!! as Map> + val adminMonitorHits = + createParser( + XContentType.JSON.xContent(), + adminMonitorSearchResponse.entity.content, + ).map()["hits"]!! 
as Map> val adminMonitorDocsFound = adminMonitorHits["total"]?.get("value") assertEquals("Monitor found during search", 0, adminMonitorDocsFound) @@ -993,7 +1063,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "GET", "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", emptyMap(), - null + null, ) fail("Workflow found during search") } catch (e: ResponseException) { @@ -1001,6 +1071,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test delete workflow with enable filter by`() { enableFilterBy() if (!isHttps()) { @@ -1008,11 +1079,12 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { // refer: `test create monitor with enable filter by` return } - val createdMonitor = createMonitorWithClient( - monitor = randomQueryLevelMonitor(), - client = client(), - rbacRoles = listOf("admin") - ) + val createdMonitor = + createMonitorWithClient( + monitor = randomQueryLevelMonitor(), + client = client(), + rbacRoles = listOf("admin"), + ) assertNotNull("The monitor was not created", createdMonitor) @@ -1025,18 +1097,20 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { // Verify underlying delegates deletion val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", createdMonitor.id)).toString() // search as "admin" - must get 0 docs - val adminSearchResponse = client().makeRequest( - "POST", - "$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val adminSearchResponse = + client().makeRequest( + "POST", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, adminSearchResponse.restStatus()) - val adminHits = createParser( - XContentType.JSON.xContent(), - adminSearchResponse.entity.content - ).map()["hits"]!! as Map> + val adminHits = + createParser( + XContentType.JSON.xContent(), + adminSearchResponse.entity.content, + ).map()["hits"]!! 
as Map> val adminDocsFound = adminHits["total"]?.get("value") assertEquals("Monitor found during search", 0, adminDocsFound) @@ -1046,7 +1120,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "GET", "$WORKFLOW_ALERTING_BASE_URI/${createdWorkflow.id}", emptyMap(), - null + null, ) fail("Workflow found during search") } catch (e: ResponseException) { @@ -1054,6 +1128,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test delete workflow with enable filter with user that doesn't have delete_monitor cluster privilege failed`() { enableFilterBy() if (!isHttps()) { @@ -1065,7 +1140,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { user, listOf(ALERTING_FULL_ACCESS_ROLE, READALL_AND_MONITOR_ROLE), listOf(TEST_HR_BACKEND_ROLE, "role2"), - false + false, ) val deleteUser = "deleteUser" @@ -1076,21 +1151,24 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { listOf("role1", "role3"), listOf( getClusterPermissionsFromCustomRole(ALERTING_DELETE_WORKFLOW_ACCESS), - getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS) - ) + getClusterPermissionsFromCustomRole(ALERTING_GET_WORKFLOW_ACCESS), + ), ) - val deleteUserClient = SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), deleteUser, password) - .setSocketTimeout(60000).build() + val deleteUserClient = + SecureRestClientBuilder(clusterHosts.toTypedArray(), isHttps(), deleteUser, password) + .setSocketTimeout(60000) + .build() try { val createdMonitor = createMonitorWithClient(userClient!!, monitor = randomQueryLevelMonitor()) assertNotNull("The monitor was not created", createdMonitor) - val createdWorkflow = createWorkflowWithClient( - client = userClient!!, - workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true) - ) + val createdWorkflow = + createWorkflowWithClient( + client = userClient!!, + workflow = randomWorkflow(monitorIds = listOf(createdMonitor.id), enabled = true), + ) assertNotNull("The workflow was not 
created", createdWorkflow) assertTrue("The workflow was not enabled", createdWorkflow.enabled) @@ -1112,37 +1190,40 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test execute workflow with an user with execute workflow access`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_EXECUTE_WORKFLOW_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_EXECUTE_WORKFLOW_ACCESS), ) val monitor = createRandomMonitor(true) val workflow = createRandomWorkflow(listOf(monitor.id), true) try { - val executeWorkflowResponse = userClient?.makeRequest( - "POST", - "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}/_execute", - mutableMapOf() - ) + val executeWorkflowResponse = + userClient?.makeRequest( + "POST", + "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}/_execute", + mutableMapOf(), + ) assertEquals("Executing workflow failed", RestStatus.OK, executeWorkflowResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) } } + @Test fun `test execute workflow with an user without execute workflow access`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) val monitor = createRandomMonitor(true) @@ -1152,7 +1233,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { userClient?.makeRequest( "POST", "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}/_execute", - mutableMapOf() + mutableMapOf(), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -1162,13 +1243,14 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test delete workflow with an user with delete workflow access`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - 
getClusterPermissionsFromCustomRole(ALERTING_DELETE_WORKFLOW_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_DELETE_WORKFLOW_ACCESS), ) val monitor = createRandomMonitor(true) @@ -1176,37 +1258,40 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { val refresh = true try { - val deleteWorkflowResponse = userClient?.makeRequest( - "DELETE", - "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}?refresh=$refresh", - emptyMap(), - monitor.toHttpEntity() - ) + val deleteWorkflowResponse = + userClient?.makeRequest( + "DELETE", + "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}?refresh=$refresh", + emptyMap(), + monitor.toHttpEntity(), + ) assertEquals("DELETE workflow failed", RestStatus.OK, deleteWorkflowResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) } } + @Test fun `test delete workflow with deleting delegates with an user with delete workflow access`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_DELETE_WORKFLOW_ACCESS) + getClusterPermissionsFromCustomRole(ALERTING_DELETE_WORKFLOW_ACCESS), ) val monitor = createRandomMonitor(true) val workflow = createRandomWorkflow(monitorIds = listOf(monitor.id)) try { - val deleteWorkflowResponse = deleteWorkflowWithClient( - userClient!!, - workflow, - deleteDelegates = true, - refresh = true - ) + val deleteWorkflowResponse = + deleteWorkflowWithClient( + userClient!!, + workflow, + deleteDelegates = true, + refresh = true, + ) assertEquals("DELETE workflow failed", RestStatus.OK, deleteWorkflowResponse?.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) @@ -1214,29 +1299,32 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { // Verify delegate deletion val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", monitor.id)).toString() // search as "admin" - must get 0 docs - val adminSearchResponse = client().makeRequest( - "POST", - 
"$ALERTING_BASE_URI/_search", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val adminSearchResponse = + client().makeRequest( + "POST", + "$ALERTING_BASE_URI/_search", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Search monitor failed", RestStatus.OK, adminSearchResponse.restStatus()) - val adminHits = createParser( - XContentType.JSON.xContent(), - adminSearchResponse.entity.content - ).map()["hits"]!! as Map> + val adminHits = + createParser( + XContentType.JSON.xContent(), + adminSearchResponse.entity.content, + ).map()["hits"]!! as Map> val adminDocsFound = adminHits["total"]?.get("value") assertEquals("Monitor found during search", 0, adminDocsFound) } + @Test fun `test delete workflow with an user without delete monitor access`() { createUserWithTestDataAndCustomRole( user, TEST_HR_INDEX, TEST_HR_ROLE, listOf(TEST_HR_BACKEND_ROLE), - getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE) + getClusterPermissionsFromCustomRole(ALERTING_NO_ACCESS_ROLE), ) val monitor = createRandomMonitor(true) @@ -1247,7 +1335,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "DELETE", "$WORKFLOW_ALERTING_BASE_URI/${workflow.id}?refresh=true", emptyMap(), - monitor.toHttpEntity() + monitor.toHttpEntity(), ) fail("Expected 403 Method FORBIDDEN response") } catch (e: ResponseException) { @@ -1257,59 +1345,68 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test admin all access with enable filter by`() { enableFilterBy() createUserWithTestData(user, TEST_HR_INDEX, TEST_HR_ROLE, TEST_HR_BACKEND_ROLE) createUserRolesMapping(ALERTING_FULL_ACCESS_ROLE, arrayOf(user)) try { // randomMonitor has a dummy user, api ignores the User passed as part of monitor, it picks user info from the logged-in user. 
- val monitor = randomQueryLevelMonitor().copy( - inputs = listOf( - SearchInput( - indices = listOf(TEST_HR_INDEX), - query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - ) + val monitor = + randomQueryLevelMonitor().copy( + inputs = + listOf( + SearchInput( + indices = listOf(TEST_HR_INDEX), + query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()), + ), + ), ) - ) val createResponse = userClient?.makeRequest("POST", ALERTING_BASE_URI, emptyMap(), monitor.toHttpEntity()) assertEquals("Create monitor failed", RestStatus.CREATED, createResponse?.restStatus()) - val monitorJson = JsonXContent.jsonXContent.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - createResponse?.entity?.content - ).map() + val monitorJson = + JsonXContent.jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + createResponse?.entity?.content, + ).map() val monitorId = monitorJson["_id"] as String val workflow = randomWorkflow(monitorIds = listOf(monitorId)) val createWorkflowResponse = userClient?.makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), workflow.toHttpEntity()) assertEquals("Create workflow failed", RestStatus.CREATED, createWorkflowResponse?.restStatus()) - val workflowJson = JsonXContent.jsonXContent.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - createWorkflowResponse?.entity?.content - ).map() + val workflowJson = + JsonXContent.jsonXContent + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + createWorkflowResponse?.entity?.content, + ).map() val id: String = workflowJson["_id"] as String val search = SearchSourceBuilder().query(QueryBuilders.termQuery("_id", id)).toString() // get as "admin" - must get 1 docs - val adminGetResponse = client().makeRequest( - "GET", - "$WORKFLOW_ALERTING_BASE_URI/$id", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val 
adminGetResponse = + client().makeRequest( + "GET", + "$WORKFLOW_ALERTING_BASE_URI/$id", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Get workflow failed", RestStatus.OK, adminGetResponse.restStatus()) // delete as "admin" - val adminDeleteResponse = client().makeRequest( - "DELETE", - "$WORKFLOW_ALERTING_BASE_URI/$id", - emptyMap(), - StringEntity(search, ContentType.APPLICATION_JSON) - ) + val adminDeleteResponse = + client().makeRequest( + "DELETE", + "$WORKFLOW_ALERTING_BASE_URI/$id", + emptyMap(), + StringEntity(search, ContentType.APPLICATION_JSON), + ) assertEquals("Delete workflow failed", RestStatus.OK, adminDeleteResponse.restStatus()) } finally { deleteRoleAndRoleMapping(TEST_HR_ROLE) @@ -1317,6 +1414,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test execute workflow with bucket-level and doc-level chained monitors with user having partial index permissions`() { createUser(user, arrayOf(TEST_HR_BACKEND_ROLE)) createTestIndex(TEST_HR_INDEX) @@ -1325,7 +1423,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { TEST_HR_ROLE, TEST_HR_INDEX, TERM_DLS_QUERY, - listOf(ALERTING_INDEX_WORKFLOW_ACCESS, ALERTING_INDEX_MONITOR_ACCESS) + listOf(ALERTING_INDEX_WORKFLOW_ACCESS, ALERTING_INDEX_MONITOR_ACCESS), ) createUserRolesMapping(TEST_HR_ROLE, arrayOf(user)) @@ -1338,7 +1436,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "test_field": "a", "accessible": true } - """.trimIndent() + """.trimIndent(), ) // Add a second doc that is not accessible to the user @@ -1350,7 +1448,7 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "test_field": "b", "accessible": false } - """.trimIndent() + """.trimIndent(), ) indexDoc( @@ -1361,48 +1459,55 @@ class SecureWorkflowRestApiIT : AlertingRestTestCase() { "test_field": "c", "accessible": true } - """.trimIndent() + """.trimIndent(), ) - val compositeSources = listOf( - 
TermsValuesSourceBuilder("test_field").field("test_field") - ) + val compositeSources = + listOf( + TermsValuesSourceBuilder("test_field").field("test_field"), + ) val compositeAgg = CompositeAggregationBuilder("composite_agg", compositeSources) - val input = SearchInput( - indices = listOf(TEST_HR_INDEX), - query = SearchSourceBuilder().size(0).query(QueryBuilders.matchAllQuery()).aggregation(compositeAgg) - ) - val triggerScript = """ + val input = + SearchInput( + indices = listOf(TEST_HR_INDEX), + query = SearchSourceBuilder().size(0).query(QueryBuilders.matchAllQuery()).aggregation(compositeAgg), + ) + val triggerScript = + """ params.docCount > 0 - """.trimIndent() + """.trimIndent() var trigger = randomBucketLevelTrigger() - trigger = trigger.copy( - bucketSelector = BucketSelectorExtAggregationBuilder( - name = trigger.id, - bucketsPathsMap = mapOf("docCount" to "_count"), - script = Script(triggerScript), - parentBucketPath = "composite_agg", - filter = null - ), - actions = listOf() - ) - val bucketMonitor = createMonitorWithClient( - userClient!!, - randomBucketLevelMonitor( - inputs = listOf(input), - enabled = false, - triggers = listOf(trigger), - dataSources = DataSources(findingsEnabled = true) + trigger = + trigger.copy( + bucketSelector = + BucketSelectorExtAggregationBuilder( + name = trigger.id, + bucketsPathsMap = mapOf("docCount" to "_count"), + script = Script(triggerScript), + parentBucketPath = "composite_agg", + filter = null, + ), + actions = listOf(), + ) + val bucketMonitor = + createMonitorWithClient( + userClient!!, + randomBucketLevelMonitor( + inputs = listOf(input), + enabled = false, + triggers = listOf(trigger), + dataSources = DataSources(findingsEnabled = true), + ), ) - ) assertNotNull("The bucket monitor was not created", bucketMonitor) val docQuery1 = DocLevelQuery(query = "test_field:\"a\"", name = "3", fields = listOf()) - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(DocLevelMonitorInput("description", 
listOf(TEST_HR_INDEX), listOf(docQuery1))), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)) - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(DocLevelMonitorInput("description", listOf(TEST_HR_INDEX), listOf(docQuery1))), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), + ) val docMonitor = createMonitorWithClient(userClient!!, monitor1)!! assertNotNull("The doc level monitor was not created", docMonitor) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/WorkflowRestApiIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/WorkflowRestApiIT.kt index c7750d40d..ad2e02c03 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/WorkflowRestApiIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/resthandler/WorkflowRestApiIT.kt @@ -45,28 +45,34 @@ import java.util.Collections import java.util.Locale import java.util.UUID import java.util.concurrent.TimeUnit +import kotlin.test.Test @TestLogging("level:DEBUG", reason = "Debug for tests.") @Suppress("UNCHECKED_CAST") class WorkflowRestApiIT : AlertingRestTestCase() { - + @Test fun `test create workflow success`() { val index = createTestIndex() val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + val workflow = + randomWorkflow( + monitorIds = 
listOf(monitorResponse.id), + ) val createResponse = client().makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), workflow.toHttpEntity()) @@ -81,38 +87,48 @@ class WorkflowRestApiIT : AlertingRestTestCase() { assertEquals("Incorrect Location header", "$WORKFLOW_ALERTING_BASE_URI/$createdId", createResponse.getHeader("Location")) } + @Test fun `test create workflow with different monitor types success`() { val index = createTestIndex() val docQuery = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val docLevelMonitorResponse = createMonitor(monitor) - val bucketLevelMonitor = randomBucketLevelMonitor( - inputs = listOf( - SearchInput( - listOf(index), - SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - .aggregation(TermsAggregationBuilder("test_agg").field("test_field")) - ) + val bucketLevelMonitor = + randomBucketLevelMonitor( + inputs = + listOf( + SearchInput( + listOf(index), + SearchSourceBuilder() + .query(QueryBuilders.matchAllQuery()) + .aggregation(TermsAggregationBuilder("test_agg").field("test_field")), + ), + ), ) - ) val bucketLevelMonitorResponse = createMonitor(bucketLevelMonitor) - val workflow = randomWorkflow( - monitorIds = listOf(docLevelMonitorResponse.id, bucketLevelMonitorResponse.id), - triggers = listOf( - randomChainedAlertTrigger(condition = Script("trigger1")), - randomChainedAlertTrigger(condition = Script("trigger2")) + val workflow = + randomWorkflow( + monitorIds = listOf(docLevelMonitorResponse.id, 
bucketLevelMonitorResponse.id), + triggers = + listOf( + randomChainedAlertTrigger(condition = Script("trigger1")), + randomChainedAlertTrigger(condition = Script("trigger2")), + ), ) - ) val createResponse = client().makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), workflow.toHttpEntity()) @@ -151,7 +167,9 @@ class WorkflowRestApiIT : AlertingRestTestCase() { assertEquals("Delegate2 order not correct", 2, delegate2.order) assertEquals("Delegate2 id not correct", bucketLevelMonitorResponse.id, delegate2.monitorId) assertEquals( - "Delegate2 Chained finding not correct", docLevelMonitorResponse.id, delegate2.chainedMonitorFindings!!.monitorId + "Delegate2 Chained finding not correct", + docLevelMonitorResponse.id, + delegate2.chainedMonitorFindings!!.monitorId, ) assertEquals(workflowById.triggers.size, 2) @@ -161,10 +179,12 @@ class WorkflowRestApiIT : AlertingRestTestCase() { assertTrue((workflowById.triggers[1] as ChainedAlertTrigger).condition == Script("trigger2")) } + @Test fun `test create workflow without delegate failure`() { - val workflow = randomWorkflow( - monitorIds = Collections.emptyList() - ) + val workflow = + randomWorkflow( + monitorIds = Collections.emptyList(), + ) try { createWorkflow(workflow) } catch (e: ResponseException) { @@ -172,16 +192,18 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Delegates list can not be empty.") + it.contains("Delegates list can not be empty."), ) } } } + @Test fun `test create workflow duplicate delegate failure`() { - val workflow = randomWorkflow( - monitorIds = listOf("1", "1", "2") - ) + val workflow = + randomWorkflow( + monitorIds = listOf("1", "1", "2"), + ) try { createWorkflow(workflow) } catch (e: ResponseException) { @@ -189,29 +211,35 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - 
it.contains("Duplicate delegates not allowed") + it.contains("Duplicate delegates not allowed"), ) } } } + @Test fun `test create workflow delegate monitor doesn't exist failure`() { val index = createTestIndex() val docQuery = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val docLevelMonitorResponse = createMonitor(monitor) - val workflow = randomWorkflow( - monitorIds = listOf("-1", docLevelMonitorResponse.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf("-1", docLevelMonitorResponse.id), + ) try { createWorkflow(workflow) } catch (e: ResponseException) { @@ -219,21 +247,24 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("are not valid monitor ids") + it.contains("are not valid monitor ids"), ) } } } + @Test fun `test create workflow sequence order not correct failure`() { - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(1, "monitor-2"), - Delegate(2, "monitor-3") - ) - val workflow = randomWorkflowWithDelegates( - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(1, "monitor-2"), + Delegate(2, "monitor-3"), + ) + val workflow = + randomWorkflowWithDelegates( + delegates = delegates, + ) try { createWorkflow(workflow) } catch (e: ResponseException) { @@ -241,21 +272,24 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning 
IndexWorkflow Action error ", - it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values") + it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values"), ) } } } + @Test fun `test create workflow chained findings monitor not in sequence failure`() { - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(2, "monitor-2", ChainedMonitorFindings("monitor-1")), - Delegate(3, "monitor-3", ChainedMonitorFindings("monitor-x")) - ) - val workflow = randomWorkflowWithDelegates( - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(2, "monitor-2", ChainedMonitorFindings("monitor-1")), + Delegate(3, "monitor-3", ChainedMonitorFindings("monitor-x")), + ) + val workflow = + randomWorkflowWithDelegates( + delegates = delegates, + ) try { createWorkflow(workflow) @@ -264,21 +298,24 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence") + it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence"), ) } } } + @Test fun `test create workflow chained findings order not correct failure`() { - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(3, "monitor-2", ChainedMonitorFindings("monitor-1")), - Delegate(2, "monitor-3", ChainedMonitorFindings("monitor-2")) - ) - val workflow = randomWorkflowWithDelegates( - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(3, "monitor-2", ChainedMonitorFindings("monitor-1")), + Delegate(2, "monitor-3", ChainedMonitorFindings("monitor-2")), + ) + val workflow = + randomWorkflowWithDelegates( + delegates = delegates, + ) try { createWorkflow(workflow) @@ -287,19 +324,22 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - 
it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3") + it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3"), ) } } } + @Test fun `test create workflow when monitor index not initialized failure`() { - val delegates = listOf( - Delegate(1, "monitor-1") - ) - val workflow = randomWorkflowWithDelegates( - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + ) + val workflow = + randomWorkflowWithDelegates( + delegates = delegates, + ) try { createWorkflow(workflow) @@ -308,43 +348,53 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Monitors not found") + it.contains("Monitors not found"), ) } } } + @Test fun `test create workflow delegate and chained finding monitor different indices failure`() { val index = randomAlphaOfLength(10).lowercase(Locale.ROOT) createTestIndex(index) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val docMonitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val docMonitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val docMonitorResponse = createMonitor(docMonitor) val index1 = "$index-1" createTestIndex(index1) - val docLevelInput1 = DocLevelMonitorInput( - "description", listOf(index1), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput1 = + DocLevelMonitorInput( + "description", + listOf(index1), + 
listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) - val docMonitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger) - ) + val docMonitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger), + ) val docMonitorResponse1 = createMonitor(docMonitor1) - val workflow = randomWorkflow( - monitorIds = listOf(docMonitorResponse1.id, docMonitorResponse.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(docMonitorResponse1.id, docMonitorResponse.id), + ) try { createWorkflow(workflow) } catch (e: ResponseException) { @@ -352,31 +402,37 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("doesn't query all of chained findings monitor's indices") + it.contains("doesn't query all of chained findings monitor's indices"), ) } } } + @Test fun `test create workflow query monitor chained findings monitor failure`() { val index = createTestIndex() - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val docMonitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val docMonitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val docMonitorResponse = createMonitor(docMonitor) val queryMonitor = randomQueryLevelMonitor() val queryMonitorResponse = createMonitor(queryMonitor) - val workflow = randomWorkflow( - monitorIds = listOf(queryMonitorResponse.id, docMonitorResponse.id) - ) + val workflow = + 
randomWorkflow( + monitorIds = listOf(queryMonitorResponse.id, docMonitorResponse.id), + ) try { createWorkflow(workflow) } catch (e: ResponseException) { @@ -384,20 +440,22 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Query level monitor can't be part of chained findings") + it.contains("Query level monitor can't be part of chained findings"), ) } } } + @Test fun `test create workflow with 26 delegates failure`() { val monitorsIds = mutableListOf() for (i in 0..25) { monitorsIds.add(UUID.randomUUID().toString()) } - val workflow = randomWorkflow( - monitorIds = monitorsIds - ) + val workflow = + randomWorkflow( + monitorIds = monitorsIds, + ) try { createWorkflow(workflow) } catch (e: ResponseException) { @@ -405,29 +463,35 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Delegates list can not be larger then 25.") + it.contains("Delegates list can not be larger then 25."), ) } } } + @Test fun `test update workflow add monitor success`() { val index = createTestIndex() val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val 
createResponse = client().makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), workflow.toHttpEntity()) @@ -441,17 +505,19 @@ class WorkflowRestApiIT : AlertingRestTestCase() { assertTrue("incorrect version", createdVersion > 0) assertEquals("Incorrect Location header", "$WORKFLOW_ALERTING_BASE_URI/$createdId", createResponse.getHeader("Location")) - val monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse2 = createMonitor(monitor2) - val updatedWorkflow = randomWorkflow( - id = createdId, - monitorIds = listOf(monitorResponse.id, monitorResponse2.id) - ) + val updatedWorkflow = + randomWorkflow( + id = createdId, + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + ) val updateResponse = client().makeRequest("PUT", updatedWorkflow.relativeUrl(), emptyMap(), updatedWorkflow.toHttpEntity()) @@ -481,34 +547,43 @@ class WorkflowRestApiIT : AlertingRestTestCase() { assertEquals("Delegate2 order not correct", 2, delegate2.order) assertEquals("Delegate2 id not correct", monitorResponse2.id, delegate2.monitorId) assertEquals( - "Delegate2 Chained finding not correct", monitorResponse.id, delegate2.chainedMonitorFindings!!.monitorId + "Delegate2 Chained finding not correct", + monitorResponse.id, + delegate2.chainedMonitorFindings!!.monitorId, ) } + @Test fun `test update workflow remove monitor success`() { val index = createTestIndex() val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), 
- triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) - val monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse2 = createMonitor(monitor2) - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id, monitorResponse2.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id, monitorResponse2.id), + ) val createResponse = client().makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), workflow.toHttpEntity()) @@ -529,10 +604,11 @@ class WorkflowRestApiIT : AlertingRestTestCase() { var delegates = (workflowById.inputs as List)[0].sequence.delegates.sortedBy { it.order } assertEquals("Delegates size not correct", 2, delegates.size) - val updatedWorkflow = randomWorkflow( - id = createdId, - monitorIds = listOf(monitorResponse.id) - ) + val updatedWorkflow = + randomWorkflow( + id = createdId, + monitorIds = listOf(monitorResponse.id), + ) val updateResponse = client().makeRequest("PUT", updatedWorkflow.relativeUrl(), emptyMap(), updatedWorkflow.toHttpEntity()) @@ -558,29 +634,36 @@ class WorkflowRestApiIT : AlertingRestTestCase() { assertEquals("Delegate1 id not correct", monitorResponse.id, delegate1.monitorId) } + @Test fun `test update workflow change order of delegate monitors`() { val index = createTestIndex() val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor1 = 
randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) - val monitor2 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor2 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse1 = createMonitor(monitor1) val monitorResponse2 = createMonitor(monitor2) - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse1.id, monitorResponse2.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse1.id, monitorResponse2.id), + ) val workflowResponse = createWorkflow(workflow) assertNotNull("Workflow creation failed", workflowResponse) @@ -590,19 +673,20 @@ class WorkflowRestApiIT : AlertingRestTestCase() { var workflowById = getWorkflow(workflowResponse.id) assertNotNull(workflowById) - val updatedWorkflowResponse = updateWorkflow( - randomWorkflow( - id = workflowById.id, - monitorIds = listOf(monitorResponse2.id, monitorResponse1.id) + val updatedWorkflowResponse = + updateWorkflow( + randomWorkflow( + id = workflowById.id, + monitorIds = listOf(monitorResponse2.id, monitorResponse1.id), + ), ) - ) assertNotNull("Workflow creation failed", updatedWorkflowResponse) assertNotNull(updatedWorkflowResponse) assertEquals( "Workflow id changed", workflowResponse.id, - updatedWorkflowResponse.id + updatedWorkflowResponse.id, ) assertTrue("incorrect version", updatedWorkflowResponse.version > 0) @@ -614,17 +698,17 @@ class WorkflowRestApiIT : AlertingRestTestCase() { assertEquals( "Workflow name not correct", updatedWorkflowResponse.name, - workflowById.name + workflowById.name, ) assertEquals( "Workflow owner not correct", updatedWorkflowResponse.owner, - workflowById.owner + workflowById.owner, ) assertEquals( "Workflow input not correct", updatedWorkflowResponse.inputs, - 
workflowById.inputs + workflowById.inputs, ) // Delegate verification @@ -642,26 +726,34 @@ class WorkflowRestApiIT : AlertingRestTestCase() { assertEquals("Delegate2 order not correct", 2, delegate2.order) assertEquals("Delegate2 id not correct", monitorResponse1.id, delegate2.monitorId) assertEquals( - "Delegate2 Chained finding not correct", monitorResponse2.id, delegate2.chainedMonitorFindings!!.monitorId + "Delegate2 Chained finding not correct", + monitorResponse2.id, + delegate2.chainedMonitorFindings!!.monitorId, ) } + @Test fun `test update workflow doesn't exist failure`() { val index = createTestIndex() val docQuery1 = DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) - val monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) + val monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(randomDocumentLevelTrigger(condition = ALWAYS_RUN)), + ) val monitorResponse1 = createMonitor(monitor1) - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse1.id) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse1.id), + ) val workflowResponse = createWorkflow(workflow) assertNotNull("Workflow creation failed", workflowResponse) @@ -672,7 +764,7 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("Workflow with testId is not found") + it.contains("Workflow with testId is not found"), ) } } @@ -682,30 +774,37 @@ class WorkflowRestApiIT : AlertingRestTestCase() { assertTrue(getWorkflow.enabled) } + @Test fun `test update workflow duplicate delegate failure`() { val index = 
createTestIndex() - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = createWorkflow(workflow) assertNotNull("Workflow creation failed", workflowResponse) - workflow = randomWorkflow( - id = workflowResponse.id, - monitorIds = listOf("1", "1", "2") - ) + workflow = + randomWorkflow( + id = workflowResponse.id, + monitorIds = listOf("1", "1", "2"), + ) try { updateWorkflow(workflow) } catch (e: ResponseException) { @@ -713,35 +812,42 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Duplicate delegates not allowed") + it.contains("Duplicate delegates not allowed"), ) } } } + @Test fun `test update workflow delegate monitor doesn't exist failure`() { val index = createTestIndex() - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) 
- val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = createWorkflow(workflow) assertNotNull("Workflow creation failed", workflowResponse) - workflow = randomWorkflow( - id = workflowResponse.id, - monitorIds = listOf("-1", monitorResponse.id) - ) + workflow = + randomWorkflow( + id = workflowResponse.id, + monitorIds = listOf("-1", monitorResponse.id), + ) try { updateWorkflow(workflow) @@ -750,40 +856,48 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("are not valid monitor ids") + it.contains("are not valid monitor ids"), ) } } } + @Test fun `test update workflow sequence order not correct failure`() { val index = createTestIndex() - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val 
workflowResponse = createWorkflow(workflow) assertNotNull("Workflow creation failed", workflowResponse) - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(1, "monitor-2"), - Delegate(2, "monitor-3") - ) - workflow = randomWorkflowWithDelegates( - id = workflowResponse.id, - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(1, "monitor-2"), + Delegate(2, "monitor-3"), + ) + workflow = + randomWorkflowWithDelegates( + id = workflowResponse.id, + delegates = delegates, + ) try { updateWorkflow(workflow) } catch (e: ResponseException) { @@ -791,40 +905,48 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values") + it.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values"), ) } } } + @Test fun `test update workflow chained findings monitor not in sequence failure`() { val index = createTestIndex() - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = createWorkflow(workflow) assertNotNull("Workflow creation failed", 
workflowResponse) - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(2, "monitor-2", ChainedMonitorFindings("monitor-1")), - Delegate(3, "monitor-3", ChainedMonitorFindings("monitor-x")) - ) - workflow = randomWorkflowWithDelegates( - id = workflowResponse.id, - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(2, "monitor-2", ChainedMonitorFindings("monitor-1")), + Delegate(3, "monitor-3", ChainedMonitorFindings("monitor-x")), + ) + workflow = + randomWorkflowWithDelegates( + id = workflowResponse.id, + delegates = delegates, + ) try { updateWorkflow(workflow) @@ -833,40 +955,48 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence") + it.contains("Chained Findings Monitor monitor-x doesn't exist in sequence"), ) } } } + @Test fun `test update workflow chained findings order not correct failure`() { val index = createTestIndex() - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(DocLevelQuery(query = "source.ip.v6.v1:12345", name = "3", fields = listOf())), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger) - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + ) val monitorResponse = createMonitor(monitor) - var workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id) - ) + var workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + ) val workflowResponse = createWorkflow(workflow) assertNotNull("Workflow creation failed", 
workflowResponse) - val delegates = listOf( - Delegate(1, "monitor-1"), - Delegate(3, "monitor-2", ChainedMonitorFindings("monitor-1")), - Delegate(2, "monitor-3", ChainedMonitorFindings("monitor-2")) - ) - workflow = randomWorkflowWithDelegates( - id = workflowResponse.id, - delegates = delegates - ) + val delegates = + listOf( + Delegate(1, "monitor-1"), + Delegate(3, "monitor-2", ChainedMonitorFindings("monitor-1")), + Delegate(2, "monitor-3", ChainedMonitorFindings("monitor-2")), + ) + workflow = + randomWorkflowWithDelegates( + id = workflowResponse.id, + delegates = delegates, + ) try { updateWorkflow(workflow) @@ -875,13 +1005,14 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning IndexWorkflow Action error ", - it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3") + it.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3"), ) } } } @Throws(Exception::class) + @Test fun `test getting a workflow`() { val query = randomQueryLevelMonitor() val monitor = createMonitor(query) @@ -901,6 +1032,7 @@ class WorkflowRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test getting a workflow that doesn't exist`() { try { getWorkflow(randomAlphaOfLength(20)) @@ -910,13 +1042,15 @@ class WorkflowRestApiIT : AlertingRestTestCase() { } } + @Test fun `test delete workflow`() { val query = randomQueryLevelMonitor() val monitor = createMonitor(query) - val workflowRequest = randomWorkflow( - monitorIds = listOf(monitor.id) - ) + val workflowRequest = + randomWorkflow( + monitorIds = listOf(monitor.id), + ) val workflowResponse = createWorkflow(workflowRequest) val workflowId = workflowResponse.id val getWorkflowResponse = getWorkflow(workflowResponse.id) @@ -934,19 +1068,21 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - 
it.contains("Workflow not found.") + it.contains("Workflow not found."), ) } } } + @Test fun `test delete workflow delete delegate monitors`() { val query = randomQueryLevelMonitor() val monitor = createMonitor(query) - val workflowRequest = randomWorkflow( - monitorIds = listOf(monitor.id) - ) + val workflowRequest = + randomWorkflow( + monitorIds = listOf(monitor.id), + ) val workflowResponse = createWorkflow(workflowRequest) val workflowId = workflowResponse.id val getWorkflowResponse = getWorkflow(workflowResponse.id) @@ -964,7 +1100,7 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("Workflow not found.") + it.contains("Workflow not found."), ) } } @@ -977,19 +1113,21 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("Monitor not found.") + it.contains("Monitor not found."), ) } } } + @Test fun `test delete workflow preserve delegate monitors`() { val query = randomQueryLevelMonitor() val monitor = createMonitor(query) - val workflowRequest = randomWorkflow( - monitorIds = listOf(monitor.id) - ) + val workflowRequest = + randomWorkflow( + monitorIds = listOf(monitor.id), + ) val workflowResponse = createWorkflow(workflowRequest) val workflowId = workflowResponse.id val getWorkflowResponse = getWorkflow(workflowResponse.id) @@ -1007,7 +1145,7 @@ class WorkflowRestApiIT : AlertingRestTestCase() { e.message?.let { assertTrue( "Exception not returning GetWorkflow Action error ", - it.contains("Workflow not found.") + it.contains("Workflow not found."), ) } } @@ -1018,6 +1156,7 @@ class WorkflowRestApiIT : AlertingRestTestCase() { } @Throws(Exception::class) + @Test fun `test deleting a workflow that doesn't exist`() { try { client().makeRequest("DELETE", "$WORKFLOW_ALERTING_BASE_URI/foobarbaz") @@ -1027,52 +1166,59 @@ class WorkflowRestApiIT : AlertingRestTestCase() 
{ } } + @Test fun `test chained alerts and audit alerts for workflows with query level monitor`() { val index = createTestIndex() val docQuery1 = DocLevelQuery(query = "test_field:\"test_value_1\"", name = "3", fields = listOf()) val docLevelInput1 = DocLevelMonitorInput("description", listOf(index), listOf(docQuery1)) val trigger1 = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - var monitor1 = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput1), - triggers = listOf(trigger1), - enabled = false - ) + var monitor1 = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput1), + triggers = listOf(trigger1), + enabled = false, + ) val monitorResponse = createMonitor(monitor1)!! - var monitor2 = randomQueryLevelMonitor( - triggers = listOf(randomQueryLevelTrigger(condition = Script("return true"))), - enabled = false - ) + var monitor2 = + randomQueryLevelMonitor( + triggers = listOf(randomQueryLevelTrigger(condition = Script("return true"))), + enabled = false, + ) val monitorResponse2 = createMonitor(monitor2)!! 
- val andTrigger = randomChainedAlertTrigger( - name = "1And2", - condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]") - ) + val andTrigger = + randomChainedAlertTrigger( + name = "1And2", + condition = Script("monitor[id=${monitorResponse.id}] && monitor[id=${monitorResponse2.id}]"), + ) - val workflow = Workflow( - id = "", - version = 2, - name = "test", - enabled = false, - schedule = IntervalSchedule(5, ChronoUnit.MINUTES), - lastUpdateTime = Instant.now(), - enabledTime = null, - workflowType = Workflow.WorkflowType.COMPOSITE, - user = randomUser(), - schemaVersion = -1, - inputs = listOf( - CompositeInput( - org.opensearch.commons.alerting.model.Sequence( - delegates = listOf( - Delegate(1, monitorResponse.id), - Delegate(2, monitorResponse2.id) - ) - ) - ) - ), - owner = "alerting", - triggers = listOf(andTrigger) - ) + val workflow = + Workflow( + id = "", + version = 2, + name = "test", + enabled = false, + schedule = IntervalSchedule(5, ChronoUnit.MINUTES), + lastUpdateTime = Instant.now(), + enabledTime = null, + workflowType = Workflow.WorkflowType.COMPOSITE, + user = randomUser(), + schemaVersion = -1, + inputs = + listOf( + CompositeInput( + org.opensearch.commons.alerting.model.Sequence( + delegates = + listOf( + Delegate(1, monitorResponse.id), + Delegate(2, monitorResponse2.id), + ), + ), + ), + ), + owner = "alerting", + triggers = listOf(andTrigger), + ) val workflowById = createWorkflow(workflow) assertNotNull(workflowById) val workflowId = workflowById.id @@ -1080,8 +1226,8 @@ class WorkflowRestApiIT : AlertingRestTestCase() { insertSampleTimeSerializedData( index, listOf( - "test_value_1" - ) + "test_value_1", + ), ) val searchMonitorResponse = searchMonitors() logger.error(searchMonitorResponse) @@ -1090,8 +1236,11 @@ class WorkflowRestApiIT : AlertingRestTestCase() { var numWorkflows = 0 jobsList.forEach { val map = it.sourceAsMap - if (map["type"] == "workflow") numWorkflows++ - else if (map["type"] 
== "monitor") numMonitors++ + if (map["type"] == "workflow") { + numWorkflows++ + } else if (map["type"] == "monitor") { + numMonitors++ + } } Assert.assertEquals(numMonitors, 2) Assert.assertEquals(numWorkflows, 1) @@ -1103,7 +1252,7 @@ class WorkflowRestApiIT : AlertingRestTestCase() { val workflowTriggerResults = executeWorkflowResponse["trigger_results"] as Map assertEquals(workflowTriggerResults.size, 1) assertTrue( - (workflowTriggerResults[andTrigger.id] as Map)["triggered"] as Boolean + (workflowTriggerResults[andTrigger.id] as Map)["triggered"] as Boolean, ) val res = getWorkflowAlerts(workflowId = workflowId, getAssociatedAlerts = true) val getWorkflowAlerts = entityAsMap(res) @@ -1145,26 +1294,32 @@ class WorkflowRestApiIT : AlertingRestTestCase() { Assert.assertEquals(acknowledged[0], alerts1[0]["id"]) } + @Test fun `test run workflow as scheduled job success`() { val index = createTestIndex() val docQuery1 = DocLevelQuery(query = "test_field:\"us-west-2\"", name = "3", fields = listOf()) - val docLevelInput = DocLevelMonitorInput( - "description", listOf(index), listOf(docQuery1) - ) + val docLevelInput = + DocLevelMonitorInput( + "description", + listOf(index), + listOf(docQuery1), + ) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = randomDocumentLevelMonitor( - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - enabled = false - ) + val monitor = + randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + enabled = false, + ) val monitorResponse = createMonitor(monitor) - val workflow = randomWorkflow( - monitorIds = listOf(monitorResponse.id), - enabled = true, - schedule = IntervalSchedule(1, ChronoUnit.MINUTES) - ) + val workflow = + randomWorkflow( + monitorIds = listOf(monitorResponse.id), + enabled = true, + schedule = IntervalSchedule(1, ChronoUnit.MINUTES), + ) val createResponse = client().makeRequest("POST", WORKFLOW_ALERTING_BASE_URI, emptyMap(), 
workflow.toHttpEntity()) @@ -1193,6 +1348,7 @@ class WorkflowRestApiIT : AlertingRestTestCase() { assertEquals("Findings saved for test monitor", 1, findings.size) } + @Test fun `test workflow run generates no error alerts with versionconflictengineexception with locks`() { val testIndex = createTestIndex() val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(ZonedDateTime.now().truncatedTo(ChronoUnit.MILLIS)) @@ -1206,22 +1362,23 @@ class WorkflowRestApiIT : AlertingRestTestCase() { val docLevelInput = DocLevelMonitorInput("description", listOf(testIndex), listOf(docQuery)) val trigger = randomDocumentLevelTrigger(condition = ALWAYS_RUN) - val monitor = createMonitor( - randomDocumentLevelMonitor( - name = "__lag-monitor-test__", - inputs = listOf(docLevelInput), - triggers = listOf(trigger), - enabled = false, - schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES) + val monitor = + createMonitor( + randomDocumentLevelMonitor( + name = "__lag-monitor-test__", + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + enabled = false, + schedule = IntervalSchedule(interval = 1, unit = ChronoUnit.MINUTES), + ), ) - ) assertNotNull(monitor.id) createWorkflow( randomWorkflow( monitorIds = listOf(monitor.id), enabled = true, - schedule = IntervalSchedule(1, ChronoUnit.MINUTES) - ) + schedule = IntervalSchedule(1, ChronoUnit.MINUTES), + ), ) indexDoc(testIndex, "1", testDoc) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/settings/AlertingSettingsTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/settings/AlertingSettingsTests.kt index 6ee8c4997..6d7775121 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/settings/AlertingSettingsTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/settings/AlertingSettingsTests.kt @@ -13,9 +13,9 @@ import org.opensearch.common.settings.Settings import org.opensearch.common.unit.TimeValue import org.opensearch.test.OpenSearchTestCase import 
java.util.concurrent.TimeUnit +import kotlin.test.Test class AlertingSettingsTests : OpenSearchTestCase() { - private lateinit var plugin: AlertingPlugin @Before @@ -23,6 +23,7 @@ class AlertingSettingsTests : OpenSearchTestCase() { plugin = AlertingPlugin() } + @Test fun `test all opendistro settings returned`() { val settings = plugin.settings assertTrue( @@ -50,12 +51,13 @@ class AlertingSettingsTests : OpenSearchTestCase() { LegacyOpenDistroScheduledJobSettings.SWEEP_BACKOFF_RETRY_COUNT, LegacyOpenDistroScheduledJobSettings.SWEEP_BACKOFF_MILLIS, LegacyOpenDistroScheduledJobSettings.SWEEPER_ENABLED, - LegacyOpenDistroScheduledJobSettings.REQUEST_TIMEOUT - ) - ) + LegacyOpenDistroScheduledJobSettings.REQUEST_TIMEOUT, + ), + ), ) } + @Test fun `test all opensearch settings returned`() { val settings = plugin.settings assertTrue( @@ -85,23 +87,25 @@ class AlertingSettingsTests : OpenSearchTestCase() { ScheduledJobSettings.SWEEP_BACKOFF_RETRY_COUNT, ScheduledJobSettings.SWEEP_BACKOFF_MILLIS, ScheduledJobSettings.SWEEPER_ENABLED, - ScheduledJobSettings.REQUEST_TIMEOUT - ) - ) + ScheduledJobSettings.REQUEST_TIMEOUT, + ), + ), ) } + @Test fun `test opendistro settings fallback`() { assertEquals( AlertingSettings.MOVE_ALERTS_BACKOFF_COUNT.get(Settings.EMPTY), - LegacyOpenDistroAlertingSettings.MOVE_ALERTS_BACKOFF_COUNT.get(Settings.EMPTY) + LegacyOpenDistroAlertingSettings.MOVE_ALERTS_BACKOFF_COUNT.get(Settings.EMPTY), ) assertEquals( ScheduledJobSettings.REQUEST_TIMEOUT.get(Settings.EMPTY), - LegacyOpenDistroScheduledJobSettings.REQUEST_TIMEOUT.get(Settings.EMPTY) + LegacyOpenDistroScheduledJobSettings.REQUEST_TIMEOUT.get(Settings.EMPTY), ) } + @Test fun `test settings get Value`() { val settings = Settings.builder().put("plugins.alerting.move_alerts_backoff_count", 1).build() assertEquals(AlertingSettings.MOVE_ALERTS_BACKOFF_COUNT.get(settings), 1) @@ -111,30 +115,34 @@ class AlertingSettingsTests : OpenSearchTestCase() { 
assertEquals(LegacyOpenDistroScheduledJobSettings.SWEEPER_ENABLED.get(scheduledJobSettings), true) } + @Test fun `test settings get value with legacy Fallback`() { - val settings = Settings.builder() - .put("opendistro.alerting.monitor.max_monitors", 1000) - .put("opendistro.alerting.input_timeout", TimeValue.timeValueSeconds(30)) - .put("opendistro.alerting.index_timeout", TimeValue.timeValueSeconds(60)) - .put("opendistro.alerting.bulk_timeout", TimeValue.timeValueSeconds(120)) - .put("opendistro.alerting.alert_backoff_millis", TimeValue.timeValueMillis(50)) - .put("opendistro.alerting.alert_backoff_count", 2) - .put("opendistro.alerting.move_alerts_backoff_millis", TimeValue.timeValueMillis(250)) - .put("opendistro.alerting.move_alerts_backoff_count", 3) - .put("opendistro.alerting.alert_history_enabled", true) - .put("opendistro.alerting.alert_history_rollover_period", TimeValue.timeValueHours(12)) - .put("opendistro.alerting.alert_history_max_age", TimeValue(30, TimeUnit.DAYS)) - .put("opendistro.alerting.alert_history_max_docs", 1000L) - .put("opendistro.alerting.alert_history_retention_period", TimeValue(60, TimeUnit.DAYS)) - .put("opendistro.alerting.request_timeout", TimeValue.timeValueSeconds(10)) - .put("opendistro.alerting.action_throttle_max_value", TimeValue.timeValueHours(24)) - .put("opendistro.alerting.filter_by_backend_roles", false) - .put("opendistro.scheduled_jobs.enabled", false) - .put("opendistro.scheduled_jobs.request_timeout", TimeValue.timeValueSeconds(10)) - .put("opendistro.scheduled_jobs.sweeper.backoff_millis", TimeValue.timeValueMillis(50)) - .put("opendistro.scheduled_jobs.retry_count", 3) - .put("opendistro.scheduled_jobs.sweeper.period", TimeValue.timeValueMinutes(5)) - .put("opendistro.scheduled_jobs.sweeper.page_size", 100).build() + val settings = + Settings + .builder() + .put("opendistro.alerting.monitor.max_monitors", 1000) + .put("opendistro.alerting.input_timeout", TimeValue.timeValueSeconds(30)) + 
.put("opendistro.alerting.index_timeout", TimeValue.timeValueSeconds(60)) + .put("opendistro.alerting.bulk_timeout", TimeValue.timeValueSeconds(120)) + .put("opendistro.alerting.alert_backoff_millis", TimeValue.timeValueMillis(50)) + .put("opendistro.alerting.alert_backoff_count", 2) + .put("opendistro.alerting.move_alerts_backoff_millis", TimeValue.timeValueMillis(250)) + .put("opendistro.alerting.move_alerts_backoff_count", 3) + .put("opendistro.alerting.alert_history_enabled", true) + .put("opendistro.alerting.alert_history_rollover_period", TimeValue.timeValueHours(12)) + .put("opendistro.alerting.alert_history_max_age", TimeValue(30, TimeUnit.DAYS)) + .put("opendistro.alerting.alert_history_max_docs", 1000L) + .put("opendistro.alerting.alert_history_retention_period", TimeValue(60, TimeUnit.DAYS)) + .put("opendistro.alerting.request_timeout", TimeValue.timeValueSeconds(10)) + .put("opendistro.alerting.action_throttle_max_value", TimeValue.timeValueHours(24)) + .put("opendistro.alerting.filter_by_backend_roles", false) + .put("opendistro.scheduled_jobs.enabled", false) + .put("opendistro.scheduled_jobs.request_timeout", TimeValue.timeValueSeconds(10)) + .put("opendistro.scheduled_jobs.sweeper.backoff_millis", TimeValue.timeValueMillis(50)) + .put("opendistro.scheduled_jobs.retry_count", 3) + .put("opendistro.scheduled_jobs.sweeper.period", TimeValue.timeValueMinutes(5)) + .put("opendistro.scheduled_jobs.sweeper.page_size", 100) + .build() assertEquals(AlertingSettings.ALERTING_MAX_MONITORS.get(settings), 1000) assertEquals(AlertingSettings.INPUT_TIMEOUT.get(settings), TimeValue.timeValueSeconds(30)) @@ -182,8 +190,8 @@ class AlertingSettingsTests : OpenSearchTestCase() { LegacyOpenDistroScheduledJobSettings.SWEEP_BACKOFF_MILLIS, LegacyOpenDistroScheduledJobSettings.SWEEP_BACKOFF_RETRY_COUNT, LegacyOpenDistroScheduledJobSettings.SWEEP_PAGE_SIZE, - LegacyOpenDistroScheduledJobSettings.SWEEP_PERIOD - ) + LegacyOpenDistroScheduledJobSettings.SWEEP_PERIOD, + ), ) } 
} diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/settings/DestinationSettingsTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/settings/DestinationSettingsTests.kt index 2e96c1fad..719f3af65 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/settings/DestinationSettingsTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/settings/DestinationSettingsTests.kt @@ -9,6 +9,7 @@ import org.junit.Before import org.opensearch.alerting.AlertingPlugin import org.opensearch.common.settings.Settings import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class DestinationSettingsTests : OpenSearchTestCase() { private lateinit var plugin: AlertingPlugin @@ -18,6 +19,7 @@ class DestinationSettingsTests : OpenSearchTestCase() { plugin = AlertingPlugin() } + @Test fun `test all opendistro destination settings returned`() { val settings = plugin.settings assertTrue( @@ -25,12 +27,13 @@ class DestinationSettingsTests : OpenSearchTestCase() { settings.containsAll( listOf( LegacyOpenDistroDestinationSettings.ALLOW_LIST, - LegacyOpenDistroDestinationSettings.HOST_DENY_LIST - ) - ) + LegacyOpenDistroDestinationSettings.HOST_DENY_LIST, + ), + ), ) } + @Test fun `test all opensearch destination settings returned`() { val settings = plugin.settings assertTrue( @@ -38,27 +41,32 @@ class DestinationSettingsTests : OpenSearchTestCase() { settings.containsAll( listOf( DestinationSettings.ALLOW_LIST, - DestinationSettings.HOST_DENY_LIST - ) - ) + DestinationSettings.HOST_DENY_LIST, + ), + ), ) } + @Test fun `test opendistro settings fallback`() { assertEquals( DestinationSettings.ALLOW_LIST.get(Settings.EMPTY), - LegacyOpenDistroDestinationSettings.ALLOW_LIST.get(Settings.EMPTY) + LegacyOpenDistroDestinationSettings.ALLOW_LIST.get(Settings.EMPTY), ) assertEquals( DestinationSettings.HOST_DENY_LIST.get(Settings.EMPTY), - LegacyOpenDistroDestinationSettings.HOST_DENY_LIST.get(Settings.EMPTY) + 
LegacyOpenDistroDestinationSettings.HOST_DENY_LIST.get(Settings.EMPTY), ) } + @Test fun `test settings get Value with legacy fallback`() { - val settings = Settings.builder() - .putList("opendistro.alerting.destination.allow_list", listOf("1")) - .putList("opendistro.destination.host.deny_list", emptyList()).build() + val settings = + Settings + .builder() + .putList("opendistro.alerting.destination.allow_list", listOf("1")) + .putList("opendistro.destination.host.deny_list", emptyList()) + .build() assertEquals(DestinationSettings.ALLOW_LIST.get(settings), listOf("1")) assertEquals(DestinationSettings.HOST_DENY_LIST.get(settings), emptyList()) @@ -66,8 +74,8 @@ class DestinationSettingsTests : OpenSearchTestCase() { assertSettingDeprecationsAndWarnings( arrayOf( LegacyOpenDistroDestinationSettings.ALLOW_LIST, - LegacyOpenDistroDestinationSettings.HOST_DENY_LIST - ) + LegacyOpenDistroDestinationSettings.HOST_DENY_LIST, + ), ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt index 06af4c3d3..c5753c60f 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/AlertingSingleNodeTestCase.kt @@ -80,7 +80,6 @@ import java.util.concurrent.TimeUnit */ @ThreadLeakScope(ThreadLeakScope.Scope.NONE) abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { - protected val index: String = randomAlphaOfLength(10).lowercase(Locale.ROOT) override fun setUp() { @@ -89,30 +88,42 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { } protected fun getAllIndicesFromPattern(pattern: String): List { - val getIndexResponse = ( - client().admin().indices().prepareGetIndex() - .setIndices(pattern) as GetIndexRequestBuilder + val getIndexResponse = + ( + client() + .admin() + .indices() + 
.prepareGetIndex() + .setIndices(pattern) as GetIndexRequestBuilder ).get() as GetIndexResponse getIndexResponse return getIndexResponse.indices().toList() } - protected fun executeMonitor(monitor: Monitor, id: String?, dryRun: Boolean = true): ExecuteMonitorResponse? { + protected fun executeMonitor( + monitor: Monitor, + id: String?, + dryRun: Boolean = true, + ): ExecuteMonitorResponse? { val request = ExecuteMonitorRequest(dryRun, TimeValue(Instant.now().toEpochMilli()), id, monitor) return client().execute(ExecuteMonitorAction.INSTANCE, request).get() } - protected fun insertSampleTimeSerializedData(index: String, data: List) { + protected fun insertSampleTimeSerializedData( + index: String, + data: List, + ) { data.forEachIndexed { i, value -> val twoMinsAgo = ZonedDateTime.now().minus(2, ChronoUnit.MINUTES).truncatedTo(ChronoUnit.MILLIS) val testTime = DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(twoMinsAgo) - val testDoc = """ + val testDoc = + """ { "test_strict_date_time": "$testTime", "test_field_1": "$value", "number": "$i" } - """.trimIndent() + """.trimIndent() // Indexing documents with deterministic doc id to allow for easy selected deletion during testing indexDoc(index, (i + 1).toString(), testDoc) } @@ -127,7 +138,8 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { /** A test index that can be used across tests. Feel free to add new fields but don't remove any. 
*/ protected fun createTestIndex() { val mapping = XContentFactory.jsonBuilder() - mapping.startObject() + mapping + .startObject() .startObject("properties") .startObject("test_strict_date_time") .field("type", "date") @@ -140,13 +152,16 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { .endObject() createIndex( - index, Settings.EMPTY, mapping + index, + Settings.EMPTY, + mapping, ) } protected fun createTestIndex(index: String) { val mapping = XContentFactory.jsonBuilder() - mapping.startObject() + mapping + .startObject() .startObject("properties") .startObject("test_strict_date_time") .field("type", "date") @@ -159,7 +174,9 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { .endObject() createIndex( - index, Settings.EMPTY, mapping + index, + Settings.EMPTY, + mapping, ) } @@ -168,85 +185,123 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { settings: Settings?, mappings: XContentBuilder?, ): IndexService? 
{ - val createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings) + val createIndexRequestBuilder = + client() + .admin() + .indices() + .prepareCreate(index) + .setSettings(settings) if (mappings != null) { createIndexRequestBuilder.setMapping(mappings) } return this.createIndex(index, createIndexRequestBuilder) } - protected fun indexDoc(index: String, id: String, doc: String) { - client().prepareIndex(index).setId(id) - .setSource(doc, XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get() + protected fun indexDoc( + index: String, + id: String, + doc: String, + ) { + client() + .prepareIndex(index) + .setId(id) + .setSource(doc, XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get() } protected fun assertIndexExists(index: String) { val getIndexResponse = - client().admin().indices().getIndex( - GetIndexRequest().indices(index).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN) - ).get() + client() + .admin() + .indices() + .getIndex( + GetIndexRequest().indices(index).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN), + ).get() assertTrue(getIndexResponse.indices.size > 0) } protected fun assertIndexNotExists(index: String) { val getIndexResponse = - client().admin().indices().getIndex( - GetIndexRequest().indices(index).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN) - ).get() + client() + .admin() + .indices() + .getIndex( + GetIndexRequest().indices(index).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN), + ).get() assertFalse(getIndexResponse.indices.size > 0) } protected fun assertAliasNotExists(alias: String) { - val aliasesResponse = client().admin().indices().getAliases(GetAliasesRequest()).get() - val foundAlias = aliasesResponse.aliases.values.forEach { - it.forEach { it1 -> - if (it1.alias == alias) { - fail("alias exists, but it shouldn't") + val aliasesResponse = + client() + .admin() + .indices() + 
.getAliases(GetAliasesRequest()) + .get() + val foundAlias = + aliasesResponse.aliases.values.forEach { + it.forEach { it1 -> + if (it1.alias == alias) { + fail("alias exists, but it shouldn't") + } } } - } } protected fun assertAliasExists(alias: String) { - val aliasesResponse = client().admin().indices().getAliases(GetAliasesRequest()).get() - val foundAlias = aliasesResponse.aliases.values.forEach { - it.forEach { it1 -> - if (it1.alias == alias) { - return + val aliasesResponse = + client() + .admin() + .indices() + .getAliases(GetAliasesRequest()) + .get() + val foundAlias = + aliasesResponse.aliases.values.forEach { + it.forEach { it1 -> + if (it1.alias == alias) { + return + } } } - } fail("alias doesn't exists, but it should") } protected fun createMonitor(monitor: Monitor): IndexMonitorResponse? { - val request = IndexMonitorRequest( - monitorId = Monitor.NO_ID, - seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, - refreshPolicy = WriteRequest.RefreshPolicy.parse("true"), - method = RestRequest.Method.POST, - monitor = monitor - ) + val request = + IndexMonitorRequest( + monitorId = Monitor.NO_ID, + seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, + primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + refreshPolicy = WriteRequest.RefreshPolicy.parse("true"), + method = RestRequest.Method.POST, + monitor = monitor, + ) return client().execute(AlertingActions.INDEX_MONITOR_ACTION_TYPE, request).actionGet() } - protected fun updateMonitor(monitor: Monitor, monitorId: String): IndexMonitorResponse? { - val request = IndexMonitorRequest( - monitorId = monitorId, - seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, - refreshPolicy = WriteRequest.RefreshPolicy.parse("true"), - method = RestRequest.Method.PUT, - monitor = monitor - ) + protected fun updateMonitor( + monitor: Monitor, + monitorId: String, + ): IndexMonitorResponse? 
{ + val request = + IndexMonitorRequest( + monitorId = monitorId, + seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, + primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + refreshPolicy = WriteRequest.RefreshPolicy.parse("true"), + method = RestRequest.Method.PUT, + monitor = monitor, + ) return client().execute(AlertingActions.INDEX_MONITOR_ACTION_TYPE, request).actionGet() } protected fun deleteMonitor(monitorId: String): Boolean { - client().execute( - AlertingActions.DELETE_MONITOR_ACTION_TYPE, DeleteMonitorRequest(monitorId, WriteRequest.RefreshPolicy.IMMEDIATE) - ).get() + client() + .execute( + AlertingActions.DELETE_MONITOR_ACTION_TYPE, + DeleteMonitorRequest(monitorId, WriteRequest.RefreshPolicy.IMMEDIATE), + ).get() return true } @@ -270,7 +325,12 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { bqb.must(TermQueryBuilder(Alert.EXECUTION_ID_FIELD, executionId)) } ssb.query(bqb) - val searchResponse = client().prepareSearch(indices).setRouting(monitorId).setSource(ssb).get() + val searchResponse = + client() + .prepareSearch(indices) + .setRouting(monitorId) + .setSource(ssb) + .get() return searchResponse.hits.hits.map { val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } @@ -286,22 +346,22 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { associatedAlertsIndex: String? = "", alertIds: List? = emptyList(), table: Table? = Table("asc", "monitor_id", null, 100, 0, null), - ): GetWorkflowAlertsResponse { - return client().execute( - AlertingActions.GET_WORKFLOW_ALERTS_ACTION_TYPE, - GetWorkflowAlertsRequest( - table = table!!, - severityLevel = "ALL", - alertState = alertState!!.name, - alertIndex = alertIndex, - associatedAlertsIndex = associatedAlertsIndex, - monitorIds = emptyList(), - workflowIds = listOf(workflowId), - alertIds = alertIds, - getAssociatedAlerts = getAssociatedAlerts!! 
- ) - ).get() - } + ): GetWorkflowAlertsResponse = + client() + .execute( + AlertingActions.GET_WORKFLOW_ALERTS_ACTION_TYPE, + GetWorkflowAlertsRequest( + table = table!!, + severityLevel = "ALL", + alertState = alertState!!.name, + alertIndex = alertIndex, + associatedAlertsIndex = associatedAlertsIndex, + monitorIds = emptyList(), + workflowIds = listOf(workflowId), + alertIds = alertIds, + getAssociatedAlerts = getAssociatedAlerts!!, + ), + ).get() protected fun refreshIndex(index: String) { client().execute(RefreshAction.INSTANCE, RefreshRequest(index)).get() @@ -317,12 +377,18 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { val ssb = SearchSourceBuilder() ssb.version(true) ssb.query(TermQueryBuilder(Alert.MONITOR_ID_FIELD, id)) - val searchResponse = client().prepareSearch(indices).setRouting(id).setSource(ssb).get() + val searchResponse = + client() + .prepareSearch(indices) + .setRouting(id) + .setSource(ssb) + .get() - return searchResponse.hits.hits.map { - val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } - Finding.parse(xcp) - }.filter { finding -> finding.monitorId == id } + return searchResponse.hits.hits + .map { + val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } + Finding.parse(xcp) + }.filter { finding -> finding.monitorId == id } } protected fun getFindings( @@ -330,13 +396,13 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { monitorId: String?, findingIndexName: String?, ): List { - - val getFindingsRequest = GetFindingsRequest( - findingId, - Table("asc", "monitor_id", null, 100, 0, null), - monitorId, - findingIndexName - ) + val getFindingsRequest = + GetFindingsRequest( + findingId, + Table("asc", "monitor_id", null, 100, 0, null), + monitorId, + findingIndexName, + ) val getFindingsResponse: GetFindingsResponse = client().execute(AlertingActions.GET_FINDINGS_ACTION_TYPE, getFindingsRequest).get() return 
getFindingsResponse.findings.map { it.finding }.toList() @@ -346,29 +412,32 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { monitorId: String, version: Long = 1L, fetchSourceContext: FetchSourceContext = FetchSourceContext.FETCH_SOURCE, - ) = client().execute( - AlertingActions.GET_MONITOR_ACTION_TYPE, - GetMonitorRequest(monitorId, version, RestRequest.Method.GET, fetchSourceContext) - ).get() + ) = client() + .execute( + AlertingActions.GET_MONITOR_ACTION_TYPE, + GetMonitorRequest(monitorId, version, RestRequest.Method.GET, fetchSourceContext), + ).get() - override fun getPlugins(): List> { - return listOf( + override fun getPlugins(): List> = + listOf( AlertingPlugin::class.java, ReindexModulePlugin::class.java, MustacheModulePlugin::class.java, PainlessModulePlugin::class.java, - ParentJoinModulePlugin::class.java + ParentJoinModulePlugin::class.java, ) - } protected fun deleteIndex(index: String) { - val response = client().admin().indices().delete(DeleteIndexRequest(index)).get() + val response = + client() + .admin() + .indices() + .delete(DeleteIndexRequest(index)) + .get() assertTrue("Unable to delete index", response.isAcknowledged()) } - override fun resetNodeAfterTest(): Boolean { - return false - } + override fun resetNodeAfterTest(): Boolean = false // merged WorkflowSingleNodeTestCase with this class as we are seeing test setup failures // when multiple test classes implement AlertingSingleNodeTestCase or its child class @@ -386,19 +455,25 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { val ssb = SearchSourceBuilder() ssb.version(true) ssb.query(TermQueryBuilder("_id", id)) - val searchResponse = client().prepareSearch(indices).setRouting(id).setSource(ssb).get() - - return searchResponse.hits.hits.map { it -> - val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } - lateinit var workflow: Workflow - while (xcp.nextToken() != 
XContentParser.Token.END_OBJECT) { - xcp.nextToken() - when (xcp.currentName()) { - "workflow" -> workflow = Workflow.parse(xcp) + val searchResponse = + client() + .prepareSearch(indices) + .setRouting(id) + .setSource(ssb) + .get() + + return searchResponse.hits.hits + .map { it -> + val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } + lateinit var workflow: Workflow + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + xcp.nextToken() + when (xcp.currentName()) { + "workflow" -> workflow = Workflow.parse(xcp) + } } - } - workflow.copy(id = it.id, version = it.version) - }.first() + workflow.copy(id = it.id, version = it.version) + }.first() } protected fun searchWorkflowMetadata( @@ -415,19 +490,25 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { val ssb = SearchSourceBuilder() ssb.version(true) ssb.query(TermQueryBuilder("workflow_metadata.workflow_id", id)) - val searchResponse = client().prepareSearch(indices).setRouting(id).setSource(ssb).get() - - return searchResponse.hits.hits.map { it -> - val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } - lateinit var workflowMetadata: WorkflowMetadata - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - xcp.nextToken() - when (xcp.currentName()) { - "workflow_metadata" -> workflowMetadata = WorkflowMetadata.parse(xcp) + val searchResponse = + client() + .prepareSearch(indices) + .setRouting(id) + .setSource(ssb) + .get() + + return searchResponse.hits.hits + .map { it -> + val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } + lateinit var workflowMetadata: WorkflowMetadata + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + xcp.nextToken() + when (xcp.currentName()) { + "workflow_metadata" -> workflowMetadata = WorkflowMetadata.parse(xcp) + } } - } - workflowMetadata.copy(id = it.id) - }.first() + workflowMetadata.copy(id = it.id) + }.first() 
} protected fun searchMonitorMetadata( @@ -444,19 +525,25 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { val ssb = SearchSourceBuilder() ssb.version(true) ssb.query(TermQueryBuilder("_id", id)) - val searchResponse = client().prepareSearch(indices).setRouting(id).setSource(ssb).get() - - return searchResponse.hits.hits.map { it -> - val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } - lateinit var monitorMetadata: MonitorMetadata - while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { - xcp.nextToken() - when (xcp.currentName()) { - "metadata" -> monitorMetadata = MonitorMetadata.parse(xcp) + val searchResponse = + client() + .prepareSearch(indices) + .setRouting(id) + .setSource(ssb) + .get() + + return searchResponse.hits.hits + .map { it -> + val xcp = createParser(JsonXContent.jsonXContent, it.sourceRef).also { it.nextToken() } + lateinit var monitorMetadata: MonitorMetadata + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + xcp.nextToken() + when (xcp.currentName()) { + "metadata" -> monitorMetadata = MonitorMetadata.parse(xcp) + } } - } - monitorMetadata.copy(id = it.id) - }.first() + monitorMetadata.copy(id = it.id) + }.first() } protected fun upsertWorkflow( @@ -464,47 +551,60 @@ abstract class AlertingSingleNodeTestCase : OpenSearchSingleNodeTestCase() { id: String = Workflow.NO_ID, method: RestRequest.Method = RestRequest.Method.POST, ): IndexWorkflowResponse? 
{ - val request = IndexWorkflowRequest( - workflowId = id, - seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, - primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, - refreshPolicy = WriteRequest.RefreshPolicy.parse("true"), - method = method, - workflow = workflow - ) + val request = + IndexWorkflowRequest( + workflowId = id, + seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO, + primaryTerm = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + refreshPolicy = WriteRequest.RefreshPolicy.parse("true"), + method = method, + workflow = workflow, + ) return client().execute(AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, request).actionGet() } - protected fun getWorkflowById(id: String): GetWorkflowResponse { - return client().execute( - AlertingActions.GET_WORKFLOW_ACTION_TYPE, - GetWorkflowRequest(id, RestRequest.Method.GET) - ).get() - } + protected fun getWorkflowById(id: String): GetWorkflowResponse = + client() + .execute( + AlertingActions.GET_WORKFLOW_ACTION_TYPE, + GetWorkflowRequest(id, RestRequest.Method.GET), + ).get() - protected fun deleteWorkflow(workflowId: String, deleteDelegateMonitors: Boolean? = null) { - client().execute( - AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, - DeleteWorkflowRequest(workflowId, deleteDelegateMonitors) - ).get() + protected fun deleteWorkflow( + workflowId: String, + deleteDelegateMonitors: Boolean? = null, + ) { + client() + .execute( + AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, + DeleteWorkflowRequest(workflowId, deleteDelegateMonitors), + ).get() } - protected fun executeWorkflow(workflow: Workflow? = null, id: String? = null, dryRun: Boolean = true): ExecuteWorkflowResponse? { + protected fun executeWorkflow( + workflow: Workflow? = null, + id: String? = null, + dryRun: Boolean = true, + ): ExecuteWorkflowResponse? 
{ val request = ExecuteWorkflowRequest(dryRun, TimeValue(Instant.now().toEpochMilli()), id, workflow) return client().execute(ExecuteWorkflowAction.INSTANCE, request).get() } protected fun getIndexSettings(index: String): GetSettingsResponse? { val request = GetSettingsRequest().indices(index) - return client().admin().indices().getSettings(request).get() + return client() + .admin() + .indices() + .getSettings(request) + .get() } - override fun nodeSettings(): Settings { - return Settings.builder() + override fun nodeSettings(): Settings = + Settings + .builder() .put(super.nodeSettings()) .put("opendistro.scheduled_jobs.sweeper.period", TimeValue(5, TimeUnit.SECONDS)) .put("opendistro.scheduled_jobs.enabled", true) .build() - } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/transport/GetRemoteIndexesActionIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/transport/GetRemoteIndexesActionIT.kt index aef16d5ae..0a249e3a7 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/transport/GetRemoteIndexesActionIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/transport/GetRemoteIndexesActionIT.kt @@ -22,24 +22,28 @@ import org.opensearch.cluster.health.ClusterHealthStatus import org.opensearch.common.xcontent.XContentFactory import org.opensearch.commons.alerting.util.string import org.opensearch.core.rest.RestStatus -import java.util.* +import java.util.Locale +import kotlin.test.Test @Suppress("UNCHECKED_CAST") class GetRemoteIndexesActionIT : AlertingRestTestCase() { private var remoteMonitoringEnabled = true private var remoteClusters = listOf() - private val mappingFieldToTypePairs1 = listOf( - "timestamp" to "date", - "color" to "keyword", - "message" to "text", - ) + private val mappingFieldToTypePairs1 = + listOf( + "timestamp" to "date", + "color" to "keyword", + "message" to "text", + ) - private val mappingFieldToTypePairs2 = listOf( - "timestamp" to "date", - "message" to "text", - ) + private val 
mappingFieldToTypePairs2 = + listOf( + "timestamp" to "date", + "message" to "text", + ) + @Test fun `test with remote monitoring disabled`() { // Disable remote monitoring if not already disabled toggleRemoteMonitoring(false) @@ -50,11 +54,12 @@ class GetRemoteIndexesActionIT : AlertingRestTestCase() { assertEquals(RestStatus.FORBIDDEN, e.response.restStatus()) assertEquals( "Remote monitoring is not enabled.", - (e.response.asMap()["error"] as Map)["reason"] + (e.response.asMap()["error"] as Map)["reason"], ) } } + @Test fun `test with blank indexes param`() { // Enable remote monitoring if not already enabled toggleRemoteMonitoring(true) @@ -65,25 +70,28 @@ class GetRemoteIndexesActionIT : AlertingRestTestCase() { assertEquals(RestStatus.BAD_REQUEST, e.response.restStatus()) assertEquals( INVALID_PATTERN_MESSAGE, - (e.response.asMap()["error"] as Map)["reason"] + (e.response.asMap()["error"] as Map)["reason"], ) } } + @Test fun `test with blank include_mappings param`() { // Enable remote monitoring if not already enabled toggleRemoteMonitoring(true) // Create test indexes - val index1 = createTestIndex( - index = randomAlphaOfLength(10).lowercase(Locale.ROOT), - mapping = formatMappingsJson(mappingFieldToTypePairs1) - ) + val index1 = + createTestIndex( + index = randomAlphaOfLength(10).lowercase(Locale.ROOT), + mapping = formatMappingsJson(mappingFieldToTypePairs1), + ) - val index2 = createTestIndex( - index = randomAlphaOfLength(10).lowercase(Locale.ROOT), - mapping = formatMappingsJson(mappingFieldToTypePairs2) - ) + val index2 = + createTestIndex( + index = randomAlphaOfLength(10).lowercase(Locale.ROOT), + mapping = formatMappingsJson(mappingFieldToTypePairs2), + ) val expectedNames = listOf(index1, index2) @@ -117,20 +125,23 @@ class GetRemoteIndexesActionIT : AlertingRestTestCase() { deleteIndex(index2) } + @Test fun `test with FALSE include_mappings param`() { // Enable remote monitoring if not already enabled toggleRemoteMonitoring(true) // Create test 
indexes - val index1 = createTestIndex( - index = randomAlphaOfLength(10).lowercase(Locale.ROOT), - mapping = formatMappingsJson(mappingFieldToTypePairs1) - ) + val index1 = + createTestIndex( + index = randomAlphaOfLength(10).lowercase(Locale.ROOT), + mapping = formatMappingsJson(mappingFieldToTypePairs1), + ) - val index2 = createTestIndex( - index = randomAlphaOfLength(10).lowercase(Locale.ROOT), - mapping = formatMappingsJson(mappingFieldToTypePairs2) - ) + val index2 = + createTestIndex( + index = randomAlphaOfLength(10).lowercase(Locale.ROOT), + mapping = formatMappingsJson(mappingFieldToTypePairs2), + ) val expectedNames = listOf(index1, index2) @@ -164,20 +175,23 @@ class GetRemoteIndexesActionIT : AlertingRestTestCase() { deleteIndex(index2) } + @Test fun `test with TRUE include_mappings param`() { // Enable remote monitoring if not already enabled toggleRemoteMonitoring(true) // Create test indexes - val index1 = createTestIndex( - index = randomAlphaOfLength(10).lowercase(Locale.ROOT), - mapping = formatMappingsJson(mappingFieldToTypePairs1) - ) + val index1 = + createTestIndex( + index = randomAlphaOfLength(10).lowercase(Locale.ROOT), + mapping = formatMappingsJson(mappingFieldToTypePairs1), + ) - val index2 = createTestIndex( - index = randomAlphaOfLength(10).lowercase(Locale.ROOT), - mapping = formatMappingsJson(mappingFieldToTypePairs2) - ) + val index2 = + createTestIndex( + index = randomAlphaOfLength(10).lowercase(Locale.ROOT), + mapping = formatMappingsJson(mappingFieldToTypePairs2), + ) val expectedNames = listOf(index1, index2) @@ -224,20 +238,23 @@ class GetRemoteIndexesActionIT : AlertingRestTestCase() { deleteIndex(index2) } + @Test fun `test with specific index name`() { // Enable remote monitoring if not already enabled toggleRemoteMonitoring(true) // Create test indexes - val index1 = createTestIndex( - index = randomAlphaOfLength(10).lowercase(Locale.ROOT), - mapping = formatMappingsJson(mappingFieldToTypePairs1) - ) + val index1 = + 
createTestIndex( + index = randomAlphaOfLength(10).lowercase(Locale.ROOT), + mapping = formatMappingsJson(mappingFieldToTypePairs1), + ) - val index2 = createTestIndex( - index = randomAlphaOfLength(10).lowercase(Locale.ROOT), - mapping = formatMappingsJson(mappingFieldToTypePairs2) - ) + val index2 = + createTestIndex( + index = randomAlphaOfLength(10).lowercase(Locale.ROOT), + mapping = formatMappingsJson(mappingFieldToTypePairs2), + ) val expectedNames = listOf(index1) @@ -278,9 +295,7 @@ class GetRemoteIndexesActionIT : AlertingRestTestCase() { deleteIndex(index2) } - private fun getRemoteIndexes(params: String): Response { - return client().makeRequest("GET", "${RestGetRemoteIndexesAction.ROUTE}?$params") - } + private fun getRemoteIndexes(params: String): Response = client().makeRequest("GET", "${RestGetRemoteIndexesAction.ROUTE}?$params") private fun toggleRemoteMonitoring(setting: Boolean) { if (remoteMonitoringEnabled != setting) { @@ -289,8 +304,11 @@ class GetRemoteIndexesActionIT : AlertingRestTestCase() { val settings = client().getSettings() val updatedSetting = getEnabledSetting(settings) - if (setting) assertTrue(updatedSetting) - else assertFalse(updatedSetting) + if (setting) { + assertTrue(updatedSetting) + } else { + assertFalse(updatedSetting) + } remoteMonitoringEnabled = updatedSetting @@ -313,11 +331,14 @@ class GetRemoteIndexesActionIT : AlertingRestTestCase() { } private fun formatMappingsJson(fieldToTypePairs: List>): String { - val builder = XContentFactory.jsonBuilder() - .startObject() - .startObject("properties") + val builder = + XContentFactory + .jsonBuilder() + .startObject() + .startObject("properties") fieldToTypePairs.forEach { - builder.startObject(it.first) + builder + .startObject(it.first) .field("type", it.second) .endObject() } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionParserTests.kt 
b/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionParserTests.kt index d3f4613fe..bb4003816 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionParserTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionParserTests.kt @@ -8,69 +8,78 @@ package org.opensearch.alerting.triggeraction import org.junit.Assert import org.opensearch.alerting.triggercondition.parsers.TriggerExpressionParser import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class TriggerExpressionParserTests : OpenSearchTestCase() { - + @Test fun `test trigger expression posix parsing simple AND`() { val eqString = "(query[name=sigma-123] && query[name=sigma-456])" val equation = TriggerExpressionParser(eqString).parse() Assert.assertEquals("query[name=sigma-123] query[name=sigma-456] && ", equation.toString()) } + @Test fun `test trigger expression posix parsing multiple AND`() { val eqString = "(query[name=sigma-123] && query[name=sigma-456]) && query[name=sigma-789]" val equation = TriggerExpressionParser(eqString).parse() Assert.assertEquals("query[name=sigma-123] query[name=sigma-456] && query[name=sigma-789] && ", equation.toString()) } + @Test fun `test trigger expression posix parsing multiple AND with parenthesis`() { val eqString = "(query[name=sigma-123] && query[name=sigma-456]) && (query[name=sigma-789] && query[name=id-2aw34])" val equation = TriggerExpressionParser(eqString).parse() Assert.assertEquals( "query[name=sigma-123] query[name=sigma-456] && query[name=sigma-789] query[name=id-2aw34] && && ", - equation.toString() + equation.toString(), ) } + @Test fun `test trigger expression posix parsing simple OR`() { val eqString = "(query[name=sigma-123] || query[name=sigma-456])" val equation = TriggerExpressionParser(eqString).parse() Assert.assertEquals("query[name=sigma-123] query[name=sigma-456] || ", equation.toString()) } + @Test fun `test trigger 
expression posix parsing multiple OR`() { val eqString = "(query[name=sigma-123] || query[name=sigma-456]) || query[name=sigma-789]" val equation = TriggerExpressionParser(eqString).parse() Assert.assertEquals("query[name=sigma-123] query[name=sigma-456] || query[name=sigma-789] || ", equation.toString()) } + @Test fun `test trigger expression posix parsing multiple OR with parenthesis`() { val eqString = "(query[name=sigma-123] || query[name=sigma-456]) || (query[name=sigma-789] || query[name=id-2aw34])" val equation = TriggerExpressionParser(eqString).parse() Assert.assertEquals( "query[name=sigma-123] query[name=sigma-456] || query[name=sigma-789] query[name=id-2aw34] || || ", - equation.toString() + equation.toString(), ) } + @Test fun `test trigger expression posix parsing simple NOT`() { val eqString = "(query[name=sigma-123] || !query[name=sigma-456])" val equation = TriggerExpressionParser(eqString).parse() Assert.assertEquals("query[name=sigma-123] query[name=sigma-456] ! || ", equation.toString()) } + @Test fun `test trigger expression posix parsing multiple NOT`() { val eqString = "(query[name=sigma-123] && !query[tag=tag-456]) && !(query[name=sigma-789])" val equation = TriggerExpressionParser(eqString).parse() Assert.assertEquals("query[name=sigma-123] query[tag=tag-456] ! && query[name=sigma-789] ! && ", equation.toString()) } + @Test fun `test trigger expression posix parsing multiple operators with parenthesis`() { val eqString = "(query[name=sigma-123] && query[tag=sev1]) || !(!query[name=sigma-789] || query[name=id-2aw34])" val equation = TriggerExpressionParser(eqString).parse() Assert.assertEquals( "query[name=sigma-123] query[tag=sev1] && query[name=sigma-789] ! query[name=id-2aw34] || ! 
|| ", - equation.toString() + equation.toString(), ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionResolverTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionResolverTests.kt index 67b8f7e9f..87cdc235d 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionResolverTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/triggeraction/TriggerExpressionResolverTests.kt @@ -9,9 +9,10 @@ import org.junit.Assert import org.opensearch.alerting.triggercondition.parsers.TriggerExpressionParser import org.opensearch.commons.alerting.model.DocLevelQuery import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class TriggerExpressionResolverTests : OpenSearchTestCase() { - + @Test fun `test trigger expression evaluation simple AND`() { val eqString = "(query[name=sigma-123] && query[name=sigma-456])" val equation = TriggerExpressionParser(eqString).parse() @@ -22,6 +23,7 @@ class TriggerExpressionResolverTests : OpenSearchTestCase() { Assert.assertEquals(mutableSetOf("1", "2", "3"), equation.evaluate(queryToDocIds)) } + @Test fun `test trigger expression evaluation simple AND scenario2`() { val eqString = "(query[name=sigma-123] && query[id=id1456])" val equation = TriggerExpressionParser(eqString).parse() @@ -32,6 +34,7 @@ class TriggerExpressionResolverTests : OpenSearchTestCase() { Assert.assertEquals(mutableSetOf("3"), equation.evaluate(queryToDocIds)) } + @Test fun `test trigger expression evaluation simple AND scenario3`() { val eqString = "(query[name=sigma-123] && query[tag=sev2])" val equation = TriggerExpressionParser(eqString).parse() @@ -42,6 +45,7 @@ class TriggerExpressionResolverTests : OpenSearchTestCase() { Assert.assertEquals(emptySet(), equation.evaluate(queryToDocIds)) } + @Test fun `test trigger expression evaluation simple OR`() { val eqString = "(query[name=sigma-123] || query[name=sigma-456])" val 
equation = TriggerExpressionParser(eqString).parse() @@ -52,6 +56,7 @@ class TriggerExpressionResolverTests : OpenSearchTestCase() { Assert.assertEquals(mutableSetOf("1", "2", "3"), equation.evaluate(queryToDocIds)) } + @Test fun `test trigger expression evaluation simple OR scenario2`() { val eqString = "(query[name=sigma-123] || query[id=id1456])" val equation = TriggerExpressionParser(eqString).parse() @@ -62,6 +67,7 @@ class TriggerExpressionResolverTests : OpenSearchTestCase() { Assert.assertEquals(mutableSetOf("6", "3", "7", "1", "2", "3"), equation.evaluate(queryToDocIds)) } + @Test fun `test trigger expression evaluation simple OR scenario3`() { val eqString = "(query[name=sigma-123] || query[tag=sev2])" val equation = TriggerExpressionParser(eqString).parse() @@ -72,6 +78,7 @@ class TriggerExpressionResolverTests : OpenSearchTestCase() { Assert.assertEquals(mutableSetOf("6", "8", "7"), equation.evaluate(queryToDocIds)) } + @Test fun `test trigger expression evaluation simple NOT`() { val eqString = "!(query[name=sigma-456])" val equation = TriggerExpressionParser(eqString).parse() @@ -82,6 +89,7 @@ class TriggerExpressionResolverTests : OpenSearchTestCase() { Assert.assertEquals(mutableSetOf("1", "2", "3"), equation.evaluate(queryToDocIds)) } + @Test fun `test trigger expression evaluation AND with NOT`() { val eqString = "(query[name=sigma-123] && !query[name=sigma-456])" val equation = TriggerExpressionParser(eqString).parse() @@ -93,6 +101,7 @@ class TriggerExpressionResolverTests : OpenSearchTestCase() { Assert.assertEquals(mutableSetOf("1", "2", "11"), equation.evaluate(queryToDocIds)) } + @Test fun `test trigger expression evaluation OR with NOT`() { val eqString = "(query[name=sigma-123] || !query[id=id1456])" val equation = TriggerExpressionParser(eqString).parse() @@ -104,6 +113,7 @@ class TriggerExpressionResolverTests : OpenSearchTestCase() { Assert.assertEquals(mutableSetOf("6", "3", "7", "13"), equation.evaluate(queryToDocIds)) } + @Test fun 
`test trigger expression evaluation with multiple operators with parenthesis`() { val eqString = "(query[name=sigma-123] && query[tag=sev1]) || !(!query[name=sigma-789] || query[id=id-2aw34])" val equation = TriggerExpressionParser(eqString).parse() @@ -116,7 +126,7 @@ class TriggerExpressionResolverTests : OpenSearchTestCase() { Assert.assertEquals( "query[name=sigma-123] query[tag=sev1] && query[name=sigma-789] ! query[id=id-2aw34] || ! || ", - equation.toString() + equation.toString(), ) Assert.assertEquals(mutableSetOf("2", "3", "11", "12"), equation.evaluate(queryToDocIds)) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/util/AggregationQueryRewriterTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/AggregationQueryRewriterTests.kt index ade50f5b1..a85418640 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/AggregationQueryRewriterTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/util/AggregationQueryRewriterTests.kt @@ -28,9 +28,10 @@ import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.OpenSearchTestCase import java.io.IOException +import kotlin.test.Test class AggregationQueryRewriterTests : OpenSearchTestCase() { - + @Test fun `test RewriteQuery empty previous result`() { val triggers: MutableList = mutableListOf() for (i in 0 until 10) { @@ -44,16 +45,18 @@ class AggregationQueryRewriterTests : OpenSearchTestCase() { Assert.assertEquals(queryBuilder.aggregations().pipelineAggregatorFactories.size, 10) } + @Test fun `skip test RewriteQuery with non-empty previous result`() { val triggers: MutableList = mutableListOf() for (i in 0 until 10) { triggers.add(randomBucketLevelTrigger()) } val queryBuilder = SearchSourceBuilder() - val termAgg: AggregationBuilder = CompositeAggregationBuilder( - "testPath", - listOf(TermsValuesSourceBuilder("k1"), TermsValuesSourceBuilder("k2")) - ) + val 
termAgg: AggregationBuilder = + CompositeAggregationBuilder( + "testPath", + listOf(TermsValuesSourceBuilder("k1"), TermsValuesSourceBuilder("k2")), + ) queryBuilder.aggregation(termAgg) val aggTriggersAfterKey = mutableMapOf() for (trigger in triggers) { @@ -74,6 +77,7 @@ class AggregationQueryRewriterTests : OpenSearchTestCase() { } } + @Test fun `test RewriteQuery with non aggregation trigger`() { val triggers: MutableList = mutableListOf() for (i in 0 until 10) { @@ -87,42 +91,44 @@ class AggregationQueryRewriterTests : OpenSearchTestCase() { Assert.assertEquals(queryBuilder.aggregations().pipelineAggregatorFactories.size, 0) } + @Test fun `test after keys from search response`() { - val responseContent = """ - { - "took" : 97, - "timed_out" : false, - "_shards" : { - "total" : 3, - "successful" : 3, - "skipped" : 0, - "failed" : 0 - }, - "hits" : { - "total" : { - "value" : 20, - "relation" : "eq" - }, - "max_score" : null, - "hits" : [ ] - }, - "aggregations" : { - "composite#testPath" : { - "after_key" : { - "sport" : "Basketball" + val responseContent = + """ + { + "took" : 97, + "timed_out" : false, + "_shards" : { + "total" : 3, + "successful" : 3, + "skipped" : 0, + "failed" : 0 + }, + "hits" : { + "total" : { + "value" : 20, + "relation" : "eq" + }, + "max_score" : null, + "hits" : [ ] }, - "buckets" : [ - { - "key" : { + "aggregations" : { + "composite#testPath" : { + "after_key" : { "sport" : "Basketball" }, - "doc_count" : 5 + "buckets" : [ + { + "key" : { + "sport" : "Basketball" + }, + "doc_count" : 5 + } + ] } - ] + } } - } - } - """.trimIndent() + """.trimIndent() val aggTriggers: MutableList = mutableListOf(randomBucketLevelTrigger()) val tradTriggers: MutableList = mutableListOf(randomQueryLevelTrigger()) @@ -135,140 +141,145 @@ class AggregationQueryRewriterTests : OpenSearchTestCase() { Assert.assertEquals(afterKeys2.size, 0) } + @Test fun `test after keys from search responses for multiple bucket paths and different page counts`() { - val 
firstResponseContent = """ - { - "took" : 0, - "timed_out" : false, - "_shards" : { - "total" : 1, - "successful" : 1, - "skipped" : 0, - "failed" : 0 - }, - "hits" : { - "total" : { - "value" : 4675, - "relation" : "eq" - }, - "max_score" : null, - "hits" : [ ] - }, - "aggregations" : { - "composite2#smallerResults" : { - "after_key" : { - "category" : "Women's Shoes" + val firstResponseContent = + """ + { + "took" : 0, + "timed_out" : false, + "_shards" : { + "total" : 1, + "successful" : 1, + "skipped" : 0, + "failed" : 0 }, - "buckets" : [ - { - "key" : { - "category" : "Women's Shoes" - }, - "doc_count" : 1136 - } - ] - }, - "composite3#largerResults" : { - "after_key" : { - "user" : "abigail" + "hits" : { + "total" : { + "value" : 4675, + "relation" : "eq" + }, + "max_score" : null, + "hits" : [ ] }, - "buckets" : [ - { - "key" : { - "user" : "abd" + "aggregations" : { + "composite2#smallerResults" : { + "after_key" : { + "category" : "Women's Shoes" }, - "doc_count" : 188 + "buckets" : [ + { + "key" : { + "category" : "Women's Shoes" + }, + "doc_count" : 1136 + } + ] }, - { - "key" : { + "composite3#largerResults" : { + "after_key" : { "user" : "abigail" }, - "doc_count" : 128 + "buckets" : [ + { + "key" : { + "user" : "abd" + }, + "doc_count" : 188 + }, + { + "key" : { + "user" : "abigail" + }, + "doc_count" : 128 + } + ] } - ] + } } - } - } - """.trimIndent() + """.trimIndent() - val secondResponseContent = """ - { - "took" : 0, - "timed_out" : false, - "_shards" : { - "total" : 1, - "successful" : 1, - "skipped" : 0, - "failed" : 0 - }, - "hits" : { - "total" : { - "value" : 4675, - "relation" : "eq" - }, - "max_score" : null, - "hits" : [ ] - }, - "aggregations" : { - "composite2#smallerResults" : { - "buckets" : [ ] - }, - "composite3#largerResults" : { - "after_key" : { - "user" : "boris" + val secondResponseContent = + """ + { + "took" : 0, + "timed_out" : false, + "_shards" : { + "total" : 1, + "successful" : 1, + "skipped" : 0, + "failed" : 0 }, - 
"buckets" : [ - { - "key" : { - "user" : "betty" - }, - "doc_count" : 148 + "hits" : { + "total" : { + "value" : 4675, + "relation" : "eq" }, - { - "key" : { + "max_score" : null, + "hits" : [ ] + }, + "aggregations" : { + "composite2#smallerResults" : { + "buckets" : [ ] + }, + "composite3#largerResults" : { + "after_key" : { "user" : "boris" }, - "doc_count" : 74 + "buckets" : [ + { + "key" : { + "user" : "betty" + }, + "doc_count" : 148 + }, + { + "key" : { + "user" : "boris" + }, + "doc_count" : 74 + } + ] } - ] + } } - } - } - """.trimIndent() + """.trimIndent() - val thirdResponseContent = """ - { - "took" : 0, - "timed_out" : false, - "_shards" : { - "total" : 1, - "successful" : 1, - "skipped" : 0, - "failed" : 0 - }, - "hits" : { - "total" : { - "value" : 4675, - "relation" : "eq" - }, - "max_score" : null, - "hits" : [ ] - }, - "aggregations" : { - "composite2#smallerResults" : { - "buckets" : [ ] - }, - "composite3#largerResults" : { - "buckets" : [ ] + val thirdResponseContent = + """ + { + "took" : 0, + "timed_out" : false, + "_shards" : { + "total" : 1, + "successful" : 1, + "skipped" : 0, + "failed" : 0 + }, + "hits" : { + "total" : { + "value" : 4675, + "relation" : "eq" + }, + "max_score" : null, + "hits" : [ ] + }, + "aggregations" : { + "composite2#smallerResults" : { + "buckets" : [ ] + }, + "composite3#largerResults" : { + "buckets" : [ ] + } + } } - } - } - """.trimIndent() + """.trimIndent() - val bucketLevelTriggers: MutableList = mutableListOf( - randomBucketLevelTrigger(bucketSelector = randomBucketSelectorExtAggregationBuilder(parentBucketPath = "smallerResults")), - randomBucketLevelTrigger(bucketSelector = randomBucketSelectorExtAggregationBuilder(parentBucketPath = "largerResults")) - ) + val bucketLevelTriggers: MutableList = + mutableListOf( + randomBucketLevelTrigger(bucketSelector = randomBucketSelectorExtAggregationBuilder(parentBucketPath = "smallerResults")), + randomBucketLevelTrigger(bucketSelector = 
randomBucketSelectorExtAggregationBuilder(parentBucketPath = "largerResults")), + ) var searchResponse = SearchResponse.fromXContent(createParser(JsonXContent.jsonXContent, firstResponseContent)) val afterKeys = AggregationQueryRewriter.getAfterKeysFromSearchResponse(searchResponse, bucketLevelTriggers, null) @@ -296,33 +307,39 @@ class AggregationQueryRewriterTests : OpenSearchTestCase() { val entries = ClusterModule.getNamedXWriteables() entries.add( NamedXContentRegistry.Entry( - Aggregation::class.java, ParseField(CompositeAggregationBuilder.NAME), + Aggregation::class.java, + ParseField(CompositeAggregationBuilder.NAME), CheckedFunction { parser: XContentParser? -> ParsedComposite.fromXContent( - parser, "testPath" + parser, + "testPath", ) - } - ) + }, + ), ) entries.add( NamedXContentRegistry.Entry( - Aggregation::class.java, ParseField(CompositeAggregationBuilder.NAME + "2"), + Aggregation::class.java, + ParseField(CompositeAggregationBuilder.NAME + "2"), CheckedFunction { parser: XContentParser? -> ParsedComposite.fromXContent( - parser, "smallerResults" + parser, + "smallerResults", ) - } - ) + }, + ), ) entries.add( NamedXContentRegistry.Entry( - Aggregation::class.java, ParseField(CompositeAggregationBuilder.NAME + "3"), + Aggregation::class.java, + ParseField(CompositeAggregationBuilder.NAME + "3"), CheckedFunction { parser: XContentParser? 
-> ParsedComposite.fromXContent( - parser, "largerResults" + parser, + "largerResults", ) - } - ) + }, + ), ) return NamedXContentRegistry(entries) } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/util/AlertingUtilsTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/AlertingUtilsTests.kt index 31dcb6591..89ae366a6 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/AlertingUtilsTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/util/AlertingUtilsTests.kt @@ -15,32 +15,39 @@ import org.opensearch.alerting.randomTemplateScript import org.opensearch.alerting.script.BucketLevelTriggerExecutionContext import org.opensearch.alerting.script.DocumentLevelTriggerExecutionContext import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class AlertingUtilsTests : OpenSearchTestCase() { + @Test fun `test parseSampleDocTags only returns expected tags`() { val expectedDocSourceTags = (0..3).map { "field$it" } - val unexpectedDocSourceTags = ((expectedDocSourceTags.size + 1)..(expectedDocSourceTags.size + 5)) - .map { "field$it" } + val unexpectedDocSourceTags = + ((expectedDocSourceTags.size + 1)..(expectedDocSourceTags.size + 5)) + .map { "field$it" } val unexpectedTagsScriptSource = unexpectedDocSourceTags.joinToString { field -> "$field = {{$field}}" } - val expectedTagsScriptSource = unexpectedTagsScriptSource + """ + val expectedTagsScriptSource = + unexpectedTagsScriptSource + + """ ${unexpectedDocSourceTags.joinToString("\n") { field -> "$field = {{$field}}" }} {{#alerts}} {{#${AlertContext.SAMPLE_DOCS_FIELD}}} ${expectedDocSourceTags.joinToString("\n") { field -> "$field = {{_source.$field}}" }} {{/${AlertContext.SAMPLE_DOCS_FIELD}}} {{/alerts}} - """.trimIndent() + """.trimIndent() // Action that prints doc source data - val trigger1 = randomDocumentLevelTrigger( - actions = listOf(randomAction(template = randomTemplateScript(source = expectedTagsScriptSource))) - ) + val trigger1 = + 
randomDocumentLevelTrigger( + actions = listOf(randomAction(template = randomTemplateScript(source = expectedTagsScriptSource))), + ) // Action that does not print doc source data - val trigger2 = randomDocumentLevelTrigger( - actions = listOf(randomAction(template = randomTemplateScript(source = unexpectedTagsScriptSource))) - ) + val trigger2 = + randomDocumentLevelTrigger( + actions = listOf(randomAction(template = randomTemplateScript(source = unexpectedTagsScriptSource))), + ) // No actions val trigger3 = randomDocumentLevelTrigger(actions = listOf()) @@ -52,127 +59,155 @@ class AlertingUtilsTests : OpenSearchTestCase() { unexpectedDocSourceTags.forEach { tag -> assertFalse(tags.contains(tag)) } } + @Test fun `test printsSampleDocData entire ctx tag returns TRUE`() { val tag = "{{ctx}}" - val triggers = listOf( - randomBucketLevelTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))), - randomDocumentLevelTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))) - ) + val triggers = + listOf( + randomBucketLevelTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))), + randomDocumentLevelTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))), + ) triggers.forEach { trigger -> assertTrue(printsSampleDocData(trigger)) } } + @Test fun `test printsSampleDocData entire alerts tag returns TRUE`() { - val triggers = listOf( - randomBucketLevelTrigger( - actions = listOf( - randomAction( - template = randomTemplateScript( - source = "{{ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}}" - ) - ) - ) - ), - randomDocumentLevelTrigger( - actions = listOf( - randomAction( - template = randomTemplateScript( - source = "{{ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}}" - ) - ) - ) + val triggers = + listOf( + randomBucketLevelTrigger( + actions = + listOf( + randomAction( + template = + randomTemplateScript( + source = 
"{{ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}}", + ), + ), + ), + ), + randomDocumentLevelTrigger( + actions = + listOf( + randomAction( + template = + randomTemplateScript( + source = "{{ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}}", + ), + ), + ), + ), ) - ) triggers.forEach { trigger -> assertTrue(printsSampleDocData(trigger)) } } + @Test fun `test printsSampleDocData entire sample_docs tag returns TRUE`() { - val triggers = listOf( - randomBucketLevelTrigger( - actions = listOf( - randomAction( - template = randomTemplateScript( - source = """ - {{#ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}} - {{${AlertContext.SAMPLE_DOCS_FIELD}}} - {{/ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}} - """.trimIndent() - ) - ) - ) - ), - randomDocumentLevelTrigger( - actions = listOf( - randomAction( - template = randomTemplateScript( - source = """ - {{#ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}} - {{${AlertContext.SAMPLE_DOCS_FIELD}}} - {{/ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}} - """.trimIndent() - ) - ) - ) + val triggers = + listOf( + randomBucketLevelTrigger( + actions = + listOf( + randomAction( + template = + randomTemplateScript( + source = + """ + {{#ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}} + {{${AlertContext.SAMPLE_DOCS_FIELD}}} + {{/ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}} + """.trimIndent(), + ), + ), + ), + ), + randomDocumentLevelTrigger( + actions = + listOf( + randomAction( + template = + randomTemplateScript( + source = + """ + {{#ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}} + {{${AlertContext.SAMPLE_DOCS_FIELD}}} + {{/ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}} + """.trimIndent(), + ), + ), + ), + ), ) - ) triggers.forEach { trigger -> assertTrue(printsSampleDocData(trigger)) } } + @Test fun `test printsSampleDocData sample_docs iteration block returns TRUE`() { - val triggers = listOf( - 
randomBucketLevelTrigger( - actions = listOf( - randomAction( - template = randomTemplateScript( - source = """ - {{#ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}} - "{{#${AlertContext.SAMPLE_DOCS_FIELD}}}" - {{_source.field}} - "{{/${AlertContext.SAMPLE_DOCS_FIELD}}}" - {{/ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}} - """.trimIndent() - ) - ) - ) - ), - randomDocumentLevelTrigger( - actions = listOf( - randomAction( - template = randomTemplateScript( - source = """ - {{#ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}} - {{#${AlertContext.SAMPLE_DOCS_FIELD}}} - {{_source.field}} - {{/${AlertContext.SAMPLE_DOCS_FIELD}}} - {{/ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}} - """.trimIndent() - ) - ) - ) + val triggers = + listOf( + randomBucketLevelTrigger( + actions = + listOf( + randomAction( + template = + randomTemplateScript( + source = + """ + {{#ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}} + "{{#${AlertContext.SAMPLE_DOCS_FIELD}}}" + {{_source.field}} + "{{/${AlertContext.SAMPLE_DOCS_FIELD}}}" + {{/ctx.${BucketLevelTriggerExecutionContext.NEW_ALERTS_FIELD}}} + """.trimIndent(), + ), + ), + ), + ), + randomDocumentLevelTrigger( + actions = + listOf( + randomAction( + template = + randomTemplateScript( + source = + """ + {{#ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}} + {{#${AlertContext.SAMPLE_DOCS_FIELD}}} + {{_source.field}} + {{/${AlertContext.SAMPLE_DOCS_FIELD}}} + {{/ctx.${DocumentLevelTriggerExecutionContext.ALERTS_FIELD}}} + """.trimIndent(), + ), + ), + ), + ), ) - ) triggers.forEach { trigger -> assertTrue(printsSampleDocData(trigger)) } } + @Test fun `test printsSampleDocData unrelated tag returns FALSE`() { val tag = "{{ctx.monitor.name}}" - val triggers = listOf( - randomBucketLevelTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))), - randomDocumentLevelTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = 
tag)))) - ) + val triggers = + listOf( + randomBucketLevelTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))), + randomDocumentLevelTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))), + ) triggers.forEach { trigger -> assertFalse(printsSampleDocData(trigger)) } } + @Test fun `test printsSampleDocData unsupported trigger types return FALSE`() { val tag = "{{ctx}}" - val triggers = listOf( - randomQueryLevelTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))), - randomChainedAlertTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))) - ) + val triggers = + listOf( + randomQueryLevelTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))), + randomChainedAlertTrigger(actions = listOf(randomAction(template = randomTemplateScript(source = tag)))), + ) triggers.forEach { trigger -> assertFalse(printsSampleDocData(trigger)) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtilsTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtilsTests.kt index 2295c8b59..5d5cdd652 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtilsTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/util/AnomalyDetectionUtilsTests.kt @@ -16,67 +16,85 @@ import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.index.query.QueryBuilders import org.opensearch.search.builder.SearchSourceBuilder import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class AnomalyDetectionUtilsTests : OpenSearchTestCase() { - + @Test fun `test is ad monitor`() { - val monitor = randomQueryLevelMonitor( - inputs = listOf( - SearchInput( - listOf(ANOMALY_RESULT_INDEX), - SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - ) + val monitor = + randomQueryLevelMonitor( + inputs = + listOf( + SearchInput( + 
listOf(ANOMALY_RESULT_INDEX), + SearchSourceBuilder().query(QueryBuilders.matchAllQuery()), + ), + ), ) - ) assertTrue(isADMonitor(monitor)) } + @Test fun `test not ad monitor if monitor have no inputs`() { - - val monitor = randomQueryLevelMonitor( - inputs = listOf() - ) + val monitor = + randomQueryLevelMonitor( + inputs = listOf(), + ) assertFalse(isADMonitor(monitor)) } + @Test fun `test not ad monitor if monitor input is not search input`() { - val monitor = randomQueryLevelMonitor( - inputs = listOf(object : Input { - override fun name(): String { - TODO("Not yet implemented") - } - - override fun writeTo(out: StreamOutput?) { - TODO("Not yet implemented") - } - - override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { - TODO("Not yet implemented") - } - }) - ) + val monitor = + randomQueryLevelMonitor( + inputs = + listOf( + object : Input { + override fun name(): String { + TODO("Not yet implemented") + } + + override fun writeTo(out: StreamOutput?) 
{ + TODO("Not yet implemented") + } + + override fun toXContent( + builder: XContentBuilder?, + params: ToXContent.Params?, + ): XContentBuilder { + TODO("Not yet implemented") + } + }, + ), + ) assertFalse(isADMonitor(monitor)) } + @Test fun `test not ad monitor if monitor input has more than 1 indices`() { - val monitor = randomQueryLevelMonitor( - inputs = listOf( - SearchInput( - listOf(randomAlphaOfLength(5), randomAlphaOfLength(5)), - SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) - ) + val monitor = + randomQueryLevelMonitor( + inputs = + listOf( + SearchInput( + listOf(randomAlphaOfLength(5), randomAlphaOfLength(5)), + SearchSourceBuilder().query(QueryBuilders.matchAllQuery()), + ), + ), ) - ) assertFalse(isADMonitor(monitor)) } + @Test fun `test not ad monitor if monitor input's index name is not AD result index`() { - val monitor = randomQueryLevelMonitor( - inputs = listOf(SearchInput(listOf(randomAlphaOfLength(5)), SearchSourceBuilder().query(QueryBuilders.matchAllQuery()))) - ) + val monitor = + randomQueryLevelMonitor( + inputs = listOf(SearchInput(listOf(randomAlphaOfLength(5)), SearchSourceBuilder().query(QueryBuilders.matchAllQuery()))), + ) assertFalse(isADMonitor(monitor)) } + @Test fun `test add user role filter with null user`() { val searchSourceBuilder = SearchSourceBuilder() addUserBackendRolesFilter(null, searchSourceBuilder) @@ -84,10 +102,11 @@ class AnomalyDetectionUtilsTests : OpenSearchTestCase() { "{\"query\":{\"bool\":{\"must_not\":[{\"nested\":{\"query\":{\"exists\":{\"field\":\"user\",\"boost\":1.0}}," + "\"path\":\"user\",\"ignore_unmapped\":false,\"score_mode\":\"none\",\"boost\":1.0}}],\"adjust_pure_negative\":true," + "\"boost\":1.0}}}", - searchSourceBuilder.toString() + searchSourceBuilder.toString(), ) } + @Test fun `test add user role filter with user with empty name`() { val searchSourceBuilder = SearchSourceBuilder() addUserBackendRolesFilter(User("", mutableListOf(), mutableListOf(), mutableListOf()), 
searchSourceBuilder) @@ -95,63 +114,72 @@ class AnomalyDetectionUtilsTests : OpenSearchTestCase() { "{\"query\":{\"bool\":{\"must_not\":[{\"nested\":{\"query\":{\"exists\":{\"field\":\"user\",\"boost\":1.0}}," + "\"path\":\"user\",\"ignore_unmapped\":false,\"score_mode\":\"none\",\"boost\":1.0}}],\"adjust_pure_negative\":true," + "\"boost\":1.0}}}", - searchSourceBuilder.toString() + searchSourceBuilder.toString(), ) } + @Test fun `test add user role filter with null user backend role`() { val searchSourceBuilder = SearchSourceBuilder() addUserBackendRolesFilter( User( - randomAlphaOfLength(5), null, listOf(randomAlphaOfLength(5)), - listOf(randomAlphaOfLength(5)) + randomAlphaOfLength(5), + null, + listOf(randomAlphaOfLength(5)), + listOf(randomAlphaOfLength(5)), ), - searchSourceBuilder + searchSourceBuilder, ) assertEquals( "{\"query\":{\"bool\":{\"must\":[{\"nested\":{\"query\":{\"exists\":{\"field\":\"user\",\"boost\":1.0}}," + "\"path\":\"user\",\"ignore_unmapped\":false,\"score_mode\":\"none\",\"boost\":1.0}}],\"must_not\":[{\"nested\":" + "{\"query\":{\"exists\":{\"field\":\"user.backend_roles.keyword\",\"boost\":1.0}},\"path\":\"user\",\"ignore_unmapped\"" + ":false,\"score_mode\":\"none\",\"boost\":1.0}}],\"adjust_pure_negative\":true,\"boost\":1.0}}}", - searchSourceBuilder.toString() + searchSourceBuilder.toString(), ) } + @Test fun `test add user role filter with empty user backend role`() { val searchSourceBuilder = SearchSourceBuilder() addUserBackendRolesFilter( User( - randomAlphaOfLength(5), listOf(), listOf(randomAlphaOfLength(5)), - listOf(randomAlphaOfLength(5)) + randomAlphaOfLength(5), + listOf(), + listOf(randomAlphaOfLength(5)), + listOf(randomAlphaOfLength(5)), ), - searchSourceBuilder + searchSourceBuilder, ) assertEquals( "{\"query\":{\"bool\":{\"must\":[{\"nested\":{\"query\":{\"exists\":{\"field\":\"user\",\"boost\":1.0}}," + 
"\"path\":\"user\",\"ignore_unmapped\":false,\"score_mode\":\"none\",\"boost\":1.0}}],\"must_not\":[{\"nested\":" + "{\"query\":{\"exists\":{\"field\":\"user.backend_roles.keyword\",\"boost\":1.0}},\"path\":\"user\",\"ignore_unmapped\"" + ":false,\"score_mode\":\"none\",\"boost\":1.0}}],\"adjust_pure_negative\":true,\"boost\":1.0}}}", - searchSourceBuilder.toString() + searchSourceBuilder.toString(), ) } + @Test fun `test add user role filter with normal user backend role`() { val searchSourceBuilder = SearchSourceBuilder() val backendRole1 = randomAlphaOfLength(5) val backendRole2 = randomAlphaOfLength(5) addUserBackendRolesFilter( User( - randomAlphaOfLength(5), listOf(backendRole1, backendRole2), listOf(randomAlphaOfLength(5)), - listOf(randomAlphaOfLength(5)) + randomAlphaOfLength(5), + listOf(backendRole1, backendRole2), + listOf(randomAlphaOfLength(5)), + listOf(randomAlphaOfLength(5)), ), - searchSourceBuilder + searchSourceBuilder, ) assertEquals( "{\"query\":{\"bool\":{\"must\":[{\"nested\":{\"query\":{\"terms\":{\"user.backend_roles.keyword\":" + "[\"$backendRole1\",\"$backendRole2\"]," + "\"boost\":1.0}},\"path\":\"user\",\"ignore_unmapped\":false,\"score_mode\":\"none\",\"boost\":1.0}}]," + "\"adjust_pure_negative\":true,\"boost\":1.0}}}", - searchSourceBuilder.toString() + searchSourceBuilder.toString(), ) } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/util/IndexUtilsTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/IndexUtilsTests.kt index 03f03abeb..05399520f 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/IndexUtilsTests.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/util/IndexUtilsTests.kt @@ -9,10 +9,11 @@ import org.opensearch.alerting.parser import org.opensearch.cluster.metadata.IndexMetadata import org.opensearch.test.OpenSearchTestCase import java.lang.NumberFormatException +import kotlin.test.Test import kotlin.test.assertFailsWith class IndexUtilsTests : OpenSearchTestCase() { 
- + @Test fun `test get schema version`() { val message = "{\"user\":{ \"name\":\"test\"},\"_meta\":{\"schema_version\": 1}}" @@ -20,6 +21,7 @@ class IndexUtilsTests : OpenSearchTestCase() { assertEquals(1, schemaVersion) } + @Test fun `test get schema version without _meta`() { val message = "{\"user\":{ \"name\":\"test\"}}" @@ -27,6 +29,7 @@ class IndexUtilsTests : OpenSearchTestCase() { assertEquals(0, schemaVersion) } + @Test fun `test get schema version without schema_version`() { val message = "{\"user\":{ \"name\":\"test\"},\"_meta\":{\"test\": 1}}" @@ -34,6 +37,7 @@ class IndexUtilsTests : OpenSearchTestCase() { assertEquals(0, schemaVersion) } + @Test fun `test get schema version with negative schema_version`() { val message = "{\"user\":{ \"name\":\"test\"},\"_meta\":{\"schema_version\": -1}}" @@ -42,6 +46,7 @@ class IndexUtilsTests : OpenSearchTestCase() { } } + @Test fun `test get schema version with wrong schema_version`() { val message = "{\"user\":{ \"name\":\"test\"},\"_meta\":{\"schema_version\": \"wrong\"}}" @@ -50,11 +55,13 @@ class IndexUtilsTests : OpenSearchTestCase() { } } + @Test fun `test should update index without original version`() { - val indexContent = "{\"testIndex\":{\"settings\":{\"index\":{\"creation_date\":\"1558407515699\"," + - "\"number_of_shards\":\"1\",\"number_of_replicas\":\"1\",\"uuid\":\"t-VBBW6aR6KpJ3XP5iISOA\"," + - "\"version\":{\"created\":\"6040399\"},\"provided_name\":\"data_test\"}},\"mapping_version\":123," + - "\"settings_version\":123,\"aliases_version\":1,\"mappings\":{\"_doc\":{\"properties\":{\"name\":{\"type\":\"keyword\"}}}}}}" + val indexContent = + "{\"testIndex\":{\"settings\":{\"index\":{\"creation_date\":\"1558407515699\"," + + "\"number_of_shards\":\"1\",\"number_of_replicas\":\"1\",\"uuid\":\"t-VBBW6aR6KpJ3XP5iISOA\"," + + "\"version\":{\"created\":\"6040399\"},\"provided_name\":\"data_test\"}},\"mapping_version\":123," + + 
"\"settings_version\":123,\"aliases_version\":1,\"mappings\":{\"_doc\":{\"properties\":{\"name\":{\"type\":\"keyword\"}}}}}}" val newMapping = "{\"_meta\":{\"schema_version\":10},\"properties\":{\"name\":{\"type\":\"keyword\"}}}" val index: IndexMetadata = IndexMetadata.fromXContent(parser(indexContent)) @@ -62,12 +69,14 @@ class IndexUtilsTests : OpenSearchTestCase() { assertTrue(shouldUpdateIndex) } + @Test fun `test should update index with lagged version`() { - val indexContent = "{\"testIndex\":{\"settings\":{\"index\":{\"creation_date\":\"1558407515699\"," + - "\"number_of_shards\":\"1\",\"number_of_replicas\":\"1\",\"uuid\":\"t-VBBW6aR6KpJ3XP5iISOA\"," + - "\"version\":{\"created\":\"6040399\"},\"provided_name\":\"data_test\"}},\"mapping_version\":123," + - "\"settings_version\":123,\"aliases_version\":1,\"mappings\":{\"_doc\":{\"_meta\":{\"schema_version\":1},\"properties\":" + - "{\"name\":{\"type\":\"keyword\"}}}}}}" + val indexContent = + "{\"testIndex\":{\"settings\":{\"index\":{\"creation_date\":\"1558407515699\"," + + "\"number_of_shards\":\"1\",\"number_of_replicas\":\"1\",\"uuid\":\"t-VBBW6aR6KpJ3XP5iISOA\"," + + "\"version\":{\"created\":\"6040399\"},\"provided_name\":\"data_test\"}},\"mapping_version\":123," + + "\"settings_version\":123,\"aliases_version\":1,\"mappings\":{\"_doc\":{\"_meta\":{\"schema_version\":1},\"properties\":" + + "{\"name\":{\"type\":\"keyword\"}}}}}}" val newMapping = "{\"_meta\":{\"schema_version\":10},\"properties\":{\"name\":{\"type\":\"keyword\"}}}" val index: IndexMetadata = IndexMetadata.fromXContent(parser(indexContent)) @@ -75,12 +84,14 @@ class IndexUtilsTests : OpenSearchTestCase() { assertTrue(shouldUpdateIndex) } + @Test fun `test should update index with same version`() { - val indexContent = "{\"testIndex\":{\"settings\":{\"index\":{\"creation_date\":\"1558407515699\"," + - "\"number_of_shards\":\"1\",\"number_of_replicas\":\"1\",\"uuid\":\"t-VBBW6aR6KpJ3XP5iISOA\"," + - 
"\"version\":{\"created\":\"6040399\"},\"provided_name\":\"data_test\"}},\"mapping_version\":\"1\"," + - "\"settings_version\":\"1\",\"aliases_version\":\"1\",\"mappings\":" + - "{\"_doc\":{\"_meta\":{\"schema_version\":1},\"properties\":{\"name\":{\"type\":\"keyword\"}}}}}}" + val indexContent = + "{\"testIndex\":{\"settings\":{\"index\":{\"creation_date\":\"1558407515699\"," + + "\"number_of_shards\":\"1\",\"number_of_replicas\":\"1\",\"uuid\":\"t-VBBW6aR6KpJ3XP5iISOA\"," + + "\"version\":{\"created\":\"6040399\"},\"provided_name\":\"data_test\"}},\"mapping_version\":\"1\"," + + "\"settings_version\":\"1\",\"aliases_version\":\"1\",\"mappings\":" + + "{\"_doc\":{\"_meta\":{\"schema_version\":1},\"properties\":{\"name\":{\"type\":\"keyword\"}}}}}}" val newMapping = "{\"_meta\":{\"schema_version\":1},\"properties\":{\"name\":{\"type\":\"keyword\"}}}" val xContentParser = parser(indexContent) val index: IndexMetadata = IndexMetadata.fromXContent(xContentParser) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatIndicesWrappersIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatIndicesWrappersIT.kt index 9712b4213..f75180363 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatIndicesWrappersIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatIndicesWrappersIT.kt @@ -5,6 +5,7 @@ package org.opensearch.alerting.util.clusterMetricsMonitorHelpers +import kotlinx.coroutines.runBlocking import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.randomClusterMetricsInput import org.opensearch.alerting.util.clusterMetricsMonitorHelpers.CatIndicesResponseWrapper.Companion.WRAPPER_FIELD @@ -12,10 +13,12 @@ import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.alerting.model.ClusterMetricsInput import org.opensearch.core.common.Strings import 
org.opensearch.test.OpenSearchSingleNodeTestCase +import kotlin.test.Test class CatIndicesWrappersIT : OpenSearchSingleNodeTestCase() { private val path = ClusterMetricsInput.ClusterMetricType.CAT_INDICES.defaultPath + @Test fun `test CatIndicesRequestWrapper validate valid pathParams`() { // GIVEN val pathParams = "index1,index-name-2,index-3" @@ -30,6 +33,7 @@ class CatIndicesWrappersIT : OpenSearchSingleNodeTestCase() { assertEquals(3, requestWrapper.indicesStatsRequest.indices().size) } + @Test fun `test CatIndicesRequestWrapper validate without providing pathParams`() { // GIVEN & WHEN val requestWrapper = CatIndicesRequestWrapper() @@ -41,6 +45,7 @@ class CatIndicesWrappersIT : OpenSearchSingleNodeTestCase() { assertNull(requestWrapper.indicesStatsRequest.indices()) } + @Test fun `test CatIndicesRequestWrapper validate blank pathParams`() { // GIVEN val pathParams = " " @@ -55,6 +60,7 @@ class CatIndicesWrappersIT : OpenSearchSingleNodeTestCase() { assertNull(requestWrapper.indicesStatsRequest.indices()) } + @Test fun `test CatIndicesRequestWrapper validate empty pathParams`() { // GIVEN val pathParams = "" @@ -69,6 +75,7 @@ class CatIndicesWrappersIT : OpenSearchSingleNodeTestCase() { assertNull(requestWrapper.indicesStatsRequest.indices()) } + @Test fun `test CatIndicesRequestWrapper validate invalid pathParams`() { // GIVEN val pathParams = "_index1,index^2" @@ -77,97 +84,115 @@ class CatIndicesWrappersIT : OpenSearchSingleNodeTestCase() { assertThrows(IllegalArgumentException::class.java) { CatIndicesRequestWrapper(pathParams = pathParams) } } - suspend fun `test CatIndicesResponseWrapper returns with only indices in pathParams`() { - // GIVEN - val testIndices = (1..5).map { - "test-index${randomAlphaOfLength(10).lowercase()}" to randomIntBetween(1, 10) - }.toMap() - - testIndices.forEach { (indexName, docCount) -> - repeat(docCount) { - val docId = (it + 1).toString() - val docMessage = """ - { - "message": "$indexName doc num $docId" - } - 
""".trimIndent() - indexDoc(indexName, docId, docMessage) + @Test + fun `test CatIndicesResponseWrapper returns with only indices in pathParams`() = + runBlocking { + // GIVEN + val testIndices = + (1..5) + .map { + "test-index${randomAlphaOfLength(10).lowercase()}" to randomIntBetween(1, 10) + }.toMap() + + testIndices.forEach { (indexName, docCount) -> + repeat(docCount) { + val docId = (it + 1).toString() + val docMessage = + """ + { + "message": "$indexName doc num $docId" + } + """.trimIndent() + indexDoc(indexName, docId, docMessage) + } } - } /* Creating a subset of indices to use for the pathParams to test that all indices on the cluster ARE NOT returned. - */ - val pathParamsIndices = testIndices.keys.toList().subList(1, testIndices.size - 1) - val pathParams = pathParamsIndices.joinToString(",") - val input = randomClusterMetricsInput(path = path, pathParams = pathParams) + */ + val pathParamsIndices = testIndices.keys.toList().subList(1, testIndices.size - 1) + val pathParams = pathParamsIndices.joinToString(",") + val input = randomClusterMetricsInput(path = path, pathParams = pathParams) + + // WHEN + val responseMap = (executeTransportAction(input, client())).toMap() + + // THEN + val shards = responseMap[WRAPPER_FIELD] as List> + val returnedIndices = + shards.map { (it[CatIndicesResponseWrapper.IndexInfo.INDEX_FIELD] as String) to it }.toMap() + + assertEquals(pathParamsIndices.size, returnedIndices.keys.size) + testIndices.forEach { (indexName, docCount) -> + if (pathParamsIndices.contains(indexName)) { + assertEquals( + indexName, + returnedIndices[indexName]?.get(CatIndicesResponseWrapper.IndexInfo.INDEX_FIELD) as String, + ) + assertEquals( + docCount.toString(), + returnedIndices[indexName]?.get(CatIndicesResponseWrapper.IndexInfo.DOCS_COUNT_FIELD) as String, + ) + } + } + } - // WHEN - val responseMap = (executeTransportAction(input, client())).toMap() + @Test + fun `test CatIndicesResponseWrapper returns with all indices when empty 
pathParams`() = + runBlocking { + // GIVEN + val testIndices = + (1..5) + .map { + "test-index${randomAlphaOfLength(10).lowercase()}" to randomIntBetween(1, 10) + }.toMap() + + testIndices.forEach { (indexName, docCount) -> + repeat(docCount) { + val docId = (it + 1).toString() + val docMessage = + """ + { + "message": "$indexName doc num $docId" + } + """.trimIndent() + indexDoc(indexName, docId, docMessage) + } + } - // THEN - val shards = responseMap[WRAPPER_FIELD] as List> - val returnedIndices = - shards.map { (it[CatIndicesResponseWrapper.IndexInfo.INDEX_FIELD] as String) to it }.toMap() + val input = randomClusterMetricsInput(path = path) + + // WHEN + val responseMap = (executeTransportAction(input, client())).toMap() + + // THEN + val shards = responseMap[WRAPPER_FIELD] as List> + val returnedIndices = + shards.map { (it[CatIndicesResponseWrapper.IndexInfo.INDEX_FIELD] as String) to it }.toMap() - assertEquals(pathParamsIndices.size, returnedIndices.keys.size) - testIndices.forEach { (indexName, docCount) -> - if (pathParamsIndices.contains(indexName)) { + assertEquals(testIndices.size, returnedIndices.keys.size) + testIndices.forEach { (indexName, docCount) -> assertEquals( indexName, - returnedIndices[indexName]?.get(CatIndicesResponseWrapper.IndexInfo.INDEX_FIELD) as String + returnedIndices[indexName]?.get(CatIndicesResponseWrapper.IndexInfo.INDEX_FIELD) as String, ) assertEquals( docCount.toString(), - returnedIndices[indexName]?.get(CatIndicesResponseWrapper.IndexInfo.DOCS_COUNT_FIELD) as String + returnedIndices[indexName]?.get(CatIndicesResponseWrapper.IndexInfo.DOCS_COUNT_FIELD) as String, ) } } - } - - suspend fun `test CatIndicesResponseWrapper returns with all indices when empty pathParams`() { - // GIVEN - val testIndices = (1..5).map { - "test-index${randomAlphaOfLength(10).lowercase()}" to randomIntBetween(1, 10) - }.toMap() - - testIndices.forEach { (indexName, docCount) -> - repeat(docCount) { - val docId = (it + 1).toString() - val 
docMessage = """ - { - "message": "$indexName doc num $docId" - } - """.trimIndent() - indexDoc(indexName, docId, docMessage) - } - } - - val input = randomClusterMetricsInput(path = path) - - // WHEN - val responseMap = (executeTransportAction(input, client())).toMap() - - // THEN - val shards = responseMap[WRAPPER_FIELD] as List> - val returnedIndices = - shards.map { (it[CatIndicesResponseWrapper.IndexInfo.INDEX_FIELD] as String) to it }.toMap() - - assertEquals(testIndices.size, returnedIndices.keys.size) - testIndices.forEach { (indexName, docCount) -> - assertEquals( - indexName, - returnedIndices[indexName]?.get(CatIndicesResponseWrapper.IndexInfo.INDEX_FIELD) as String - ) - assertEquals( - docCount.toString(), - returnedIndices[indexName]?.get(CatIndicesResponseWrapper.IndexInfo.DOCS_COUNT_FIELD) as String - ) - } - } - private fun indexDoc(index: String, id: String, doc: String) { - client().prepareIndex(index).setId(id) - .setSource(doc, XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get() + private fun indexDoc( + index: String, + id: String, + doc: String, + ) { + client() + .prepareIndex(index) + .setId(id) + .setSource(doc, XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get() } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatShardsWrappersIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatShardsWrappersIT.kt index c8b5db561..1d37cf738 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatShardsWrappersIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/CatShardsWrappersIT.kt @@ -5,6 +5,7 @@ package org.opensearch.alerting.util.clusterMetricsMonitorHelpers +import kotlinx.coroutines.runBlocking import org.opensearch.action.support.WriteRequest import org.opensearch.alerting.randomClusterMetricsInput import 
org.opensearch.alerting.util.clusterMetricsMonitorHelpers.CatShardsResponseWrapper.Companion.WRAPPER_FIELD @@ -12,10 +13,12 @@ import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.alerting.model.ClusterMetricsInput import org.opensearch.core.common.Strings import org.opensearch.test.OpenSearchSingleNodeTestCase +import kotlin.test.Test class CatShardsWrappersIT : OpenSearchSingleNodeTestCase() { private val path = ClusterMetricsInput.ClusterMetricType.CAT_SHARDS.defaultPath + @Test fun `test CatShardsRequestWrapper validate valid pathParams`() { // GIVEN val pathParams = "index1,index_2,index-3" @@ -28,6 +31,7 @@ class CatShardsWrappersIT : OpenSearchSingleNodeTestCase() { assertEquals(3, requestWrapper.indicesStatsRequest.indices().size) } + @Test fun `test CatShardsRequestWrapper validate without providing pathParams`() { // GIVEN & WHEN val requestWrapper = CatShardsRequestWrapper() @@ -37,6 +41,7 @@ class CatShardsWrappersIT : OpenSearchSingleNodeTestCase() { assertNull(requestWrapper.indicesStatsRequest.indices()) } + @Test fun `test CatShardsRequestWrapper validate blank pathParams`() { // GIVEN val pathParams = " " @@ -49,6 +54,7 @@ class CatShardsWrappersIT : OpenSearchSingleNodeTestCase() { assertNull(requestWrapper.indicesStatsRequest.indices()) } + @Test fun `test CatShardsRequestWrapper validate empty pathParams`() { // GIVEN val pathParams = "" @@ -61,6 +67,7 @@ class CatShardsWrappersIT : OpenSearchSingleNodeTestCase() { assertNull(requestWrapper.indicesStatsRequest.indices()) } + @Test fun `test CatShardsRequestWrapper validate invalid pathParams`() { // GIVEN val pathParams = "_index1,index^2" @@ -69,97 +76,115 @@ class CatShardsWrappersIT : OpenSearchSingleNodeTestCase() { assertThrows(IllegalArgumentException::class.java) { CatShardsRequestWrapper(pathParams = pathParams) } } - suspend fun `test CatShardsResponseWrapper returns with only indices in pathParams`() { - // GIVEN - val testIndices = (1..5).map { - 
"test-index${randomAlphaOfLength(10).lowercase()}" to randomIntBetween(1, 10) - }.toMap() - - testIndices.forEach { (indexName, docCount) -> - repeat(docCount) { - val docId = (it + 1).toString() - val docMessage = """ - { - "message": "$indexName doc num $docId" - } - """.trimIndent() - indexDoc(indexName, docId, docMessage) + @Test + fun `test CatShardsResponseWrapper returns with only indices in pathParams`() = + runBlocking { + // GIVEN + val testIndices = + (1..5) + .map { + "test-index${randomAlphaOfLength(10).lowercase()}" to randomIntBetween(1, 10) + }.toMap() + + testIndices.forEach { (indexName, docCount) -> + repeat(docCount) { + val docId = (it + 1).toString() + val docMessage = + """ + { + "message": "$indexName doc num $docId" + } + """.trimIndent() + indexDoc(indexName, docId, docMessage) + } } - } /* Creating a subset of indices to use for the pathParams to test that all indices on the cluster ARE NOT returned. - */ - val pathParamsIndices = testIndices.keys.toList().subList(1, testIndices.size - 1) - val pathParams = pathParamsIndices.joinToString(",") - val input = randomClusterMetricsInput(path = path, pathParams = pathParams) + */ + val pathParamsIndices = testIndices.keys.toList().subList(1, testIndices.size - 1) + val pathParams = pathParamsIndices.joinToString(",") + val input = randomClusterMetricsInput(path = path, pathParams = pathParams) + + // WHEN + val responseMap = (executeTransportAction(input, client())).toMap() + + // THEN + val shards = responseMap[WRAPPER_FIELD] as List> + val returnedIndices = + shards.map { (it[CatShardsResponseWrapper.ShardInfo.INDEX_FIELD] as String) to it }.toMap() + + assertEquals(pathParamsIndices.size, returnedIndices.keys.size) + testIndices.forEach { (indexName, docCount) -> + if (pathParamsIndices.contains(indexName)) { + assertEquals( + indexName, + returnedIndices[indexName]?.get(CatShardsResponseWrapper.ShardInfo.INDEX_FIELD) as String, + ) + assertEquals( + docCount.toString(), + 
returnedIndices[indexName]?.get(CatShardsResponseWrapper.ShardInfo.DOCS_FIELD) as String, + ) + } + } + } - // WHEN - val responseMap = (executeTransportAction(input, client())).toMap() + @Test + fun `test CatShardsResponseWrapper returns with all indices when empty pathParams`() = + runBlocking { + // GIVEN + val testIndices = + (1..5) + .map { + "test-index${randomAlphaOfLength(10).lowercase()}" to randomIntBetween(1, 10) + }.toMap() + + testIndices.forEach { (indexName, docCount) -> + repeat(docCount) { + val docId = (it + 1).toString() + val docMessage = + """ + { + "message": "$indexName doc num $docId" + } + """.trimIndent() + indexDoc(indexName, docId, docMessage) + } + } - // THEN - val shards = responseMap[WRAPPER_FIELD] as List> - val returnedIndices = - shards.map { (it[CatShardsResponseWrapper.ShardInfo.INDEX_FIELD] as String) to it }.toMap() + val input = randomClusterMetricsInput(path = path) + + // WHEN + val responseMap = (executeTransportAction(input, client())).toMap() + + // THEN + val shards = responseMap[WRAPPER_FIELD] as List> + val returnedIndices = + shards.map { (it[CatShardsResponseWrapper.ShardInfo.INDEX_FIELD] as String) to it }.toMap() - assertEquals(pathParamsIndices.size, returnedIndices.keys.size) - testIndices.forEach { (indexName, docCount) -> - if (pathParamsIndices.contains(indexName)) { + assertEquals(testIndices.size, returnedIndices.keys.size) + testIndices.forEach { (indexName, docCount) -> assertEquals( indexName, - returnedIndices[indexName]?.get(CatShardsResponseWrapper.ShardInfo.INDEX_FIELD) as String + returnedIndices[indexName]?.get(CatShardsResponseWrapper.ShardInfo.INDEX_FIELD) as String, ) assertEquals( docCount.toString(), - returnedIndices[indexName]?.get(CatShardsResponseWrapper.ShardInfo.DOCS_FIELD) as String + returnedIndices[indexName]?.get(CatShardsResponseWrapper.ShardInfo.DOCS_FIELD) as String, ) } } - } - - suspend fun `test CatShardsResponseWrapper returns with all indices when empty pathParams`() { - // 
GIVEN - val testIndices = (1..5).map { - "test-index${randomAlphaOfLength(10).lowercase()}" to randomIntBetween(1, 10) - }.toMap() - - testIndices.forEach { (indexName, docCount) -> - repeat(docCount) { - val docId = (it + 1).toString() - val docMessage = """ - { - "message": "$indexName doc num $docId" - } - """.trimIndent() - indexDoc(indexName, docId, docMessage) - } - } - - val input = randomClusterMetricsInput(path = path) - - // WHEN - val responseMap = (executeTransportAction(input, client())).toMap() - - // THEN - val shards = responseMap[WRAPPER_FIELD] as List> - val returnedIndices = - shards.map { (it[CatShardsResponseWrapper.ShardInfo.INDEX_FIELD] as String) to it }.toMap() - - assertEquals(testIndices.size, returnedIndices.keys.size) - testIndices.forEach { (indexName, docCount) -> - assertEquals( - indexName, - returnedIndices[indexName]?.get(CatShardsResponseWrapper.ShardInfo.INDEX_FIELD) as String - ) - assertEquals( - docCount.toString(), - returnedIndices[indexName]?.get(CatShardsResponseWrapper.ShardInfo.DOCS_FIELD) as String - ) - } - } - private fun indexDoc(index: String, id: String, doc: String) { - client().prepareIndex(index).setId(id) - .setSource(doc, XContentType.JSON).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get() + private fun indexDoc( + index: String, + id: String, + doc: String, + ) { + client() + .prepareIndex(index) + .setId(id) + .setSource(doc, XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get() } } diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/SupportedClusterMetricsSettingsExtensionsTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/SupportedClusterMetricsSettingsExtensionsTests.kt index bfe5b8dce..0882f80ea 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/SupportedClusterMetricsSettingsExtensionsTests.kt +++ 
b/alerting/src/test/kotlin/org/opensearch/alerting/util/clusterMetricsMonitorHelpers/SupportedClusterMetricsSettingsExtensionsTests.kt @@ -6,63 +6,75 @@ package org.opensearch.alerting.util.clusterMetricsMonitorHelpers import org.opensearch.test.OpenSearchTestCase +import kotlin.test.Test class SupportedClusterMetricsSettingsExtensionsTests : OpenSearchTestCase() { private var expectedResponse = hashMapOf() private var mappedResponse = hashMapOf() private var supportedJsonPayload = hashMapOf>() + @Test fun `test redactFieldsFromResponse with non-empty supportedJsonPayload`() { // GIVEN - mappedResponse = hashMapOf( - ( - "pathRoot1" to hashMapOf( - ("pathRoot1_subPath1" to 11), - ( - "pathRoot1_subPath2" to hashMapOf( - ("pathRoot1_subPath2_subPath1" to 121), + mappedResponse = + hashMapOf( + ( + "pathRoot1" to + hashMapOf( + ("pathRoot1_subPath1" to 11), ( - "pathRoot1_subPath2_subPath2" to hashMapOf( - ("pathRoot1_subPath2_subPath2_subPath1" to 1221) - ) - ) + "pathRoot1_subPath2" to + hashMapOf( + ("pathRoot1_subPath2_subPath1" to 121), + ( + "pathRoot1_subPath2_subPath2" to + hashMapOf( + ("pathRoot1_subPath2_subPath2_subPath1" to 1221), + ) + ), + ) + ), ) - ) - ) ), - ("pathRoot2" to hashMapOf(("pathRoot2_subPath1" to 21), ("pathRoot2_subPath2" to setOf(221, 222, "223string")))), - ("pathRoot3" to hashMapOf(("pathRoot3_subPath1" to 31), ("pathRoot3_subPath2" to setOf(321, 322, "323string")))) - ) + ("pathRoot2" to hashMapOf(("pathRoot2_subPath1" to 21), ("pathRoot2_subPath2" to setOf(221, 222, "223string")))), + ("pathRoot3" to hashMapOf(("pathRoot3_subPath1" to 31), ("pathRoot3_subPath2" to setOf(321, 322, "323string")))), + ) - supportedJsonPayload = hashMapOf( - ( - "pathRoot1" to arrayListOf( - "pathRoot1_subPath1", - "pathRoot1_subPath2.pathRoot1_subPath2_subPath2.pathRoot1_subPath2_subPath2_subPath1" - ) + supportedJsonPayload = + hashMapOf( + ( + "pathRoot1" to + arrayListOf( + "pathRoot1_subPath1", + 
"pathRoot1_subPath2.pathRoot1_subPath2_subPath2.pathRoot1_subPath2_subPath2_subPath1", + ) ), - ("pathRoot2" to arrayListOf("pathRoot2_subPath2")), - ("pathRoot3" to arrayListOf()) - ) + ("pathRoot2" to arrayListOf("pathRoot2_subPath2")), + ("pathRoot3" to arrayListOf()), + ) - expectedResponse = hashMapOf( - ( - "pathRoot1" to hashMapOf( - ("pathRoot1_subPath1" to 11), - ( - "pathRoot1_subPath2" to hashMapOf( + expectedResponse = + hashMapOf( + ( + "pathRoot1" to + hashMapOf( + ("pathRoot1_subPath1" to 11), ( - "pathRoot1_subPath2_subPath2" to hashMapOf( - ("pathRoot1_subPath2_subPath2_subPath1" to 1221) - ) - ) - ) + "pathRoot1_subPath2" to + hashMapOf( + ( + "pathRoot1_subPath2_subPath2" to + hashMapOf( + ("pathRoot1_subPath2_subPath2_subPath1" to 1221), + ) + ), + ) + ), ) - ) ), - ("pathRoot2" to hashMapOf(("pathRoot2_subPath2" to setOf(221, 222, "223string")))), - ("pathRoot3" to hashMapOf(("pathRoot3_subPath1" to 31), ("pathRoot3_subPath2" to setOf(321, 322, "323string")))) - ) + ("pathRoot2" to hashMapOf(("pathRoot2_subPath2" to setOf(221, 222, "223string")))), + ("pathRoot3" to hashMapOf(("pathRoot3_subPath1" to 31), ("pathRoot3_subPath2" to setOf(321, 322, "323string")))), + ) // WHEN val result = redactFieldsFromResponse(mappedResponse, supportedJsonPayload) @@ -71,47 +83,56 @@ class SupportedClusterMetricsSettingsExtensionsTests : OpenSearchTestCase() { assertEquals(expectedResponse, result) } + @Test fun `test redactFieldsFromResponse with empty supportedJsonPayload`() { // GIVEN - mappedResponse = hashMapOf( - ( - "pathRoot1" to hashMapOf( - ("pathRoot1_subPath1" to 11), - ( - "pathRoot1_subPath2" to hashMapOf( - ("pathRoot1_subPath2_subPath1" to 121), + mappedResponse = + hashMapOf( + ( + "pathRoot1" to + hashMapOf( + ("pathRoot1_subPath1" to 11), ( - "pathRoot1_subPath2_subPath2" to hashMapOf( - ("pathRoot1_subPath2_subPath2_subPath1" to 1221) - ) - ) + "pathRoot1_subPath2" to + hashMapOf( + ("pathRoot1_subPath2_subPath1" to 121), + ( + 
"pathRoot1_subPath2_subPath2" to + hashMapOf( + ("pathRoot1_subPath2_subPath2_subPath1" to 1221), + ) + ), + ) + ), ) - ) - ) ), - ("pathRoot2" to hashMapOf(("pathRoot2_subPath1" to 21), ("pathRoot2_subPath2" to setOf(221, 222, "223string")))), - ("pathRoot3" to 3) - ) + ("pathRoot2" to hashMapOf(("pathRoot2_subPath1" to 21), ("pathRoot2_subPath2" to setOf(221, 222, "223string")))), + ("pathRoot3" to 3), + ) - expectedResponse = hashMapOf( - ( - "pathRoot1" to hashMapOf( - ("pathRoot1_subPath1" to 11), - ( - "pathRoot1_subPath2" to hashMapOf( - ("pathRoot1_subPath2_subPath1" to 121), + expectedResponse = + hashMapOf( + ( + "pathRoot1" to + hashMapOf( + ("pathRoot1_subPath1" to 11), ( - "pathRoot1_subPath2_subPath2" to hashMapOf( - ("pathRoot1_subPath2_subPath2_subPath1" to 1221) - ) - ) - ) + "pathRoot1_subPath2" to + hashMapOf( + ("pathRoot1_subPath2_subPath1" to 121), + ( + "pathRoot1_subPath2_subPath2" to + hashMapOf( + ("pathRoot1_subPath2_subPath2_subPath1" to 1221), + ) + ), + ) + ), ) - ) ), - ("pathRoot2" to hashMapOf(("pathRoot2_subPath1" to 21), ("pathRoot2_subPath2" to setOf(221, 222, "223string")))), - ("pathRoot3" to 3) - ) + ("pathRoot2" to hashMapOf(("pathRoot2_subPath1" to 21), ("pathRoot2_subPath2" to setOf(221, 222, "223string")))), + ("pathRoot3" to 3), + ) // WHEN val result = redactFieldsFromResponse(mappedResponse, supportedJsonPayload) diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilServiceIT.kt b/alerting/src/test/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilServiceIT.kt index 903eedb44..e1d069ea7 100644 --- a/alerting/src/test/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilServiceIT.kt +++ b/alerting/src/test/kotlin/org/opensearch/alerting/util/destinationmigration/DestinationMigrationUtilServiceIT.kt @@ -22,28 +22,31 @@ import org.opensearch.test.OpenSearchTestCase import java.time.Instant import 
java.util.UUID import java.util.concurrent.TimeUnit +import kotlin.test.Test class DestinationMigrationUtilServiceIT : AlertingRestTestCase() { - + @Test fun `test migrateData`() { if (isNotificationPluginInstalled()) { // Create alerting config index createRandomMonitor() - val emailAccount = EmailAccount( - name = "test", - email = "test@email.com", - host = "smtp.com", - port = 25, - method = EmailAccount.MethodType.NONE, - username = null, - password = null - ) + val emailAccount = + EmailAccount( + name = "test", + email = "test@email.com", + host = "smtp.com", + port = 25, + method = EmailAccount.MethodType.NONE, + username = null, + password = null, + ) val emailAccountDoc = "{\"email_account\" : ${emailAccount.toJsonString()}}" - val emailGroup = EmailGroup( - name = "test", - emails = listOf(EmailEntry("test@email.com")) - ) + val emailGroup = + EmailGroup( + name = "test", + emails = listOf(EmailEntry("test@email.com")), + ) val emailGroupDoc = "{\"email_group\" : ${emailGroup.toJsonString()}}" val emailAccountId = UUID.randomUUID().toString() val emailGroupId = UUID.randomUUID().toString() @@ -52,17 +55,18 @@ class DestinationMigrationUtilServiceIT : AlertingRestTestCase() { val recipient = Recipient(Recipient.RecipientType.EMAIL, null, "test@email.com") val email = Email(emailAccountId, listOf(recipient)) - val emailDest = Destination( - id = UUID.randomUUID().toString(), - type = DestinationType.EMAIL, - name = "test", - user = randomUser(), - lastUpdateTime = Instant.now(), - chime = null, - slack = null, - customWebhook = null, - email = email - ) + val emailDest = + Destination( + id = UUID.randomUUID().toString(), + type = DestinationType.EMAIL, + name = "test", + user = randomUser(), + lastUpdateTime = Instant.now(), + chime = null, + slack = null, + customWebhook = null, + email = email, + ) val slackDestination = getSlackDestination().copy(id = UUID.randomUUID().toString()) val chimeDestination = getChimeDestination().copy(id = 
UUID.randomUUID().toString()) val customWebhookDestination = getCustomWebhookDestination().copy(id = UUID.randomUUID().toString()) @@ -71,11 +75,12 @@ class DestinationMigrationUtilServiceIT : AlertingRestTestCase() { val ids = mutableListOf(emailAccountId, emailGroupId) for (destination in destinations) { - val dest = """ + val dest = + """ { "destination" : ${destination.toJsonString()} } - """.trimIndent() + """.trimIndent() indexDocWithAdminClient(SCHEDULED_JOBS_INDEX, destination.id, dest) ids.add(destination.id) } @@ -87,16 +92,17 @@ class DestinationMigrationUtilServiceIT : AlertingRestTestCase() { }, 2, TimeUnit.MINUTES) for (id in ids) { - val response = client().makeRequest( - "GET", - "_plugins/_notifications/configs/$id" - ) + val response = + client().makeRequest( + "GET", + "_plugins/_notifications/configs/$id", + ) assertEquals(RestStatus.OK, response.restStatus()) try { client().makeRequest( "GET", - ".opendistro-alerting-config/_doc/$id" + ".opendistro-alerting-config/_doc/$id", ) fail("Expecting ResponseException") } catch (e: ResponseException) { diff --git a/build.gradle b/build.gradle index 40010fef0..280050939 100644 --- a/build.gradle +++ b/build.gradle @@ -79,7 +79,7 @@ configurations.ktlint.incoming.beforeResolve { } dependencies { - add("ktlint", "com.pinterest:ktlint:0.45.1") { + add("ktlint", "com.pinterest.ktlint:ktlint-cli:1.8.0") { attributes { attribute(Bundling.BUNDLING_ATTRIBUTE, objects.named(Bundling, Bundling.EXTERNAL)) } @@ -90,24 +90,14 @@ task ktlint(type: JavaExec, group: "verification") { description = "Check Kotlin code style." 
mainClass = "com.pinterest.ktlint.Main" classpath = configurations.ktlint - args "alerting/**/*.kt", "elastic-api/**/*.kt", "core/**/*.kt" - - // Skip on JDK 25 - onlyIf { - JavaVersion.current() < JavaVersion.VERSION_25 - } + args "alerting/**/*.kt", "sample-remote-monitor-plugin/**/*.kt", "core/**/*.kt", "spi/**/*.kt" } task ktlintFormat(type: JavaExec, group: "formatting") { description = "Fix Kotlin code style deviations." mainClass = "com.pinterest.ktlint.Main" classpath = configurations.ktlint - args "-F", "alerting/**/*.kt", "elastic-api/**/*.kt", "core/**/*.kt" - - // Skip on JDK 25 until ktlint fully supports it - onlyIf { - JavaVersion.current() < JavaVersion.VERSION_25 - } + args "-F", "alerting/**/*.kt", "sample-remote-monitor-plugin/**/*.kt", "core/**/*.kt", "spi/**/*.kt" } check.dependsOn ktlint diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/JobRunner.kt b/core/src/main/kotlin/org/opensearch/alerting/core/JobRunner.kt index c251c8c6a..7722a9de8 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/JobRunner.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/JobRunner.kt @@ -13,5 +13,9 @@ interface JobRunner { fun postIndex(job: ScheduledJob) - fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant) + fun runJob( + job: ScheduledJob, + periodStart: Instant, + periodEnd: Instant, + ) } diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeper.kt b/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeper.kt index e08b1360f..733087526 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeper.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeper.kt @@ -76,8 +76,10 @@ class JobSweeper( private val threadPool: ThreadPool, private val xContentRegistry: NamedXContentRegistry, private val scheduler: JobScheduler, - private val sweepableJobTypes: List -) : ClusterStateListener, IndexingOperationListener, LifecycleListener() { + private val sweepableJobTypes: 
List, +) : LifecycleListener(), + ClusterStateListener, + IndexingOperationListener { private val logger = LogManager.getLogger(javaClass) private val fullSweepExecutor = Executors.newSingleThreadExecutor(OpenSearchExecutors.daemonThreadFactory("opendistro_job_sweeper")) @@ -89,11 +91,17 @@ class JobSweeper( @Volatile private var lastFullSweepTimeNano = System.nanoTime() @Volatile private var requestTimeout = REQUEST_TIMEOUT.get(settings) + @Volatile private var sweepPeriod = SWEEP_PERIOD.get(settings) + @Volatile private var sweeperEnabled = SWEEPER_ENABLED.get(settings) + @Volatile private var sweepPageSize = SWEEP_PAGE_SIZE.get(settings) + @Volatile private var sweepBackoffMillis = SWEEP_BACKOFF_MILLIS.get(settings) + @Volatile private var sweepBackoffRetryCount = SWEEP_BACKOFF_RETRY_COUNT.get(settings) + @Volatile private var sweepSearchBackoff = BackoffPolicy.exponentialBackoff(sweepBackoffMillis, sweepBackoffRetryCount) init { @@ -158,7 +166,11 @@ class JobSweeper( * of jobs are not scheduled. It schedules job only if it is one of the [sweepableJobTypes] * */ - override fun postIndex(shardId: ShardId, index: Engine.Index, result: Engine.IndexResult) { + override fun postIndex( + shardId: ShardId, + index: Engine.Index, + result: Engine.IndexResult, + ) { if (!isSweepingEnabled()) return if (result.resultType != Engine.Result.Type.SUCCESS) { @@ -183,7 +195,11 @@ class JobSweeper( * This callback is invoked when a job is deleted from a shard. The job is descheduled. Relies on all delete operations * using optimistic concurrency control to ensure that stale versions of jobs are not scheduled. 
*/ - override fun postDelete(shardId: ShardId, delete: Engine.Delete, result: Engine.DeleteResult) { + override fun postDelete( + shardId: ShardId, + delete: Engine.Delete, + result: Engine.DeleteResult, + ) { if (!isSweepingEnabled()) return if (result.resultType != Engine.Result.Type.SUCCESS) { @@ -225,7 +241,6 @@ class JobSweeper( } private fun initBackgroundSweep() { - // if sweeping disabled, background sweep should not be triggered if (!isSweepingEnabled()) return @@ -240,20 +255,21 @@ class JobSweeper( } // Setup an anti-entropy/self-healing background sweep, in case a sweep that was triggered by an event fails. - val scheduledSweep = Runnable { - val elapsedTime = getFullSweepElapsedTime() - - // Rate limit to at most one full sweep per sweep period - // The schedule runs may wake up a few milliseconds early. - // Delta will be giving some buffer on the schedule to allow waking up slightly earlier. - val delta = sweepPeriod.millis - elapsedTime.millis - if (delta < 20L) { // give 20ms buffer. - fullSweepExecutor.submit { - logger.debug("Performing background sweep of scheduled jobs.") - sweepAllShards() + val scheduledSweep = + Runnable { + val elapsedTime = getFullSweepElapsedTime() + + // Rate limit to at most one full sweep per sweep period + // The schedule runs may wake up a few milliseconds early. + // Delta will be giving some buffer on the schedule to allow waking up slightly earlier. + val delta = sweepPeriod.millis - elapsedTime.millis + if (delta < 20L) { // give 20ms buffer. + fullSweepExecutor.submit { + logger.debug("Performing background sweep of scheduled jobs.") + sweepAllShards() + } } } - } scheduledFullSweep = threadPool.scheduleWithFixedDelay(scheduledSweep, sweepPeriod, ThreadPool.Names.SAME) } @@ -268,13 +284,15 @@ class JobSweeper( // Find all shards that are currently assigned to this node. 
val localNodeId = clusterState.nodes.localNodeId - val localShards = clusterState.routingTable.allShards(ScheduledJob.SCHEDULED_JOBS_INDEX) - // Find all active shards - .filter { it.active() } - // group by shardId - .groupBy { it.shardId() } - // assigned to local node - .filter { (_, shards) -> shards.any { it.currentNodeId() == localNodeId } } + val localShards = + clusterState.routingTable + .allShards(ScheduledJob.SCHEDULED_JOBS_INDEX) + // Find all active shards + .filter { it.active() } + // group by shardId + .groupBy { it.shardId() } + // assigned to local node + .filter { (_, shards) -> shards.any { it.currentNodeId() == localNodeId } } // Remove all jobs on shards that are no longer assigned to this node. val removedShards = sweptJobs.keys - localShards.keys @@ -295,7 +313,11 @@ class JobSweeper( lastFullSweepTimeNano = System.nanoTime() } - private fun sweepShard(shardId: ShardId, shardNodes: ShardNodes, startAfter: String = "") { + private fun sweepShard( + shardId: ShardId, + shardNodes: ShardNodes, + startAfter: String = "", + ) { val logger = Loggers.getLogger(javaClass, shardId) logger.debug("Sweeping shard $shardId") @@ -312,35 +334,40 @@ class JobSweeper( while (searchAfter != null) { val boolQueryBuilder = BoolQueryBuilder() sweepableJobTypes.forEach { boolQueryBuilder.should(QueryBuilders.existsQuery(it)) } - val jobSearchRequest = SearchRequest() - .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) - .preference("_shards:${shardId.id}|_only_local") - .source( - SearchSourceBuilder.searchSource() - .version(true) - .sort( - FieldSortBuilder("_id") - .unmappedType("keyword") - .missing("_last") - ) - .searchAfter(arrayOf(searchAfter)) - .size(sweepPageSize) - .query(boolQueryBuilder) - ) + val jobSearchRequest = + SearchRequest() + .indices(ScheduledJob.SCHEDULED_JOBS_INDEX) + .preference("_shards:${shardId.id}|_only_local") + .source( + SearchSourceBuilder + .searchSource() + .version(true) + .sort( + FieldSortBuilder("_id") + 
.unmappedType("keyword") + .missing("_last"), + ).searchAfter(arrayOf(searchAfter)) + .size(sweepPageSize) + .query(boolQueryBuilder), + ) - val response = sweepSearchBackoff.retry { - client.search(jobSearchRequest).actionGet(requestTimeout) - } + val response = + sweepSearchBackoff.retry { + client.search(jobSearchRequest).actionGet(requestTimeout) + } if (response.status() != RestStatus.OK) { logger.error("Error sweeping shard $shardId.", response.firstFailureOrNull()) return } for (hit in response.hits) { if (shardNodes.isOwningNode(hit.id)) { - val xcp = XContentHelper.createParser( - xContentRegistry, LoggingDeprecationHandler.INSTANCE, - hit.sourceRef, XContentType.JSON - ) + val xcp = + XContentHelper.createParser( + xContentRegistry, + LoggingDeprecationHandler.INSTANCE, + hit.sourceRef, + XContentType.JSON, + ) parseAndSweepJob(xcp, shardId, hit.id, hit.version, hit.sourceRef) } } @@ -353,9 +380,10 @@ class JobSweeper( jobId: JobId, newVersion: JobVersion, job: ScheduledJob?, - failedToParse: Boolean = false + failedToParse: Boolean = false, ) { - sweptJobs.getOrPut(shardId) { ConcurrentHashMap() } + sweptJobs + .getOrPut(shardId) { ConcurrentHashMap() } // Use [compute] to update atomically in case another thread concurrently indexes/deletes the same job .compute(jobId) { _, currentVersion -> val jobCurrentlyScheduled = scheduler.scheduledJobs().contains(jobId) @@ -401,7 +429,7 @@ class JobSweeper( newVersion: JobVersion, currentVersion: JobVersion?, jobCurrentlyScheduled: Boolean, - job: ScheduledJob? + job: ScheduledJob?, ): Boolean { // newVersion should not be [Versions.NOT_FOUND] here since it's passed in from existing search hits // or successful doc delete operations @@ -417,33 +445,34 @@ class JobSweeper( jobId: JobId, jobVersion: JobVersion, jobSource: BytesReference, - typeIsParsed: Boolean = false - ): ScheduledJob? { - return try { + typeIsParsed: Boolean = false, + ): ScheduledJob? 
= + try { val job = parseScheduledJob(xcp, jobId, jobVersion, typeIsParsed) sweep(shardId, jobId, jobVersion, job) job } catch (e: Exception) { logger.warn( "Unable to parse ScheduledJob source: {}", - Strings.cleanTruncate(jobSource.utf8ToString(), 1000) + Strings.cleanTruncate(jobSource.utf8ToString(), 1000), ) sweep(shardId, jobId, jobVersion, null, true) null } - } - private fun parseScheduledJob(xcp: XContentParser, jobId: JobId, jobVersion: JobVersion, typeIsParsed: Boolean): ScheduledJob { - return if (typeIsParsed) { + private fun parseScheduledJob( + xcp: XContentParser, + jobId: JobId, + jobVersion: JobVersion, + typeIsParsed: Boolean, + ): ScheduledJob = + if (typeIsParsed) { ScheduledJob.parse(xcp, xcp.currentName(), jobId, jobVersion) } else { ScheduledJob.parse(xcp, jobId, jobVersion) } - } - private fun getFullSweepElapsedTime(): TimeValue { - return TimeValue.timeValueNanos(System.nanoTime() - lastFullSweepTimeNano) - } + private fun getFullSweepElapsedTime(): TimeValue = TimeValue.timeValueNanos(System.nanoTime() - lastFullSweepTimeNano) fun getJobSweeperMetrics(): JobSweeperMetrics { if (!isSweepingEnabled()) { @@ -460,11 +489,18 @@ class JobSweeper( return sweepableJobTypes.contains(jobType) } - private fun isOwningNode(shardId: ShardId, jobId: JobId): Boolean { + private fun isOwningNode( + shardId: ShardId, + jobId: JobId, + ): Boolean { val localNodeId = clusterService.localNode().id - val shardNodeIds = clusterService.state().routingTable.shardRoutingTable(shardId) - .filter { it.active() } - .map { it.currentNodeId() } + val shardNodeIds = + clusterService + .state() + .routingTable + .shardRoutingTable(shardId) + .filter { it.active() } + .map { it.currentNodeId() } val shardNodes = ShardNodes(localNodeId, shardNodeIds) return shardNodes.isOwningNode(jobId) } @@ -478,8 +514,10 @@ class JobSweeper( * Implementation notes: This class is not thread safe. It uses the same [hash function][Murmur3HashFunction] that OpenSearch uses * for routing. 
For each real node `100` virtual nodes are added to provide a good distribution. */ -private class ShardNodes(val localNodeId: String, activeShardNodeIds: Collection) { - +private class ShardNodes( + val localNodeId: String, + activeShardNodeIds: Collection, +) { private val circle = TreeMap() companion object { diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeperMetrics.kt b/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeperMetrics.kt index 9a10586d1..df6b2295b 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeperMetrics.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/JobSweeperMetrics.kt @@ -12,8 +12,11 @@ import org.opensearch.core.xcontent.ToXContent import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder -data class JobSweeperMetrics(val lastFullSweepTimeMillis: Long, val fullSweepOnTime: Boolean) : ToXContentFragment, Writeable { - +data class JobSweeperMetrics( + val lastFullSweepTimeMillis: Long, + val fullSweepOnTime: Boolean, +) : ToXContentFragment, + Writeable { constructor(si: StreamInput) : this(si.readLong(), si.readBoolean()) override fun writeTo(out: StreamOutput) { @@ -21,7 +24,10 @@ data class JobSweeperMetrics(val lastFullSweepTimeMillis: Long, val fullSweepOnT out.writeBoolean(fullSweepOnTime) } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.field("last_full_sweep_time_millis", lastFullSweepTimeMillis) builder.field("full_sweep_on_time", fullSweepOnTime) return builder diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/ScheduledJobIndices.kt b/core/src/main/kotlin/org/opensearch/alerting/core/ScheduledJobIndices.kt index 87c8d362e..041bdfedd 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/ScheduledJobIndices.kt +++ 
b/core/src/main/kotlin/org/opensearch/alerting/core/ScheduledJobIndices.kt @@ -20,14 +20,18 @@ import org.opensearch.transport.client.AdminClient * [initScheduledJobIndex] is called before indexing a new scheduled job. It verifies that the index exists before * allowing the index to go through. This is to ensure the correct mappings exist for [ScheduledJob]. */ -class ScheduledJobIndices(private val client: AdminClient, private val clusterService: ClusterService) { - +class ScheduledJobIndices( + private val client: AdminClient, + private val clusterService: ClusterService, +) { companion object { @JvmStatic - fun scheduledJobMappings(): String { - return ScheduledJobIndices::class.java.classLoader.getResource("mappings/scheduled-jobs.json").readText() - } + fun scheduledJobMappings(): String = + ScheduledJobIndices::class.java.classLoader + .getResource("mappings/scheduled-jobs.json") + .readText() } + /** * Initialize the indices required for scheduled jobs. * First check if the index exists, and if not create the index with the provided callback listeners. 
@@ -36,9 +40,10 @@ class ScheduledJobIndices(private val client: AdminClient, private val clusterSe */ fun initScheduledJobIndex(actionListener: ActionListener) { if (!scheduledJobIndexExists()) { - var indexRequest = CreateIndexRequest(ScheduledJob.SCHEDULED_JOBS_INDEX) - .mapping(scheduledJobMappings()) - .settings(Settings.builder().put("index.hidden", true).build()) + var indexRequest = + CreateIndexRequest(ScheduledJob.SCHEDULED_JOBS_INDEX) + .mapping(scheduledJobMappings()) + .settings(Settings.builder().put("index.hidden", true).build()) client.indices().create(indexRequest, actionListener) } } diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobStats.kt b/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobStats.kt index 0e02817fc..2c8a09ab1 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobStats.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobStats.kt @@ -21,15 +21,17 @@ import java.util.Locale /** * Scheduled job stat that will be generated by each node. */ -class ScheduledJobStats : BaseNodeResponse, ToXContentFragment { - - enum class ScheduleStatus(val status: String) { +class ScheduledJobStats : + BaseNodeResponse, + ToXContentFragment { + enum class ScheduleStatus( + val status: String, + ) { RED("red"), - GREEN("green"); + GREEN("green"), + ; - override fun toString(): String { - return status - } + override fun toString(): String = status } var status: ScheduleStatus @@ -46,7 +48,7 @@ class ScheduledJobStats : BaseNodeResponse, ToXContentFragment { node: DiscoveryNode, status: ScheduleStatus, jobSweeperMetrics: JobSweeperMetrics?, - jobsInfo: Array? 
+ jobsInfo: Array?, ) : super(node) { this.status = status this.jobSweeperMetrics = jobSweeperMetrics @@ -65,7 +67,10 @@ class ScheduledJobStats : BaseNodeResponse, ToXContentFragment { out.writeOptionalArray(jobInfos) } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.field("name", node.name) builder.field("schedule_status", status) builder.field("roles", node.roles.map { it.roleName().uppercase(Locale.getDefault()) }) diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsAction.kt b/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsAction.kt index 698c6c44e..09383421a 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsAction.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsAction.kt @@ -13,13 +13,12 @@ class ScheduledJobsStatsAction : ActionType(NAME, re val INSTANCE = ScheduledJobsStatsAction() const val NAME = "cluster:admin/opendistro/_scheduled_jobs/stats" - val reader = Writeable.Reader { - val response = ScheduledJobsStatsResponse(it) - response - } + val reader = + Writeable.Reader { + val response = ScheduledJobsStatsResponse(it) + response + } } - override fun getResponseReader(): Writeable.Reader { - return reader - } + override fun getResponseReader(): Writeable.Reader = reader } diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsRequest.kt b/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsRequest.kt index 1d9bd0578..824886b20 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsRequest.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsRequest.kt @@ -17,6 +17,7 @@ import java.io.IOException 
class ScheduledJobsStatsRequest : BaseNodesRequest { var jobSchedulingMetrics: Boolean = true var jobsInfo: Boolean = true + // show Alerting V2 scheduled jobs if true, Alerting V1 scheduled jobs if false, all scheduled jobs if null var showAlertingV2ScheduledJobs: Boolean? = null diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsResponse.kt b/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsResponse.kt index edfcc0cce..40cc98298 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsResponse.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsResponse.kt @@ -20,8 +20,9 @@ import org.opensearch.core.xcontent.XContentBuilder /** * ScheduledJobsStatsResponse is a class that will contain all the response from each node. */ -class ScheduledJobsStatsResponse : BaseNodesResponse, ToXContentFragment { - +class ScheduledJobsStatsResponse : + BaseNodesResponse, + ToXContentFragment { private var scheduledJobEnabled: Boolean = false private var indexExists: Boolean? = null private var indexHealth: ClusterIndexHealth? = null @@ -38,7 +39,7 @@ class ScheduledJobsStatsResponse : BaseNodesResponse, ToXCont failures: List, scheduledJobEnabled: Boolean, indexExists: Boolean, - indexHealth: ClusterIndexHealth? 
+ indexHealth: ClusterIndexHealth?, ) : super(clusterName, nodeResponses, failures) { this.scheduledJobEnabled = scheduledJobEnabled this.indexExists = indexExists @@ -47,16 +48,18 @@ class ScheduledJobsStatsResponse : BaseNodesResponse, ToXCont override fun writeNodesTo( out: StreamOutput, - nodes: MutableList + nodes: MutableList, ) { out.writeList(nodes) } - override fun readNodesFrom(si: StreamInput): MutableList { - return si.readList { ScheduledJobStats.readScheduledJobStatus(it) } - } + override fun readNodesFrom(si: StreamInput): MutableList = + si.readList { ScheduledJobStats.readScheduledJobStatus(it) } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { builder.field(LegacyOpenDistroScheduledJobSettings.SWEEPER_ENABLED.key, scheduledJobEnabled) builder.field(ScheduledJobSettings.SWEEPER_ENABLED.key, scheduledJobEnabled) builder.field("scheduled_job_index_exists", indexExists) diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsTransportAction.kt b/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsTransportAction.kt index 398f5634a..e959b8780 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsTransportAction.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/action/node/ScheduledJobsStatsTransportAction.kt @@ -26,9 +26,13 @@ import java.io.IOException private val log = LogManager.getLogger(ScheduledJobsStatsTransportAction::class.java) -class ScheduledJobsStatsTransportAction : TransportNodesAction { - +class ScheduledJobsStatsTransportAction : + TransportNodesAction< + ScheduledJobsStatsRequest, + ScheduledJobsStatsResponse, + ScheduledJobsStatsTransportAction.ScheduledJobStatusRequest, + ScheduledJobStats, + > { private val jobSweeper: JobSweeper private val jobScheduler: JobScheduler 
private val scheduledJobIndices: ScheduledJobIndices @@ -41,7 +45,7 @@ class ScheduledJobsStatsTransportAction : TransportNodesAction, - failures: MutableList + failures: MutableList, ): ScheduledJobsStatsResponse { val scheduledJobEnabled = jobSweeper.isSweepingEnabled() val scheduledJobIndexExist = scheduledJobIndices.scheduledJobIndexExists() @@ -81,17 +81,13 @@ class ScheduledJobsStatsTransportAction : TransportNodesAction, - jobSweeperMetrics: JobSweeperMetrics + jobSweeperMetrics: JobSweeperMetrics, ): ScheduledJobStats.ScheduleStatus { val allJobsRunningOnTime = jobsInfo.all { it.runningOnTime } if (allJobsRunningOnTime && jobSweeperMetrics.fullSweepOnTime) { @@ -118,7 +114,6 @@ class ScheduledJobsStatsTransportAction : TransportNodesAction + listener: ActionListener, ) { val scheduledJobId = scheduledJob.id acquireLockWithId(scheduledJobId, listener) @@ -62,7 +63,7 @@ class LockService(private val client: Client, private val clusterService: Cluste fun acquireLockWithId( scheduledJobId: String, - listener: ActionListener + listener: ActionListener, ) { val lockId = LockModel.generateLockId(scheduledJobId) createLockIndex( @@ -103,7 +104,7 @@ class LockService(private val client: Client, private val clusterService: Cluste override fun onFailure(e: Exception) { listener.onFailure(e) } - } + }, ) } catch (e: VersionConflictEngineException) { log.debug("could not acquire lock {}", e.message) @@ -117,20 +118,22 @@ class LockService(private val client: Client, private val clusterService: Cluste override fun onFailure(e: Exception) { listener.onFailure(e) } - } + }, ) } private fun createLock( tempLock: LockModel, - listener: ActionListener + listener: ActionListener, ) { try { - val request = IndexRequest(LOCK_INDEX_NAME).id(tempLock.lockId) - .source(tempLock.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) - .setIfSeqNo(SequenceNumbers.UNASSIGNED_SEQ_NO) - .setIfPrimaryTerm(SequenceNumbers.UNASSIGNED_PRIMARY_TERM) - .create(true) + val request 
= + IndexRequest(LOCK_INDEX_NAME) + .id(tempLock.lockId) + .source(tempLock.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .setIfSeqNo(SequenceNumbers.UNASSIGNED_SEQ_NO) + .setIfPrimaryTerm(SequenceNumbers.UNASSIGNED_PRIMARY_TERM) + .create(true) client.index( request, object : ActionListener { @@ -146,7 +149,7 @@ class LockService(private val client: Client, private val clusterService: Cluste } listener.onFailure(e) } - } + }, ) } catch (ex: IOException) { log.error("IOException occurred creating lock", ex) @@ -156,15 +159,17 @@ class LockService(private val client: Client, private val clusterService: Cluste private fun updateLock( updateLock: LockModel, - listener: ActionListener + listener: ActionListener, ) { try { - val updateRequest = UpdateRequest().index(LOCK_INDEX_NAME) - .id(updateLock.lockId) - .setIfSeqNo(updateLock.seqNo) - .setIfPrimaryTerm(updateLock.primaryTerm) - .doc(updateLock.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) - .fetchSource(true) + val updateRequest = + UpdateRequest() + .index(LOCK_INDEX_NAME) + .id(updateLock.lockId) + .setIfSeqNo(updateLock.seqNo) + .setIfPrimaryTerm(updateLock.primaryTerm) + .doc(updateLock.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .fetchSource(true) client.update( updateRequest, @@ -180,7 +185,7 @@ class LockService(private val client: Client, private val clusterService: Cluste if (e is DocumentMissingException) { log.debug( "Document is deleted. This happens if the job is already removed and" + " this is the last run." 
+ "{}", - e.message + e.message, ) } if (e is IOException) { @@ -188,7 +193,7 @@ class LockService(private val client: Client, private val clusterService: Cluste } listener.onResponse(null) } - } + }, ) } catch (ex: IOException) { log.error("IOException occurred updating lock.", ex) @@ -198,7 +203,7 @@ class LockService(private val client: Client, private val clusterService: Cluste fun findLock( lockId: String, - listener: ActionListener + listener: ActionListener, ) { val getRequest = GetRequest(LOCK_INDEX_NAME).id(lockId) client.get( @@ -209,8 +214,10 @@ class LockService(private val client: Client, private val clusterService: Cluste listener.onResponse(null) } else { try { - val parser = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, response.sourceAsString) + val parser = + XContentType.JSON + .xContent() + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, response.sourceAsString) parser.nextToken() listener.onResponse(LockModel.parse(parser, response.seqNo, response.primaryTerm)) } catch (e: IOException) { @@ -224,13 +231,13 @@ class LockService(private val client: Client, private val clusterService: Cluste log.error("Exception occurred finding lock", e) listener.onFailure(e) } - } + }, ) } fun release( lock: LockModel?, - listener: ActionListener + listener: ActionListener, ) { if (lock == null) { log.error("Lock is null. 
Nothing to release.") @@ -248,14 +255,14 @@ class LockService(private val client: Client, private val clusterService: Cluste override fun onFailure(e: Exception) { listener.onFailure(e) } - } + }, ) } } fun deleteLock( lockId: String, - listener: ActionListener + listener: ActionListener, ) { val deleteRequest = DeleteRequest(LOCK_INDEX_NAME).id(lockId) client.delete( @@ -263,7 +270,7 @@ class LockService(private val client: Client, private val clusterService: Cluste object : ActionListener { override fun onResponse(response: DeleteResponse) { listener.onResponse( - response.result == DocWriteResponse.Result.DELETED || response.result == DocWriteResponse.Result.NOT_FOUND + response.result == DocWriteResponse.Result.DELETED || response.result == DocWriteResponse.Result.NOT_FOUND, ) } @@ -275,7 +282,7 @@ class LockService(private val client: Client, private val clusterService: Cluste listener.onFailure(e) } } - } + }, ) } @@ -283,8 +290,10 @@ class LockService(private val client: Client, private val clusterService: Cluste if (lockIndexExist()) { listener.onResponse(true) } else { - val indexRequest = CreateIndexRequest(LOCK_INDEX_NAME).mapping(lockMapping()) - .settings(Settings.builder().put("index.hidden", true).build()) + val indexRequest = + CreateIndexRequest(LOCK_INDEX_NAME) + .mapping(lockMapping()) + .settings(Settings.builder().put("index.hidden", true).build()) client.admin().indices().create( indexRequest, object : ActionListener { @@ -294,29 +303,25 @@ class LockService(private val client: Client, private val clusterService: Cluste override fun onFailure(ex: Exception) { log.error("Failed to update config index schema", ex) - if (ex is ResourceAlreadyExistsException || ex.cause is ResourceAlreadyExistsException - ) { + if (ex is ResourceAlreadyExistsException || ex.cause is ResourceAlreadyExistsException) { listener.onResponse(true) } else { listener.onFailure(ex) } } - } + }, ) } } - private fun isLockReleased(lock: LockModel): Boolean { - return 
lock.released - } + private fun isLockReleased(lock: LockModel): Boolean = lock.released - private fun getNow(): Instant { - return if (testInstant != null) { + private fun getNow(): Instant = + if (testInstant != null) { testInstant!! } else { Instant.now() } - } fun setTime(testInstant: Instant) { this.testInstant = testInstant diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/ppl/PPLPluginInterface.kt b/core/src/main/kotlin/org/opensearch/alerting/core/ppl/PPLPluginInterface.kt index a121f6b93..898b45ea6 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/ppl/PPLPluginInterface.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/ppl/PPLPluginInterface.kt @@ -24,21 +24,22 @@ object PPLPluginInterface { request: TransportPPLQueryRequest, listener: ActionListener, ) { - - val responseReader = Writeable.Reader { - TransportPPLQueryResponse(it) - } - - val wrappedListener = object : ActionListener { - override fun onResponse(response: ActionResponse) { - val recreated = recreateObject(response) { TransportPPLQueryResponse(it) } - listener.onResponse(recreated) + val responseReader = + Writeable.Reader { + TransportPPLQueryResponse(it) } - override fun onFailure(exception: Exception) { - listener.onFailure(exception) + val wrappedListener = + object : ActionListener { + override fun onResponse(response: ActionResponse) { + val recreated = recreateObject(response) { TransportPPLQueryResponse(it) } + listener.onResponse(recreated) + } + + override fun onFailure(exception: Exception) { + listener.onFailure(exception) + } } - } transportService.sendRequest( localNode, @@ -50,7 +51,7 @@ object PPLPluginInterface { .build(), object : ActionListenerResponseHandler( wrappedListener, - responseReader + responseReader, ) { override fun handleResponse(response: ActionResponse) { wrappedListener.onResponse(response) @@ -59,7 +60,7 @@ object PPLPluginInterface { override fun handleException(e: TransportException) { wrappedListener.onFailure(e) } - } 
+ }, ) } } diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/resthandler/RestScheduledJobStatsHandler.kt b/core/src/main/kotlin/org/opensearch/alerting/core/resthandler/RestScheduledJobStatsHandler.kt index 1ee6e3bde..0cd115710 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/resthandler/RestScheduledJobStatsHandler.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/resthandler/RestScheduledJobStatsHandler.kt @@ -18,46 +18,45 @@ import org.opensearch.transport.client.node.NodeClient /** * RestScheduledJobStatsHandler is handler for getting ScheduledJob Stats. */ -class RestScheduledJobStatsHandler(private val path: String) : BaseRestHandler() { +class RestScheduledJobStatsHandler( + private val path: String, +) : BaseRestHandler() { + override fun getName(): String = "${path}_jobs_stats" - override fun getName(): String { - return "${path}_jobs_stats" - } - - override fun routes(): List { - return listOf() - } + override fun routes(): List = listOf() - override fun replacedRoutes(): MutableList { - return mutableListOf( + override fun replacedRoutes(): MutableList = + mutableListOf( RestHandler.ReplacedRoute( GET, "/_plugins/$path/{nodeId}/stats/", GET, - "/_opendistro/$path/{nodeId}/stats/" + "/_opendistro/$path/{nodeId}/stats/", ), RestHandler.ReplacedRoute( GET, "/_plugins/$path/{nodeId}/stats/{metric}", GET, - "/_opendistro/$path/{nodeId}/stats/{metric}" + "/_opendistro/$path/{nodeId}/stats/{metric}", ), RestHandler.ReplacedRoute( GET, "/_plugins/$path/stats/", GET, - "/_opendistro/$path/stats/" + "/_opendistro/$path/stats/", ), RestHandler.ReplacedRoute( GET, "/_plugins/$path/stats/{metric}", GET, - "/_opendistro/$path/stats/{metric}" - ) + "/_opendistro/$path/stats/{metric}", + ), ) - } - override fun prepareRequest(request: RestRequest, client: NodeClient): RestChannelConsumer { + override fun prepareRequest( + request: RestRequest, + client: NodeClient, + ): RestChannelConsumer { val alertingVersion = 
request.param("version") if (alertingVersion != null && alertingVersion !in listOf("v1", "v2")) { throw IllegalArgumentException("Version parameter must be one of v1 or v2") @@ -70,7 +69,7 @@ class RestScheduledJobStatsHandler(private val path: String) : BaseRestHandler() client.execute( ScheduledJobsStatsAction.INSTANCE, scheduledJobNodesStatsRequest, - RestActions.NodesResponseRestListener(channel) + RestActions.NodesResponseRestListener(channel), ) } } diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/resthandler/StatsRequestUtils.kt b/core/src/main/kotlin/org/opensearch/alerting/core/resthandler/StatsRequestUtils.kt index 58b33d709..2997b4b53 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/resthandler/StatsRequestUtils.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/resthandler/StatsRequestUtils.kt @@ -7,25 +7,26 @@ import java.util.Locale import java.util.TreeSet internal object StatsRequestUtils { - const val JOB_SCHEDULING_METRICS: String = "job_scheduling_metrics" const val JOBS_INFO: String = "jobs_info" - val METRICS = mapOf Unit>( - JOB_SCHEDULING_METRICS to { it.jobSchedulingMetrics = true }, - JOBS_INFO to { it.jobsInfo = true } - ) + val METRICS = + mapOf Unit>( + JOB_SCHEDULING_METRICS to { it.jobSchedulingMetrics = true }, + JOBS_INFO to { it.jobsInfo = true }, + ) fun getStatsRequest( request: RestRequest, showAlertingV2ScheduledJobs: Boolean?, - unrecognizedFn: (RestRequest, Set, Set, String) -> String + unrecognizedFn: (RestRequest, Set, Set, String) -> String, ): ScheduledJobsStatsRequest { val nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId")) val metrics = Strings.tokenizeByCommaToSet(request.param("metric")) - val scheduledJobsStatsRequest = ScheduledJobsStatsRequest( - nodeIds = nodesIds, - showAlertingV2ScheduledJobs = showAlertingV2ScheduledJobs - ) + val scheduledJobsStatsRequest = + ScheduledJobsStatsRequest( + nodeIds = nodesIds, + showAlertingV2ScheduledJobs = 
showAlertingV2ScheduledJobs, + ) scheduledJobsStatsRequest.timeout(request.param("timeout")) if (metrics.isEmpty()) { @@ -38,8 +39,8 @@ internal object StatsRequestUtils { Locale.ROOT, "request [%s] contains _all and individual metrics [%s]", request.path(), - request.param("metric") - ) + request.param("metric"), + ), ) } else { // use a sorted set so the unrecognized parameters appear in a reliable sorted order diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobScheduler.kt b/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobScheduler.kt index 8245e1a78..2a055a433 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobScheduler.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobScheduler.kt @@ -22,7 +22,10 @@ import java.util.stream.Collectors * * JobScheduler is unaware of the ScheduledJob version and it is up to callers to ensure that the older version of ScheduledJob to be descheduled and schedule the new version. 
*/ -class JobScheduler(private val threadPool: ThreadPool, private val jobRunner: JobRunner) { +class JobScheduler( + private val threadPool: ThreadPool, + private val jobRunner: JobRunner, +) { private val logger = LogManager.getLogger(JobScheduler::class.java) /** @@ -41,11 +44,10 @@ class JobScheduler(private val threadPool: ThreadPool, private val jobRunner: Jo * * @return List of jobs that could not be scheduled */ - fun schedule(vararg jobsToSchedule: ScheduledJob): List { - return jobsToSchedule.filter { + fun schedule(vararg jobsToSchedule: ScheduledJob): List = + jobsToSchedule.filter { !this.schedule(it) } - } /** * Schedules a single [scheduledJob] @@ -74,9 +76,10 @@ class JobScheduler(private val threadPool: ThreadPool, private val jobRunner: Jo return false } - val scheduledJobInfo = scheduledJobIdToInfo.getOrPut(scheduledJob.id) { - ScheduledJobInfo(scheduledJob.id, scheduledJob) - } + val scheduledJobInfo = + scheduledJobIdToInfo.getOrPut(scheduledJob.id) { + ScheduledJobInfo(scheduledJob.id, scheduledJob) + } if (scheduledJobInfo.scheduledCancellable != null) { // This means that the given ScheduledJob already has schedule running. We should not schedule any more. return true @@ -93,15 +96,15 @@ class JobScheduler(private val threadPool: ThreadPool, private val jobRunner: Jo * * @return List of job ids failed to deschedule. */ - fun deschedule(ids: Collection): List { - return ids.filter { - !this.deschedule(it) - }.also { - if (it.isNotEmpty()) { - logger.error("Unable to deschedule jobs $it") + fun deschedule(ids: Collection): List = + ids + .filter { + !this.deschedule(it) + }.also { + if (it.isNotEmpty()) { + logger.error("Unable to deschedule jobs $it") + } } - } - } /** * Mark the scheduledJob as descheduled and try to cancel any future schedule for given scheduledJob id. @@ -144,18 +147,21 @@ class JobScheduler(private val threadPool: ThreadPool, private val jobRunner: Jo /** * @return list of jobIds that are scheduled. 
*/ - fun scheduledJobs(): Set { - return scheduledJobIdToInfo.keys - } + fun scheduledJobs(): Set = scheduledJobIdToInfo.keys - private fun reschedule(scheduleJob: ScheduledJob, scheduledJobInfo: ScheduledJobInfo): Boolean { + private fun reschedule( + scheduleJob: ScheduledJob, + scheduledJobInfo: ScheduledJobInfo, + ): Boolean { if (scheduleJob.enabledTime == null) { logger.info("${scheduleJob.name} there is no enabled time. This job should never have been scheduled.") return false } - scheduledJobInfo.expectedNextExecutionTime = scheduleJob.schedule.getExpectedNextExecutionTime( - scheduleJob.enabledTime!!, scheduledJobInfo.expectedNextExecutionTime - ) + scheduledJobInfo.expectedNextExecutionTime = + scheduleJob.schedule.getExpectedNextExecutionTime( + scheduleJob.enabledTime!!, + scheduledJobInfo.expectedNextExecutionTime, + ) // Validate if there is next execution that needs to happen. // e.g cron job that is expected to run in 30th of Feb (which doesn't exist). "0/5 * 30 2 *" @@ -167,21 +173,22 @@ class JobScheduler(private val threadPool: ThreadPool, private val jobRunner: Jo val duration = Duration.between(Instant.now(), scheduledJobInfo.expectedNextExecutionTime) // Create anonymous runnable. - val runnable = Runnable { - // Check again if the scheduled job is marked descheduled. - if (scheduledJobInfo.descheduled) { - return@Runnable // skip running job if job is marked descheduled. - } + val runnable = + Runnable { + // Check again if the scheduled job is marked descheduled. + if (scheduledJobInfo.descheduled) { + return@Runnable // skip running job if job is marked descheduled. 
+ } - // Order of operations inside here matter, we specifically call getPeriodEndingAt before reschedule because - // reschedule will update expectedNextExecutionTime to the next one which would throw off the startTime/endTime - val (startTime, endTime) = scheduleJob.schedule.getPeriodEndingAt(scheduledJobInfo.expectedNextExecutionTime) - scheduledJobInfo.actualPreviousExecutionTime = Instant.now() + // Order of operations inside here matter, we specifically call getPeriodEndingAt before reschedule because + // reschedule will update expectedNextExecutionTime to the next one which would throw off the startTime/endTime + val (startTime, endTime) = scheduleJob.schedule.getPeriodEndingAt(scheduledJobInfo.expectedNextExecutionTime) + scheduledJobInfo.actualPreviousExecutionTime = Instant.now() - this.reschedule(scheduleJob, scheduledJobInfo) + this.reschedule(scheduleJob, scheduledJobInfo) - jobRunner.runJob(scheduleJob, startTime, endTime) - } + jobRunner.runJob(scheduleJob, startTime, endTime) + } // Check descheduled flag as close as possible before we actually schedule a job. // This way we will can minimize race conditions. 
@@ -200,24 +207,26 @@ class JobScheduler(private val threadPool: ThreadPool, private val jobRunner: Jo fun getJobSchedulerMetric(showAlertingV2ScheduledJobs: Boolean?): List { val scheduledJobEntries = scheduledJobIdToInfo.entries - val filteredScheduledJobEntries = if (showAlertingV2ScheduledJobs == null) { - // if no alerting version was specified, do not filter - scheduledJobEntries - } else if (showAlertingV2ScheduledJobs) { - scheduledJobEntries.filter { it.value.scheduledJob.type == monitorV2Type } - } else { - scheduledJobEntries.filter { it.value.scheduledJob.type != monitorV2Type } - } + val filteredScheduledJobEntries = + if (showAlertingV2ScheduledJobs == null) { + // if no alerting version was specified, do not filter + scheduledJobEntries + } else if (showAlertingV2ScheduledJobs) { + scheduledJobEntries.filter { it.value.scheduledJob.type == monitorV2Type } + } else { + scheduledJobEntries.filter { it.value.scheduledJob.type != monitorV2Type } + } - return filteredScheduledJobEntries.stream() + return filteredScheduledJobEntries + .stream() .map { entry -> JobSchedulerMetrics( entry.value.scheduledJobId, entry.value.actualPreviousExecutionTime?.toEpochMilli(), - entry.value.scheduledJob.schedule.runningOnTime(entry.value.actualPreviousExecutionTime) + entry.value.scheduledJob.schedule + .runningOnTime(entry.value.actualPreviousExecutionTime), ) - } - .collect(Collectors.toList()) + }.collect(Collectors.toList()) } fun postIndex(job: ScheduledJob) { @@ -240,6 +249,6 @@ class JobScheduler(private val threadPool: ThreadPool, private val jobRunner: Jo var descheduled: Boolean = false, var actualPreviousExecutionTime: Instant? = null, var expectedNextExecutionTime: Instant? = null, - var scheduledCancellable: Scheduler.ScheduledCancellable? = null + var scheduledCancellable: Scheduler.ScheduledCancellable? 
= null, ) } diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerMetrics.kt b/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerMetrics.kt index dff1ecd52..d7ea4ee90 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerMetrics.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerMetrics.kt @@ -13,7 +13,9 @@ import org.opensearch.core.xcontent.ToXContentFragment import org.opensearch.core.xcontent.XContentBuilder import java.time.Instant -class JobSchedulerMetrics : ToXContentFragment, Writeable { +class JobSchedulerMetrics : + ToXContentFragment, + Writeable { val scheduledJobId: String val lastExecutionTime: Long? val runningOnTime: Boolean @@ -36,12 +38,17 @@ class JobSchedulerMetrics : ToXContentFragment, Writeable { out.writeBoolean(runningOnTime) } - override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { - if (lastExecutionTime != null) + override fun toXContent( + builder: XContentBuilder, + params: ToXContent.Params, + ): XContentBuilder { + if (lastExecutionTime != null) { builder.timeField( - "last_execution_time", "last_execution_time_in_millis", - Instant.ofEpochMilli(lastExecutionTime).toEpochMilli() + "last_execution_time", + "last_execution_time_in_millis", + Instant.ofEpochMilli(lastExecutionTime).toEpochMilli(), ) + } builder.field("running_on_time", runningOnTime) return builder } diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/settings/AlertingV2Settings.kt b/core/src/main/kotlin/org/opensearch/alerting/core/settings/AlertingV2Settings.kt index fd92dd5e9..97915430f 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/settings/AlertingV2Settings.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/settings/AlertingV2Settings.kt @@ -13,10 +13,12 @@ import org.opensearch.common.settings.Setting */ class AlertingV2Settings { companion object { - val ALERTING_V2_ENABLED = 
Setting.boolSetting( - "plugins.alerting.v2.enabled", - true, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val ALERTING_V2_ENABLED = + Setting.boolSetting( + "plugins.alerting.v2.enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) } } diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/settings/LegacyOpenDistroScheduledJobSettings.kt b/core/src/main/kotlin/org/opensearch/alerting/core/settings/LegacyOpenDistroScheduledJobSettings.kt index 3a37ff97f..41a6c63a1 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/settings/LegacyOpenDistroScheduledJobSettings.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/settings/LegacyOpenDistroScheduledJobSettings.kt @@ -8,42 +8,59 @@ import org.opensearch.common.unit.TimeValue */ class LegacyOpenDistroScheduledJobSettings { - companion object { - val SWEEPER_ENABLED = Setting.boolSetting( - "opendistro.scheduled_jobs.enabled", - true, - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val REQUEST_TIMEOUT = Setting.positiveTimeSetting( - "opendistro.scheduled_jobs.request_timeout", - TimeValue.timeValueSeconds(10), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val SWEEP_BACKOFF_MILLIS = Setting.positiveTimeSetting( - "opendistro.scheduled_jobs.sweeper.backoff_millis", - TimeValue.timeValueMillis(50), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val SWEEP_BACKOFF_RETRY_COUNT = Setting.intSetting( - "opendistro.scheduled_jobs.retry_count", - 3, - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val SWEEP_PERIOD = Setting.positiveTimeSetting( - "opendistro.scheduled_jobs.sweeper.period", - TimeValue.timeValueMinutes(5), - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) - - val SWEEP_PAGE_SIZE = Setting.intSetting( - 
"opendistro.scheduled_jobs.sweeper.page_size", - 100, - Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated - ) + val SWEEPER_ENABLED = + Setting.boolSetting( + "opendistro.scheduled_jobs.enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val REQUEST_TIMEOUT = + Setting.positiveTimeSetting( + "opendistro.scheduled_jobs.request_timeout", + TimeValue.timeValueSeconds(10), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val SWEEP_BACKOFF_MILLIS = + Setting.positiveTimeSetting( + "opendistro.scheduled_jobs.sweeper.backoff_millis", + TimeValue.timeValueMillis(50), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val SWEEP_BACKOFF_RETRY_COUNT = + Setting.intSetting( + "opendistro.scheduled_jobs.retry_count", + 3, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val SWEEP_PERIOD = + Setting.positiveTimeSetting( + "opendistro.scheduled_jobs.sweeper.period", + TimeValue.timeValueMinutes(5), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) + + val SWEEP_PAGE_SIZE = + Setting.intSetting( + "opendistro.scheduled_jobs.sweeper.page_size", + 100, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated, + ) } } diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/settings/ScheduledJobSettings.kt b/core/src/main/kotlin/org/opensearch/alerting/core/settings/ScheduledJobSettings.kt index 6bdb18bec..91a49095e 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/settings/ScheduledJobSettings.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/settings/ScheduledJobSettings.kt @@ -11,41 +11,52 @@ import org.opensearch.common.settings.Setting * settings used for [ScheduledJob]'s. These include back off settings, retry counts, timeouts etc... 
*/ class ScheduledJobSettings { - companion object { - val SWEEPER_ENABLED = Setting.boolSetting( - "plugins.scheduled_jobs.enabled", - LegacyOpenDistroScheduledJobSettings.SWEEPER_ENABLED, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - val REQUEST_TIMEOUT = Setting.positiveTimeSetting( - "plugins.scheduled_jobs.request_timeout", - LegacyOpenDistroScheduledJobSettings.REQUEST_TIMEOUT, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val SWEEP_BACKOFF_MILLIS = Setting.positiveTimeSetting( - "plugins.scheduled_jobs.sweeper.backoff_millis", - LegacyOpenDistroScheduledJobSettings.SWEEP_BACKOFF_MILLIS, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val SWEEP_BACKOFF_RETRY_COUNT = Setting.intSetting( - "plugins.scheduled_jobs.retry_count", - LegacyOpenDistroScheduledJobSettings.SWEEP_BACKOFF_RETRY_COUNT, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val SWEEP_PERIOD = Setting.positiveTimeSetting( - "plugins.scheduled_jobs.sweeper.period", - LegacyOpenDistroScheduledJobSettings.SWEEP_PERIOD, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) - - val SWEEP_PAGE_SIZE = Setting.intSetting( - "plugins.scheduled_jobs.sweeper.page_size", - LegacyOpenDistroScheduledJobSettings.SWEEP_PAGE_SIZE, - Setting.Property.NodeScope, Setting.Property.Dynamic - ) + val SWEEPER_ENABLED = + Setting.boolSetting( + "plugins.scheduled_jobs.enabled", + LegacyOpenDistroScheduledJobSettings.SWEEPER_ENABLED, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + val REQUEST_TIMEOUT = + Setting.positiveTimeSetting( + "plugins.scheduled_jobs.request_timeout", + LegacyOpenDistroScheduledJobSettings.REQUEST_TIMEOUT, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val SWEEP_BACKOFF_MILLIS = + Setting.positiveTimeSetting( + "plugins.scheduled_jobs.sweeper.backoff_millis", + LegacyOpenDistroScheduledJobSettings.SWEEP_BACKOFF_MILLIS, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val 
SWEEP_BACKOFF_RETRY_COUNT = + Setting.intSetting( + "plugins.scheduled_jobs.retry_count", + LegacyOpenDistroScheduledJobSettings.SWEEP_BACKOFF_RETRY_COUNT, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val SWEEP_PERIOD = + Setting.positiveTimeSetting( + "plugins.scheduled_jobs.sweeper.period", + LegacyOpenDistroScheduledJobSettings.SWEEP_PERIOD, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) + + val SWEEP_PAGE_SIZE = + Setting.intSetting( + "plugins.scheduled_jobs.sweeper.page_size", + LegacyOpenDistroScheduledJobSettings.SWEEP_PAGE_SIZE, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + ) } } diff --git a/core/src/main/kotlin/org/opensearch/alerting/core/util/XContentExtensions.kt b/core/src/main/kotlin/org/opensearch/alerting/core/util/XContentExtensions.kt index 9ca03ed6b..316e13ae7 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/core/util/XContentExtensions.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/core/util/XContentExtensions.kt @@ -8,6 +8,7 @@ package org.opensearch.alerting.core.util import org.opensearch.core.xcontent.XContentBuilder import java.time.Instant -fun XContentBuilder.nonOptionalTimeField(name: String, instant: Instant): XContentBuilder { - return this.timeField(name, "${name}_in_millis", instant.toEpochMilli()) -} +fun XContentBuilder.nonOptionalTimeField( + name: String, + instant: Instant, +): XContentBuilder = this.timeField(name, "${name}_in_millis", instant.toEpochMilli()) diff --git a/core/src/main/kotlin/org/opensearch/alerting/opensearchapi/OpenSearchExtensions.kt b/core/src/main/kotlin/org/opensearch/alerting/opensearchapi/OpenSearchExtensions.kt index fd500ef1d..f78c83796 100644 --- a/core/src/main/kotlin/org/opensearch/alerting/opensearchapi/OpenSearchExtensions.kt +++ b/core/src/main/kotlin/org/opensearch/alerting/opensearchapi/OpenSearchExtensions.kt @@ -71,7 +71,7 @@ fun BackoffPolicy.retry(block: () -> T): T { */ suspend fun BackoffPolicy.retryForNotification( 
logger: Logger, - block: suspend () -> T + block: suspend () -> T, ): T { val iter = iterator() do { @@ -107,7 +107,7 @@ suspend fun BackoffPolicy.retryForNotification( suspend fun BackoffPolicy.retry( logger: Logger, retryOn: List = emptyList(), - block: suspend () -> T + block: suspend () -> T, ): T { val iter = iterator() do { @@ -129,15 +129,15 @@ suspend fun BackoffPolicy.retry( * Retries on 502, 503 and 504 per elastic client's behavior: https://github.com/elastic/elasticsearch-net/issues/2061 * 429 must be retried manually as it's not clear if it's ok to retry for requests other than Bulk requests. */ -fun OpenSearchException.isRetriable(): Boolean { - return (status() in listOf(BAD_GATEWAY, SERVICE_UNAVAILABLE, GATEWAY_TIMEOUT)) -} +fun OpenSearchException.isRetriable(): Boolean = (status() in listOf(BAD_GATEWAY, SERVICE_UNAVAILABLE, GATEWAY_TIMEOUT)) -fun SearchResponse.firstFailureOrNull(): ShardSearchFailure? { - return shardFailures?.getOrNull(0) -} +fun SearchResponse.firstFailureOrNull(): ShardSearchFailure? 
= shardFailures?.getOrNull(0) -fun addFilter(user: User, searchSourceBuilder: SearchSourceBuilder, fieldName: String) { +fun addFilter( + user: User, + searchSourceBuilder: SearchSourceBuilder, + fieldName: String, +) { val filterBackendRoles = QueryBuilders.termsQuery(fieldName, user.backendRoles) val queryBuilder = searchSourceBuilder.query() as BoolQueryBuilder searchSourceBuilder.query(queryBuilder.filter(filterBackendRoles)) @@ -150,11 +150,13 @@ fun addFilter(user: User, searchSourceBuilder: SearchSourceBuilder, fieldName: S */ suspend fun C.suspendUntil(block: C.(ActionListener) -> Unit): T = suspendCoroutine { cont -> - block(object : ActionListener { - override fun onResponse(response: T) = cont.resume(response) + block( + object : ActionListener { + override fun onResponse(response: T) = cont.resume(response) - override fun onFailure(e: Exception) = cont.resumeWithException(e) - }) + override fun onFailure(e: Exception) = cont.resumeWithException(e) + }, + ) } /** @@ -164,11 +166,13 @@ suspend fun C.suspendUntil(block: C.(ActionListener */ suspend fun NotificationsPluginInterface.suspendUntil(block: NotificationsPluginInterface.(ActionListener) -> Unit): T = suspendCoroutine { cont -> - block(object : ActionListener { - override fun onResponse(response: T) = cont.resume(response) + block( + object : ActionListener { + override fun onResponse(response: T) = cont.resume(response) - override fun onFailure(e: Exception) = cont.resumeWithException(e) - }) + override fun onFailure(e: Exception) = cont.resumeWithException(e) + }, + ) } /** @@ -178,11 +182,13 @@ suspend fun NotificationsPluginInterface.suspendUntil(block: NotificationsPl */ suspend fun PPLPluginInterface.suspendUntil(block: PPLPluginInterface.(ActionListener) -> Unit): T = suspendCoroutine { cont -> - block(object : ActionListener { - override fun onResponse(response: T) = cont.resume(response) + block( + object : ActionListener { + override fun onResponse(response: T) = cont.resume(response) - 
override fun onFailure(e: Exception) = cont.resumeWithException(e) - }) + override fun onFailure(e: Exception) = cont.resumeWithException(e) + }, + ) } class InjectorContextElement( @@ -190,10 +196,10 @@ class InjectorContextElement( settings: Settings, threadContext: ThreadContext, private val roles: List?, - private val user: User? = null + private val user: User? = null, ) : ThreadContextElement { - companion object Key : CoroutineContext.Key + override val key: CoroutineContext.Key<*> get() = Key @@ -205,14 +211,17 @@ class InjectorContextElement( rolesInjectorHelper.injectUserInfo(user) } - override fun restoreThreadContext(context: CoroutineContext, oldState: Unit) { + override fun restoreThreadContext( + context: CoroutineContext, + oldState: Unit, + ) { rolesInjectorHelper.close() } } suspend fun withClosableContext( context: InjectorContextElement, - block: suspend CoroutineScope.() -> T + block: suspend CoroutineScope.() -> T, ): T { try { return withContext(context) { block() } diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/WriteableTests.kt b/core/src/test/kotlin/org/opensearch/alerting/core/WriteableTests.kt index f48ffa370..5636d0b07 100644 --- a/core/src/test/kotlin/org/opensearch/alerting/core/WriteableTests.kt +++ b/core/src/test/kotlin/org/opensearch/alerting/core/WriteableTests.kt @@ -13,7 +13,6 @@ import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.test.OpenSearchTestCase.assertEquals class WriteableTests { - @Test fun `test jobschedule metrics as stream`() { val metrics = JobSchedulerMetrics("test", DateTime.now().millis, false) diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/model/MockScheduledJob.kt b/core/src/test/kotlin/org/opensearch/alerting/core/model/MockScheduledJob.kt index 08e3fb8c4..860a6e155 100644 --- a/core/src/test/kotlin/org/opensearch/alerting/core/model/MockScheduledJob.kt +++ b/core/src/test/kotlin/org/opensearch/alerting/core/model/MockScheduledJob.kt @@ -16,13 +16,19 
@@ class MockScheduledJob( override val enabled: Boolean, override val schedule: Schedule, override var lastUpdateTime: Instant, - override val enabledTime: Instant? + override val enabledTime: Instant?, ) : ScheduledJob { - override fun fromDocument(id: String, version: Long): ScheduledJob { + override fun fromDocument( + id: String, + version: Long, + ): ScheduledJob { TODO("not implemented") } - override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { + override fun toXContent( + builder: XContentBuilder?, + params: ToXContent.Params?, + ): XContentBuilder { TODO("not implemented") } diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerTest.kt b/core/src/test/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerTest.kt index a0453e935..70e7f5738 100644 --- a/core/src/test/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerTest.kt +++ b/core/src/test/kotlin/org/opensearch/alerting/core/schedule/JobSchedulerTest.kt @@ -20,7 +20,6 @@ import kotlin.test.assertFalse import kotlin.test.assertTrue class JobSchedulerTest { - private var testSettings: Settings = Settings.builder().put("node.name", "node-0").build() private val testThreadPool = ThreadPool(testSettings) private var jobRunner: MockJobRunner = MockJobRunner() @@ -34,16 +33,17 @@ class JobSchedulerTest { @Test fun `schedule and deschedule`() { - val mockScheduledJob = MockScheduledJob( - "mockScheduledJob-id", - 1L, - "mockScheduledJob-name", - "MockScheduledJob", - true, - IntervalSchedule(1, ChronoUnit.MINUTES), - Instant.now(), - Instant.now() - ) + val mockScheduledJob = + MockScheduledJob( + "mockScheduledJob-id", + 1L, + "mockScheduledJob-name", + "MockScheduledJob", + true, + IntervalSchedule(1, ChronoUnit.MINUTES), + Instant.now(), + Instant.now(), + ) assertTrue(jobScheduler.schedule(mockScheduledJob)) @@ -58,16 +58,17 @@ class JobSchedulerTest { val cronExpression = "0/5 * 30 2 *" val jobRunner = MockJobRunner() val 
jobScheduler = JobScheduler(testThreadPool, jobRunner) - val mockScheduledJob = MockScheduledJob( - "mockScheduledJob-id", - 1L, - "mockScheduledJob-name", - "MockScheduledJob", - true, - CronSchedule(cronExpression, ZoneId.of("UTC")), - Instant.now(), - Instant.now() - ) + val mockScheduledJob = + MockScheduledJob( + "mockScheduledJob-id", + 1L, + "mockScheduledJob-name", + "MockScheduledJob", + true, + CronSchedule(cronExpression, ZoneId.of("UTC")), + Instant.now(), + Instant.now(), + ) assertTrue(jobScheduler.schedule(mockScheduledJob)) assertEquals(setOf("mockScheduledJob-id"), jobScheduler.scheduledJobs(), "List of ScheduledJobs are not the same.") @@ -82,16 +83,17 @@ class JobSchedulerTest { val cronExpression = "0/5 * * * *" val jobRunner = MockJobRunner() val jobScheduler = JobScheduler(testThreadPool, jobRunner) - val mockScheduledJob = MockScheduledJob( - "mockScheduledJob-id", - 1L, - "mockScheduledJob-name", - "MockScheduledJob", - false, - CronSchedule(cronExpression, ZoneId.of("UTC")), - Instant.now(), - Instant.now() - ) + val mockScheduledJob = + MockScheduledJob( + "mockScheduledJob-id", + 1L, + "mockScheduledJob-name", + "MockScheduledJob", + false, + CronSchedule(cronExpression, ZoneId.of("UTC")), + Instant.now(), + Instant.now(), + ) assertFalse(jobScheduler.schedule(mockScheduledJob), "We should return false if we try to schedule disabled schedule.") assertEquals(setOf(), jobScheduler.scheduledJobs(), "List of ScheduledJobs are not the same.") @@ -102,16 +104,17 @@ class JobSchedulerTest { val cronExpression = "0/5 * * * *" val jobRunner = MockJobRunner() val jobScheduler = JobScheduler(testThreadPool, jobRunner) - val mockScheduledJob = MockScheduledJob( - "mockScheduledJob-id", - 1L, - "mockScheduledJob-name", - "MockScheduledJob", - true, - CronSchedule(cronExpression, ZoneId.of("UTC")), - Instant.now(), - Instant.now() - ) + val mockScheduledJob = + MockScheduledJob( + "mockScheduledJob-id", + 1L, + "mockScheduledJob-name", + 
"MockScheduledJob", + true, + CronSchedule(cronExpression, ZoneId.of("UTC")), + Instant.now(), + Instant.now(), + ) assertTrue(jobScheduler.schedule(mockScheduledJob)) assertEquals(setOf("mockScheduledJob-id"), jobScheduler.scheduledJobs(), "List of ScheduledJobs are not the same.") @@ -125,26 +128,28 @@ class JobSchedulerTest { @Test fun `schedule multiple jobs`() { val cronExpression = "0/5 * * * *" - val mockScheduledJob1 = MockScheduledJob( - "mockScheduledJob-1", - 1L, - "mockScheduledJob-name", - "MockScheduledJob", - true, - CronSchedule(cronExpression, ZoneId.of("UTC")), - Instant.now(), - Instant.now() - ) - val mockScheduledJob2 = MockScheduledJob( - "mockScheduledJob-2", - 1L, - "mockScheduledJob-name", - "MockScheduledJob", - true, - CronSchedule(cronExpression, ZoneId.of("UTC")), - Instant.now(), - Instant.now() - ) + val mockScheduledJob1 = + MockScheduledJob( + "mockScheduledJob-1", + 1L, + "mockScheduledJob-name", + "MockScheduledJob", + true, + CronSchedule(cronExpression, ZoneId.of("UTC")), + Instant.now(), + Instant.now(), + ) + val mockScheduledJob2 = + MockScheduledJob( + "mockScheduledJob-2", + 1L, + "mockScheduledJob-name", + "MockScheduledJob", + true, + CronSchedule(cronExpression, ZoneId.of("UTC")), + Instant.now(), + Instant.now(), + ) assertTrue(jobScheduler.schedule(mockScheduledJob1, mockScheduledJob2).isEmpty()) } @@ -152,10 +157,17 @@ class JobSchedulerTest { @Test fun `schedule null enabled time job`() { val cronExpression = "0/5 * * * *" - val mockScheduledJob2 = MockScheduledJob( - "mockScheduledJob-2", 1L, "mockScheduledJob-name", "MockScheduledJob", true, - CronSchedule(cronExpression, ZoneId.of("UTC")), Instant.now(), null - ) + val mockScheduledJob2 = + MockScheduledJob( + "mockScheduledJob-2", + 1L, + "mockScheduledJob-name", + "MockScheduledJob", + true, + CronSchedule(cronExpression, ZoneId.of("UTC")), + Instant.now(), + null, + ) assertFalse(jobScheduler.schedule(mockScheduledJob2)) } @@ -163,10 +175,17 @@ class 
JobSchedulerTest { @Test fun `schedule disabled job`() { val cronExpression = "0/5 * * * *" - val mockScheduledJob1 = MockScheduledJob( - "mockScheduledJob-1", 1L, "mockScheduledJob-name", "MockScheduledJob", false, - CronSchedule(cronExpression, ZoneId.of("UTC")), Instant.now(), Instant.now() - ) + val mockScheduledJob1 = + MockScheduledJob( + "mockScheduledJob-1", + 1L, + "mockScheduledJob-name", + "MockScheduledJob", + false, + CronSchedule(cronExpression, ZoneId.of("UTC")), + Instant.now(), + Instant.now(), + ) assertFalse(jobScheduler.schedule(mockScheduledJob1)) } @@ -174,16 +193,17 @@ class JobSchedulerTest { @Test fun `run Job`() { val cronExpression = "0/5 * * * *" - val mockScheduledJob = MockScheduledJob( - "mockScheduledJob-id", - 1L, - "mockScheduledJob-name", - "MockScheduledJob", - true, - CronSchedule(cronExpression, ZoneId.of("UTC")), - Instant.now(), - Instant.now() - ) + val mockScheduledJob = + MockScheduledJob( + "mockScheduledJob-id", + 1L, + "mockScheduledJob-name", + "MockScheduledJob", + true, + CronSchedule(cronExpression, ZoneId.of("UTC")), + Instant.now(), + Instant.now(), + ) jobRunner.runJob(mockScheduledJob, Instant.now(), Instant.now()) } diff --git a/core/src/test/kotlin/org/opensearch/alerting/core/schedule/MockJobRunner.kt b/core/src/test/kotlin/org/opensearch/alerting/core/schedule/MockJobRunner.kt index 15fe770b9..b30757620 100644 --- a/core/src/test/kotlin/org/opensearch/alerting/core/schedule/MockJobRunner.kt +++ b/core/src/test/kotlin/org/opensearch/alerting/core/schedule/MockJobRunner.kt @@ -25,7 +25,11 @@ class MockJobRunner : JobRunner { numberOfIndex++ } - override fun runJob(job: ScheduledJob, periodStart: Instant, periodEnd: Instant) { + override fun runJob( + job: ScheduledJob, + periodStart: Instant, + periodEnd: Instant, + ) { numberOfRun++ } } diff --git a/spi/src/main/kotlin/org/opensearch/alerting/spi/RemoteMonitorRunner.kt b/spi/src/main/kotlin/org/opensearch/alerting/spi/RemoteMonitorRunner.kt index 
becb25f92..394cd8ad9 100644 --- a/spi/src/main/kotlin/org/opensearch/alerting/spi/RemoteMonitorRunner.kt +++ b/spi/src/main/kotlin/org/opensearch/alerting/spi/RemoteMonitorRunner.kt @@ -36,21 +36,16 @@ import kotlin.coroutines.resumeWithException import kotlin.coroutines.suspendCoroutine open class RemoteMonitorRunner { - open fun runMonitor( monitor: Monitor, periodStart: Instant, periodEnd: Instant, dryrun: Boolean, executionId: String, - transportService: TransportService - ): MonitorRunResult { - return MonitorRunResult(monitor.name, periodStart, periodEnd) - } + transportService: TransportService, + ): MonitorRunResult = MonitorRunResult(monitor.name, periodStart, periodEnd) - open fun getFanOutAction(): String { - throw UnsupportedOperationException("Fan out action needs to be implemented by remote monitor."); - } + open fun getFanOutAction(): String = throw UnsupportedOperationException("Fan out action needs to be implemented by remote monitor.") open suspend fun doFanOut( clusterService: ClusterService, @@ -62,48 +57,53 @@ open class RemoteMonitorRunner { dryrun: Boolean, transportService: TransportService, nodeMap: Map, - nodeShardAssignments: Map> - ): MutableList { - return suspendCoroutine { cont -> - val listener = GroupedActionListener( - object : ActionListener> { - override fun onResponse(response: Collection) { - cont.resume(response.toMutableList()) - } + nodeShardAssignments: Map>, + ): MutableList = + suspendCoroutine { cont -> + val listener = + GroupedActionListener( + object : ActionListener> { + override fun onResponse(response: Collection) { + cont.resume(response.toMutableList()) + } - override fun onFailure(e: Exception) { - if (e.cause is Exception) - cont.resumeWithException(e.cause as Exception) - else - cont.resumeWithException(e) - } - }, - nodeShardAssignments.size - ) - val responseReader = Writeable.Reader { - DocLevelMonitorFanOutResponse(it) - } + override fun onFailure(e: Exception) { + if (e.cause is Exception) { + 
cont.resumeWithException(e.cause as Exception) + } else { + cont.resumeWithException(e) + } + } + }, + nodeShardAssignments.size, + ) + val responseReader = + Writeable.Reader { + DocLevelMonitorFanOutResponse(it) + } for (node in nodeMap) { if (nodeShardAssignments.containsKey(node.key)) { - val docLevelMonitorFanOutRequest = DocLevelMonitorFanOutRequest( - monitor, - dryrun, - monitorMetadata, - executionId, - indexExecutionContext = IndexExecutionContext( - listOf(), - mutableMapOf(), - mutableMapOf(), - "", - "", - listOf(), - listOf(), - listOf() - ), - nodeShardAssignments[node.key]!!.toList(), - concreteIndices, - workflowRunContext - ) + val docLevelMonitorFanOutRequest = + DocLevelMonitorFanOutRequest( + monitor, + dryrun, + monitorMetadata, + executionId, + indexExecutionContext = + IndexExecutionContext( + listOf(), + mutableMapOf(), + mutableMapOf(), + "", + "", + listOf(), + listOf(), + listOf(), + ), + nodeShardAssignments[node.key]!!.toList(), + concreteIndices, + workflowRunContext, + ) transportService.sendRequest( node.value, @@ -112,19 +112,19 @@ open class RemoteMonitorRunner { TransportRequestOptions.EMPTY, object : ActionListenerResponseHandler( listener, - responseReader + responseReader, ) { override fun handleException(e: TransportException) { val cause = e.unwrapCause() if (cause is ConnectTransportException || ( - e is RemoteTransportException && - ( - cause is NodeClosedException || - cause is CircuitBreakingException || - cause is ActionNotFoundTransportException - ) + e is RemoteTransportException && + ( + cause is NodeClosedException || + cause is CircuitBreakingException || + cause is ActionNotFoundTransportException ) + ) ) { val localNode = clusterService.localNode() // retry in local node @@ -136,7 +136,7 @@ open class RemoteMonitorRunner { object : ActionListenerResponseHandler( listener, - responseReader + responseReader, ) { override fun handleException(e: TransportException) { listener.onResponse( @@ -145,19 +145,20 @@ open 
class RemoteMonitorRunner { "", "", mutableMapOf(), - exception = if (e.cause is AlertingException) { - e.cause as AlertingException - } else { - AlertingException.wrap(e) as AlertingException - } - ) + exception = + if (e.cause is AlertingException) { + e.cause as AlertingException + } else { + AlertingException.wrap(e) as AlertingException + }, + ), ) } override fun handleResponse(response: DocLevelMonitorFanOutResponse) { listener.onResponse(response) } - } + }, ) } else { listener.onResponse( @@ -166,12 +167,13 @@ open class RemoteMonitorRunner { "", "", mutableMapOf(), - exception = if (e.cause is AlertingException) { - e.cause as AlertingException - } else { - AlertingException.wrap(e) as AlertingException - } - ) + exception = + if (e.cause is AlertingException) { + e.cause as AlertingException + } else { + AlertingException.wrap(e) as AlertingException + }, + ), ) } } @@ -179,10 +181,9 @@ open class RemoteMonitorRunner { override fun handleResponse(response: DocLevelMonitorFanOutResponse) { listener.onResponse(response) } - } + }, ) } } } - } } diff --git a/spi/src/main/kotlin/org/opensearch/alerting/spi/RemoteMonitorRunnerExtension.kt b/spi/src/main/kotlin/org/opensearch/alerting/spi/RemoteMonitorRunnerExtension.kt index eb07278da..d98ca4758 100644 --- a/spi/src/main/kotlin/org/opensearch/alerting/spi/RemoteMonitorRunnerExtension.kt +++ b/spi/src/main/kotlin/org/opensearch/alerting/spi/RemoteMonitorRunnerExtension.kt @@ -6,6 +6,5 @@ package org.opensearch.alerting.spi interface RemoteMonitorRunnerExtension { - fun getMonitorTypesToMonitorRunners(): Map -} \ No newline at end of file +}