diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index d2192b7484..184c50dcc6 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -35178,6 +35178,43 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAddHostnameProcessor: + description: The `add_hostname` processor adds the hostname to log events. + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: add-hostname-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessorType' + required: + - id + - type + - include + - enabled + type: object + ObservabilityPipelineAddHostnameProcessorType: + default: add_hostname + description: The processor type. The value should always be `add_hostname`. + enum: + - add_hostname + example: add_hostname + type: string + x-enum-varnames: + - ADD_HOSTNAME ObservabilityPipelineAmazonDataFirehoseSource: description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. properties: @@ -35600,24 +35637,27 @@ components: ObservabilityPipelineConfigProcessorItem: description: A processor for the pipeline. oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: 
'#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. oneOf: @@ -37279,6 +37319,72 @@ components: type: string x-enum-varnames: - PARSE_JSON + ObservabilityPipelineParseXMLProcessor: + description: The `parse_xml` processor parses XML from a specified field and + extracts it into the event. + properties: + always_use_text_key: + description: Whether to always use a text key for element content. + type: boolean + attr_prefix: + description: The prefix to use for XML attributes in the parsed output. + type: string + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + field: + description: The name of the log field that contains an XML string. + example: message + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: parse-xml-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + include_attr: + description: Whether to include XML attributes in the parsed output. + type: boolean + parse_bool: + description: Whether to parse boolean values from strings. + type: boolean + parse_null: + description: Whether to parse null values. + type: boolean + parse_number: + description: Whether to parse numeric values from strings. + type: boolean + text_key: + description: The key name to use for text content within XML elements. Must + be at least 1 character if specified. + minLength: 1 + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessorType' + required: + - id + - type + - include + - field + - enabled + type: object + ObservabilityPipelineParseXMLProcessorType: + default: parse_xml + description: The processor type. The value should always be `parse_xml`. + enum: + - parse_xml + example: parse_xml + type: string + x-enum-varnames: + - PARSE_XML ObservabilityPipelinePipelineKafkaSourceSaslMechanism: description: SASL mechanism used for Kafka authentication. enum: @@ -38442,6 +38548,68 @@ components: - type - attributes type: object + ObservabilityPipelineSplitArrayProcessor: + description: The `split_array` processor splits array fields into separate events + based on configured rules. + properties: + arrays: + description: A list of array split configurations. + items: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorArrayConfig' + maxItems: 15 + minItems: 1 + type: array + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). 
+ example: split-array-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. For split_array, this should typically be `*`. + example: '*' + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorType' + required: + - id + - type + - include + - arrays + - enabled + type: object + ObservabilityPipelineSplitArrayProcessorArrayConfig: + description: Configuration for a single array split operation. + properties: + field: + description: The path to the array field to split. + example: tags + type: string + include: + description: A Datadog search query used to determine which logs this array + split operation targets. + example: '*' + type: string + required: + - include + - field + type: object + ObservabilityPipelineSplitArrayProcessorType: + default: split_array + description: The processor type. The value should always be `split_array`. + enum: + - split_array + example: split_array + type: string + x-enum-varnames: + - SPLIT_ARRAY ObservabilityPipelineSplunkHecDestination: description: The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). diff --git a/docs/datadog_api_client.v2.model.rst b/docs/datadog_api_client.v2.model.rst index 2e859a4f0f..0624168ff0 100644 --- a/docs/datadog_api_client.v2.model.rst +++ b/docs/datadog_api_client.v2.model.rst @@ -15369,6 +15369,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_add\_fields\_processor\_t :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_add\_hostname\_processor module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_hostname_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_add\_hostname\_processor\_type module +-------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_data\_firehose\_source module -------------------------------------------------------------------------------------------- @@ -16153,6 +16167,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_parse\_json\_processor\_t :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_xml\_processor module +----------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_xml_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_xml\_processor\_type module +----------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_pipeline\_kafka\_source\_sasl\_mechanism module ------------------------------------------------------------------------------------------------------ @@ -16720,6 +16748,27 @@ datadog\_api\_client.v2.model.observability\_pipeline\_spec\_data module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor\_array\_config module +---------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor\_type module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_hec\_destination module -------------------------------------------------------------------------------------- diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py new file mode 100644 index 0000000000..695994d404 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py @@ -0,0 +1,79 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, + ) + + +class ObservabilityPipelineAddHostnameProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, + ) + + return { + "display_name": (str,), + "enabled": (bool,), + "id": (str,), + "include": (str,), + "type": (ObservabilityPipelineAddHostnameProcessorType,), + } + + attribute_map = { + "display_name": "display_name", + "enabled": "enabled", + "id": "id", + "include": "include", + "type": "type", + } + + def __init__( + self_, + enabled: bool, + id: str, + include: str, + type: ObservabilityPipelineAddHostnameProcessorType, + display_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``add_hostname`` processor adds the hostname to log events. + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Whether this processor is enabled. 
+ :type enabled: bool + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param type: The processor type. The value should always be ``add_hostname``. + :type type: ObservabilityPipelineAddHostnameProcessorType + """ + if display_name is not unset: + kwargs["display_name"] = display_name + super().__init__(kwargs) + + self_.enabled = enabled + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py new file mode 100644 index 0000000000..3f0c2dd4ff --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAddHostnameProcessorType(ModelSimple): + """ + The processor type. The value should always be `add_hostname`. + + :param value: If omitted defaults to "add_hostname". Must be one of ["add_hostname"]. + :type value: str + """ + + allowed_values = { + "add_hostname", + } + ADD_HOSTNAME: ClassVar["ObservabilityPipelineAddHostnameProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAddHostnameProcessorType.ADD_HOSTNAME = ObservabilityPipelineAddHostnameProcessorType( + "add_hostname" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py index 6579870f58..43a1388133 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py @@ -17,48 +17,57 @@ from datadog_api_client.v2.model.observability_pipeline_config_processor_item import ( ObservabilityPipelineConfigProcessorItem, ) - from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor - from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( - ObservabilityPipelineParseJSONProcessor, + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( + ObservabilityPipelineAddEnvVarsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( ObservabilityPipelineAddFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( - ObservabilityPipelineRemoveFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( - 
ObservabilityPipelineRenameFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_custom_processor import ObservabilityPipelineCustomProcessor + from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( + ObservabilityPipelineDatadogTagsProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ObservabilityPipelineDedupeProcessor + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + ObservabilityPipelineEnrichmentTableProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( ObservabilityPipelineGenerateMetricsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_sample_processor import ObservabilityPipelineSampleProcessor + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( ObservabilityPipelineParseGrokProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( - ObservabilityPipelineSensitiveDataScannerProcessor, + from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( + ObservabilityPipelineParseJSONProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( - ObservabilityPipelineOcsfMapperProcessor, + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( - ObservabilityPipelineAddEnvVarsProcessor, + from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor + from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ObservabilityPipelineReduceProcessor + from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( + ObservabilityPipelineRemoveFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ObservabilityPipelineDedupeProcessor - from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( - ObservabilityPipelineEnrichmentTableProcessor, + from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( + ObservabilityPipelineRenameFieldsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_sample_processor import ObservabilityPipelineSampleProcessor + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( + ObservabilityPipelineSensitiveDataScannerProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ObservabilityPipelineReduceProcessor from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ( ObservabilityPipelineThrottleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_custom_processor import ObservabilityPipelineCustomProcessor - from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( - 
ObservabilityPipelineDatadogTagsProcessor, - ) class ObservabilityPipelineConfigProcessorGroup(ModelNormal): @@ -95,24 +104,27 @@ def __init__( processors: List[ Union[ ObservabilityPipelineConfigProcessorItem, + ObservabilityPipelineAddEnvVarsProcessor, + ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineAddHostnameProcessor, + ObservabilityPipelineCustomProcessor, + ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineDedupeProcessor, + ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineFilterProcessor, + ObservabilityPipelineGenerateMetricsProcessor, + ObservabilityPipelineOcsfMapperProcessor, + ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor, + ObservabilityPipelineParseXMLProcessor, ObservabilityPipelineQuotaProcessor, - ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineReduceProcessor, ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, - ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineSampleProcessor, - ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, - ObservabilityPipelineOcsfMapperProcessor, - ObservabilityPipelineAddEnvVarsProcessor, - ObservabilityPipelineDedupeProcessor, - ObservabilityPipelineEnrichmentTableProcessor, - ObservabilityPipelineReduceProcessor, + ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, - ObservabilityPipelineCustomProcessor, - ObservabilityPipelineDatadogTagsProcessor, ] ], display_name: Union[str, UnsetType] = unset, diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py index 8becc43c0d..02edf04bb0 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py @@ -21,18 +21,78 @@ def __init__(self, **kwargs): :param enabled: Whether this processor is enabled. :type enabled: bool - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + :param id: The unique identifier for this component. Used to reference this processor in the pipeline. :type id: str - :param include: A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped. + :param include: A Datadog search query used to determine which logs this processor targets. :type include: str - :param type: The processor type. The value should always be `filter`. - :type type: ObservabilityPipelineFilterProcessorType + :param type: The processor type. The value should always be `add_env_vars`. + :type type: ObservabilityPipelineAddEnvVarsProcessorType + + :param variables: A list of environment variable mappings to apply to log fields. + :type variables: [ObservabilityPipelineAddEnvVarsProcessorVariable] + + :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. + :type fields: [ObservabilityPipelineFieldValue] + + :param remaps: Array of VRL remap rules. + :type remaps: [ObservabilityPipelineCustomProcessorRemap] + + :param action: The action to take on tags with matching keys. 
+ :type action: ObservabilityPipelineDatadogTagsProcessorAction + + :param keys: A list of tag keys. + :type keys: [str] + + :param mode: The processing mode. + :type mode: ObservabilityPipelineDatadogTagsProcessorMode + + :param file: Defines a static enrichment table loaded from a CSV file. + :type file: ObservabilityPipelineEnrichmentTableFile, optional + + :param geoip: Uses a GeoIP database to enrich logs based on an IP field. + :type geoip: ObservabilityPipelineEnrichmentTableGeoIp, optional + + :param target: Path where enrichment results should be stored in the log. + :type target: str + + :param metrics: Configuration for generating individual metrics. + :type metrics: [ObservabilityPipelineGeneratedMetric], optional + + :param mappings: A list of mapping rules to convert events to the OCSF format. + :type mappings: [ObservabilityPipelineOcsfMapperProcessorMapping] + + :param disable_library_rules: If set to `true`, disables the default Grok rules provided by Datadog. + :type disable_library_rules: bool, optional + + :param rules: The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. + :type rules: [ObservabilityPipelineParseGrokProcessorRule] :param field: The name of the log field that contains a JSON string. :type field: str + :param always_use_text_key: Whether to always use a text key for element content. + :type always_use_text_key: bool, optional + + :param attr_prefix: The prefix to use for XML attributes in the parsed output. + :type attr_prefix: str, optional + + :param include_attr: Whether to include XML attributes in the parsed output. + :type include_attr: bool, optional + + :param parse_bool: Whether to parse boolean values from strings. + :type parse_bool: bool, optional + + :param parse_null: Whether to parse null values. + :type parse_null: bool, optional + + :param parse_number: Whether to parse numeric values from strings. + :type parse_number: bool, optional + + :param text_key: The key name to use for text content within XML elements. Must be at least 1 character if specified. + :type text_key: str, optional + :param drop_events: If set to `true`, logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. :type drop_events: bool, optional @@ -57,11 +117,11 @@ def __init__(self, **kwargs): :param partition_fields: A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. :type partition_fields: [str], optional - :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. - :type fields: [ObservabilityPipelineFieldValue] + :param group_by: A list of fields used to group log events for merging. + :type group_by: [str] - :param metrics: Configuration for generating individual metrics. - :type metrics: [ObservabilityPipelineGeneratedMetric], optional + :param merge_strategies: List of merge strategies defining how values from grouped events should be combined. + :type merge_strategies: [ObservabilityPipelineReduceProcessorMergeStrategy] :param percentage: The percentage of logs to sample. :type percentage: float, optional @@ -69,50 +129,14 @@ def __init__(self, **kwargs): :param rate: Number of events to sample (1 in N). 
:type rate: int, optional - :param disable_library_rules: If set to `true`, disables the default Grok rules provided by Datadog. - :type disable_library_rules: bool, optional - - :param rules: The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. - :type rules: [ObservabilityPipelineParseGrokProcessorRule] - - :param mappings: A list of mapping rules to convert events to the OCSF format. - :type mappings: [ObservabilityPipelineOcsfMapperProcessorMapping] - - :param variables: A list of environment variable mappings to apply to log fields. - :type variables: [ObservabilityPipelineAddEnvVarsProcessorVariable] - - :param mode: The deduplication mode to apply to the fields. - :type mode: ObservabilityPipelineDedupeProcessorMode - - :param file: Defines a static enrichment table loaded from a CSV file. - :type file: ObservabilityPipelineEnrichmentTableFile, optional - - :param geoip: Uses a GeoIP database to enrich logs based on an IP field. - :type geoip: ObservabilityPipelineEnrichmentTableGeoIp, optional - - :param target: Path where enrichment results should be stored in the log. - :type target: str - - :param group_by: A list of fields used to group log events for merging. - :type group_by: [str] - - :param merge_strategies: List of merge strategies defining how values from grouped events should be combined. - :type merge_strategies: [ObservabilityPipelineReduceProcessorMergeStrategy] + :param arrays: A list of array split configurations. + :type arrays: [ObservabilityPipelineSplitArrayProcessorArrayConfig] :param threshold: the number of events allowed in a given time window. Events sent after the threshold has been reached, are dropped. :type threshold: int :param window: The time window in seconds over which the threshold applies. :type window: float - - :param remaps: Array of VRL remap rules. - :type remaps: [ObservabilityPipelineCustomProcessorRemap] - - :param action: The action to take on tags with matching keys. - :type action: ObservabilityPipelineDatadogTagsProcessorAction - - :param keys: A list of tag keys. 
- :type keys: [str] """ super().__init__(kwargs) @@ -125,17 +149,50 @@ def _composed_schemas(_): # code would be run when this module is imported, and these composed # classes don't exist yet because their module has not finished # loading + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( + ObservabilityPipelineAddEnvVarsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( + ObservabilityPipelineAddFieldsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_custom_processor import ( + ObservabilityPipelineCustomProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( + ObservabilityPipelineDatadogTagsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ( + ObservabilityPipelineDedupeProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + ObservabilityPipelineEnrichmentTableProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_filter_processor import ( ObservabilityPipelineFilterProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( + ObservabilityPipelineGenerateMetricsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( + ObservabilityPipelineParseGrokProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( ObservabilityPipelineParseJSONProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_quota_processor import ( ObservabilityPipelineQuotaProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( - ObservabilityPipelineAddFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ( + ObservabilityPipelineReduceProcessor, ) from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( ObservabilityPipelineRemoveFieldsProcessor, @@ -143,62 +200,41 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( ObservabilityPipelineRenameFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( - ObservabilityPipelineGenerateMetricsProcessor, - ) from datadog_api_client.v2.model.observability_pipeline_sample_processor import ( ObservabilityPipelineSampleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( - ObservabilityPipelineParseGrokProcessor, - ) from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( ObservabilityPipelineSensitiveDataScannerProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( - ObservabilityPipelineOcsfMapperProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( - ObservabilityPipelineAddEnvVarsProcessor, - ) - from 
datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ( - ObservabilityPipelineDedupeProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( - ObservabilityPipelineEnrichmentTableProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ( - ObservabilityPipelineReduceProcessor, + from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, ) from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ( ObservabilityPipelineThrottleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_custom_processor import ( - ObservabilityPipelineCustomProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( - ObservabilityPipelineDatadogTagsProcessor, - ) return { "oneOf": [ + ObservabilityPipelineAddEnvVarsProcessor, + ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineAddHostnameProcessor, + ObservabilityPipelineCustomProcessor, + ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineDedupeProcessor, + ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineFilterProcessor, + ObservabilityPipelineGenerateMetricsProcessor, + ObservabilityPipelineOcsfMapperProcessor, + ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor, + ObservabilityPipelineParseXMLProcessor, ObservabilityPipelineQuotaProcessor, - ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineReduceProcessor, ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, - ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineSampleProcessor, - ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, - ObservabilityPipelineOcsfMapperProcessor, - ObservabilityPipelineAddEnvVarsProcessor, - ObservabilityPipelineDedupeProcessor, - ObservabilityPipelineEnrichmentTableProcessor, - ObservabilityPipelineReduceProcessor, + ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, - ObservabilityPipelineCustomProcessor, - ObservabilityPipelineDatadogTagsProcessor, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py new file mode 100644 index 0000000000..afb330390f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py @@ -0,0 +1,148 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, + ) + + +class ObservabilityPipelineParseXMLProcessor(ModelNormal): + validations = { + "text_key": { + "min_length": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, + ) + + return { + "always_use_text_key": (bool,), + "attr_prefix": (str,), + "display_name": (str,), + "enabled": (bool,), + "field": (str,), + "id": (str,), + "include": (str,), + "include_attr": (bool,), + "parse_bool": (bool,), + "parse_null": (bool,), + "parse_number": (bool,), + "text_key": (str,), + "type": (ObservabilityPipelineParseXMLProcessorType,), + } + + attribute_map = { + "always_use_text_key": "always_use_text_key", + "attr_prefix": "attr_prefix", + "display_name": "display_name", + "enabled": "enabled", + "field": "field", + "id": "id", + "include": "include", + "include_attr": "include_attr", + "parse_bool": "parse_bool", + "parse_null": "parse_null", + "parse_number": "parse_number", + "text_key": "text_key", + "type": "type", + } + + def __init__( + self_, + enabled: bool, + field: str, + id: str, + include: str, + type: ObservabilityPipelineParseXMLProcessorType, + always_use_text_key: Union[bool, UnsetType] = unset, + attr_prefix: Union[str, UnsetType] = unset, + display_name: Union[str, UnsetType] = unset, + include_attr: Union[bool, UnsetType] = unset, + parse_bool: Union[bool, UnsetType] = unset, + parse_null: Union[bool, UnsetType] = unset, + parse_number: Union[bool, UnsetType] = unset, + text_key: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``parse_xml`` processor parses XML from a specified field and extracts it into the event. + + :param always_use_text_key: Whether to always use a text key for element content. + :type always_use_text_key: bool, optional + + :param attr_prefix: The prefix to use for XML attributes in the parsed output. + :type attr_prefix: str, optional + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Whether this processor is enabled. + :type enabled: bool + + :param field: The name of the log field that contains an XML string. + :type field: str + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param include_attr: Whether to include XML attributes in the parsed output. + :type include_attr: bool, optional + + :param parse_bool: Whether to parse boolean values from strings. + :type parse_bool: bool, optional + + :param parse_null: Whether to parse null values. + :type parse_null: bool, optional + + :param parse_number: Whether to parse numeric values from strings. + :type parse_number: bool, optional + + :param text_key: The key name to use for text content within XML elements. Must be at least 1 character if specified. + :type text_key: str, optional + + :param type: The processor type. The value should always be ``parse_xml``. 
+ :type type: ObservabilityPipelineParseXMLProcessorType + """ + if always_use_text_key is not unset: + kwargs["always_use_text_key"] = always_use_text_key + if attr_prefix is not unset: + kwargs["attr_prefix"] = attr_prefix + if display_name is not unset: + kwargs["display_name"] = display_name + if include_attr is not unset: + kwargs["include_attr"] = include_attr + if parse_bool is not unset: + kwargs["parse_bool"] = parse_bool + if parse_null is not unset: + kwargs["parse_null"] = parse_null + if parse_number is not unset: + kwargs["parse_number"] = parse_number + if text_key is not unset: + kwargs["text_key"] = text_key + super().__init__(kwargs) + + self_.enabled = enabled + self_.field = field + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py new file mode 100644 index 0000000000..5e8f0a8285 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineParseXMLProcessorType(ModelSimple): + """ + The processor type. The value should always be `parse_xml`. + + :param value: If omitted defaults to "parse_xml". Must be one of ["parse_xml"]. + :type value: str + """ + + allowed_values = { + "parse_xml", + } + PARSE_XML: ClassVar["ObservabilityPipelineParseXMLProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineParseXMLProcessorType.PARSE_XML = ObservabilityPipelineParseXMLProcessorType("parse_xml") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py new file mode 100644 index 0000000000..844e8b4ab2 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py @@ -0,0 +1,99 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, + ) + + +class ObservabilityPipelineSplitArrayProcessor(ModelNormal): + validations = { + "arrays": { + "max_items": 15, + "min_items": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, + ) + + return { + "arrays": ([ObservabilityPipelineSplitArrayProcessorArrayConfig],), + "display_name": (str,), + "enabled": (bool,), + "id": (str,), + "include": (str,), + "type": (ObservabilityPipelineSplitArrayProcessorType,), + } + + attribute_map = { + "arrays": "arrays", + "display_name": "display_name", + "enabled": "enabled", + "id": "id", + "include": "include", + "type": "type", + } + + def __init__( + self_, + arrays: List[ObservabilityPipelineSplitArrayProcessorArrayConfig], + enabled: bool, + id: str, + include: str, + type: ObservabilityPipelineSplitArrayProcessorType, + display_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``split_array`` processor splits array fields into separate events based on configured rules. + + :param arrays: A list of array split configurations. + :type arrays: [ObservabilityPipelineSplitArrayProcessorArrayConfig] + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Whether this processor is enabled. + :type enabled: bool + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. For split_array, this should typically be ``*``. + :type include: str + + :param type: The processor type. The value should always be ``split_array``. + :type type: ObservabilityPipelineSplitArrayProcessorType + """ + if display_name is not unset: + kwargs["display_name"] = display_name + super().__init__(kwargs) + + self_.arrays = arrays + self_.enabled = enabled + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py new file mode 100644 index 0000000000..ac7133ee2c --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py @@ -0,0 +1,39 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +class ObservabilityPipelineSplitArrayProcessorArrayConfig(ModelNormal): + @cached_property + def openapi_types(_): + return { + "field": (str,), + "include": (str,), + } + + attribute_map = { + "field": "field", + "include": "include", + } + + def __init__(self_, field: str, include: str, **kwargs): + """ + Configuration for a single array split operation. + + :param field: The path to the array field to split. + :type field: str + + :param include: A Datadog search query used to determine which logs this array split operation targets. + :type include: str + """ + super().__init__(kwargs) + + self_.field = field + self_.include = include diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py new file mode 100644 index 0000000000..c6b6a7e9a1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSplitArrayProcessorType(ModelSimple): + """ + The processor type. The value should always be `split_array`. + + :param value: If omitted defaults to "split_array". Must be one of ["split_array"]. + :type value: str + """ + + allowed_values = { + "split_array", + } + SPLIT_ARRAY: ClassVar["ObservabilityPipelineSplitArrayProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY = ObservabilityPipelineSplitArrayProcessorType("split_array") diff --git a/src/datadog_api_client/v2/models/__init__.py b/src/datadog_api_client/v2/models/__init__.py index 39ee6ab446..146e81c2e7 100644 --- a/src/datadog_api_client/v2/models/__init__.py +++ b/src/datadog_api_client/v2/models/__init__.py @@ -2886,6 +2886,12 @@ from datadog_api_client.v2.model.observability_pipeline_add_fields_processor_type import ( ObservabilityPipelineAddFieldsProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( ObservabilityPipelineAmazonDataFirehoseSource, ) @@ -3179,6 +3185,12 @@ from datadog_api_client.v2.model.observability_pipeline_parse_json_processor_type import ( ObservabilityPipelineParseJSONProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism import ( ObservabilityPipelinePipelineKafkaSourceSaslMechanism, ) @@ -3402,6 +3414,15 @@ from 
datadog_api_client.v2.model.observability_pipeline_socket_source_type import ObservabilityPipelineSocketSourceType from datadog_api_client.v2.model.observability_pipeline_spec import ObservabilityPipelineSpec from datadog_api_client.v2.model.observability_pipeline_spec_data import ObservabilityPipelineSpecData +from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, +) +from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( ObservabilityPipelineSplunkHecDestination, ) @@ -7366,6 +7387,8 @@ "ObservabilityPipelineAddEnvVarsProcessorVariable", "ObservabilityPipelineAddFieldsProcessor", "ObservabilityPipelineAddFieldsProcessorType", + "ObservabilityPipelineAddHostnameProcessor", + "ObservabilityPipelineAddHostnameProcessorType", "ObservabilityPipelineAmazonDataFirehoseSource", "ObservabilityPipelineAmazonDataFirehoseSourceType", "ObservabilityPipelineAmazonOpenSearchDestination", @@ -7479,6 +7502,8 @@ "ObservabilityPipelineParseGrokProcessorType", "ObservabilityPipelineParseJSONProcessor", "ObservabilityPipelineParseJSONProcessorType", + "ObservabilityPipelineParseXMLProcessor", + "ObservabilityPipelineParseXMLProcessorType", "ObservabilityPipelinePipelineKafkaSourceSaslMechanism", "ObservabilityPipelineQuotaProcessor", "ObservabilityPipelineQuotaProcessorLimit", @@ -7560,6 +7585,9 @@ "ObservabilityPipelineSocketSourceType", "ObservabilityPipelineSpec", "ObservabilityPipelineSpecData", + "ObservabilityPipelineSplitArrayProcessor", + "ObservabilityPipelineSplitArrayProcessorArrayConfig", + "ObservabilityPipelineSplitArrayProcessorType", "ObservabilityPipelineSplunkHecDestination", "ObservabilityPipelineSplunkHecDestinationEncoding", "ObservabilityPipelineSplunkHecDestinationType",
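Usage note (not part of the generated patch): a minimal sketch of how the three processor models introduced in this diff might be instantiated with the Python client. The `id`, `include`, and `field` values mirror the schema examples above; the surrounding pipeline objects (sources, destinations, the processor group, and the API request itself) are omitted, and the optional `parse_number` flag is shown only for illustration.

from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import (
    ObservabilityPipelineAddHostnameProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import (
    ObservabilityPipelineAddHostnameProcessorType,
)
from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import (
    ObservabilityPipelineParseXMLProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import (
    ObservabilityPipelineParseXMLProcessorType,
)
from datadog_api_client.v2.model.observability_pipeline_split_array_processor import (
    ObservabilityPipelineSplitArrayProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import (
    ObservabilityPipelineSplitArrayProcessorArrayConfig,
)
from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import (
    ObservabilityPipelineSplitArrayProcessorType,
)

# `add_hostname`: adds the hostname to log events matching the include query.
add_hostname = ObservabilityPipelineAddHostnameProcessor(
    id="add-hostname-processor",
    type=ObservabilityPipelineAddHostnameProcessorType.ADD_HOSTNAME,
    include="service:my-service",
    enabled=True,
)

# `parse_xml`: parses an XML string from the `message` field into the event.
parse_xml = ObservabilityPipelineParseXMLProcessor(
    id="parse-xml-processor",
    type=ObservabilityPipelineParseXMLProcessorType.PARSE_XML,
    include="service:my-service",
    field="message",
    enabled=True,
    parse_number=True,  # optional: parse numeric values from strings
)

# `split_array`: emits one event per element of the `tags` array (1-15 array configs allowed).
split_array = ObservabilityPipelineSplitArrayProcessor(
    id="split-array-processor",
    type=ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY,
    include="*",
    enabled=True,
    arrays=[
        ObservabilityPipelineSplitArrayProcessorArrayConfig(field="tags", include="*"),
    ],
)

These objects would then be passed in the `processors` list of an `ObservabilityPipelineConfigProcessorGroup`, whose `oneOf` union now accepts all three new processor types alongside the existing ones.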