diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml
index cfbcb7286ac..203e7aa0447 100644
--- a/.generator/schemas/v2/openapi.yaml
+++ b/.generator/schemas/v2/openapi.yaml
@@ -6570,8 +6570,11 @@ components:
x-enum-varnames:
- AZURE_SCAN_OPTIONS
AzureStorageDestination:
- description: The `azure_storage` destination forwards logs to an Azure Blob
+ description: 'The `azure_storage` destination forwards logs to an Azure Blob
Storage container.
+
+
+ **Supported pipeline types:** logs'
properties:
blob_prefix:
description: Optional prefix for blobs written to the container.
@@ -6602,6 +6605,8 @@ components:
- inputs
- container_name
type: object
+ x-pipeline-types:
+ - logs
AzureStorageDestinationType:
default: azure_storage
description: The destination type. The value should always be `azure_storage`.
@@ -33395,8 +33400,11 @@ components:
- query
type: object
MicrosoftSentinelDestination:
- description: The `microsoft_sentinel` destination forwards logs to Microsoft
+ description: 'The `microsoft_sentinel` destination forwards logs to Microsoft
Sentinel.
+
+
+ **Supported pipeline types:** logs'
properties:
client_id:
description: Azure AD client ID used for authentication.
@@ -33437,6 +33445,8 @@ components:
- dcr_immutable_id
- table
type: object
+ x-pipeline-types:
+ - logs
MicrosoftSentinelDestinationType:
default: microsoft_sentinel
description: The destination type. The value should always be `microsoft_sentinel`.
@@ -35141,8 +35151,11 @@ components:
- data
type: object
ObservabilityPipelineAddEnvVarsProcessor:
- description: The `add_env_vars` processor adds environment variable values to
- log events.
+ description: 'The `add_env_vars` processor adds environment variable values
+ to log events.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -35174,6 +35187,8 @@ components:
- variables
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineAddEnvVarsProcessorType:
default: add_env_vars
description: The processor type. The value should always be `add_env_vars`.
@@ -35199,7 +35214,10 @@ components:
- name
type: object
ObservabilityPipelineAddFieldsProcessor:
- description: The `add_fields` processor adds static key-value fields to logs.
+ description: 'The `add_fields` processor adds static key-value fields to logs.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -35233,6 +35251,8 @@ components:
- fields
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineAddFieldsProcessorType:
default: add_fields
description: The processor type. The value should always be `add_fields`.
@@ -35243,7 +35263,10 @@ components:
x-enum-varnames:
- ADD_FIELDS
ObservabilityPipelineAmazonDataFirehoseSource:
- description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose.
+ description: 'The `amazon_data_firehose` source ingests logs from AWS Data Firehose.
+
+
+ **Supported pipeline types:** logs'
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineAwsAuth'
@@ -35261,6 +35284,8 @@ components:
- id
- type
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineAmazonDataFirehoseSourceType:
default: amazon_data_firehose
description: The source type. The value should always be `amazon_data_firehose`.
@@ -35271,7 +35296,10 @@ components:
x-enum-varnames:
- AMAZON_DATA_FIREHOSE
ObservabilityPipelineAmazonOpenSearchDestination:
- description: The `amazon_opensearch` destination writes logs to Amazon OpenSearch.
+ description: 'The `amazon_opensearch` destination writes logs to Amazon OpenSearch.
+
+
+ **Supported pipeline types:** logs'
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth'
@@ -35299,6 +35327,8 @@ components:
- inputs
- auth
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineAmazonOpenSearchDestinationAuth:
description: 'Authentication settings for the Amazon OpenSearch destination.
@@ -35342,8 +35372,11 @@ components:
x-enum-varnames:
- AMAZON_OPENSEARCH
ObservabilityPipelineAmazonS3Destination:
- description: The `amazon_s3` destination sends your logs in Datadog-rehydratable
+ description: 'The `amazon_s3` destination sends your logs in Datadog-rehydratable
format to an Amazon S3 bucket for archiving.
+
+
+ **Supported pipeline types:** logs'
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineAwsAuth'
@@ -35384,6 +35417,8 @@ components:
- region
- storage_class
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineAmazonS3DestinationStorageClass:
description: S3 storage class.
enum:
@@ -35420,7 +35455,10 @@ components:
ObservabilityPipelineAmazonS3Source:
description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket.
- It supports AWS authentication and TLS encryption.'
+ It supports AWS authentication and TLS encryption.
+
+
+ **Supported pipeline types:** logs'
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineAwsAuth'
@@ -35443,6 +35481,8 @@ components:
- type
- region
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineAmazonS3SourceType:
default: amazon_s3
description: The source type. Always `amazon_s3`.
@@ -35453,8 +35493,11 @@ components:
x-enum-varnames:
- AMAZON_S3
ObservabilityPipelineAmazonSecurityLakeDestination:
- description: The `amazon_security_lake` destination sends your logs to Amazon
+ description: 'The `amazon_security_lake` destination sends your logs to Amazon
Security Lake.
+
+
+ **Supported pipeline types:** logs'
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineAwsAuth'
@@ -35494,6 +35537,8 @@ components:
- region
- custom_source_name
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineAmazonSecurityLakeDestinationType:
default: amazon_security_lake
description: The destination type. Always `amazon_security_lake`.
@@ -35537,6 +35582,8 @@ components:
items:
$ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem'
type: array
+ pipeline_type:
+ $ref: '#/components/schemas/ObservabilityPipelineConfigPipelineType'
processors:
description: A list of processor groups that transform or enrich log data.
example:
@@ -35573,25 +35620,38 @@ components:
ObservabilityPipelineConfigDestinationItem:
description: A destination for the pipeline.
oneOf:
- - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination'
- $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination'
- - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination'
- - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination'
- - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination'
- - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination'
- - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination'
- - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination'
- $ref: '#/components/schemas/AzureStorageDestination'
- - $ref: '#/components/schemas/MicrosoftSentinelDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination'
- $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination'
+ - $ref: '#/components/schemas/MicrosoftSentinelDestination'
- $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestination'
- - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination'
- $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestination'
- - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination'
- $ref: '#/components/schemas/ObservabilityPipelineSocketDestination'
- - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination'
- - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination'
- - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination'
+ - $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestination'
+ ObservabilityPipelineConfigPipelineType:
+ default: logs
+ description: The type of data being ingested. Defaults to `logs` if not specified.
+ enum:
+ - logs
+ - metrics
+ example: logs
+ type: string
+ x-enum-varnames:
+ - LOGS
+ - METRICS
ObservabilityPipelineConfigProcessorGroup:
description: A group of processors.
example:
@@ -35665,45 +35725,49 @@ components:
description: A processor for the pipeline.
oneOf:
- $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor'
- $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor'
- $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor'
- - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor'
- $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor'
- $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor'
- - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor'
- $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor'
- - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor'
- $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor'
- - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor'
- - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor'
- - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor'
- - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor'
- - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor'
- $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor'
- - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor'
- - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessor'
ObservabilityPipelineConfigSourceItem:
description: A data source for the pipeline.
oneOf:
- - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource'
- $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource'
- - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource'
- - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource'
+ - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource'
- $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Source'
- - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource'
- $ref: '#/components/schemas/ObservabilityPipelineFluentBitSource'
- - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource'
- - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource'
- - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource'
- - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource'
- - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource'
+ - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource'
- $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSource'
- $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource'
+ - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource'
+ - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource'
- $ref: '#/components/schemas/ObservabilityPipelineLogstashSource'
+ - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource'
- $ref: '#/components/schemas/ObservabilityPipelineSocketSource'
+ - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource'
+ - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource'
+ - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource'
+ - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource'
ObservabilityPipelineCrowdStrikeNextGenSiemDestination:
- description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike
+ description: 'The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike
Next Gen SIEM.
+
+
+ **Supported pipeline types:** logs'
properties:
compression:
$ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression'
@@ -35731,6 +35795,8 @@ components:
- inputs
- encoding
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression:
description: Compression configuration for log events.
properties:
@@ -35774,9 +35840,12 @@ components:
x-enum-varnames:
- CROWDSTRIKE_NEXT_GEN_SIEM
ObservabilityPipelineCustomProcessor:
- description: The `custom_processor` processor transforms events using [Vector
+ description: 'The `custom_processor` processor transforms events using [Vector
Remap Language (VRL)](https://vector.dev/docs/reference/vrl/) scripts with
advanced filtering capabilities.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -35810,6 +35879,8 @@ components:
- remaps
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineCustomProcessorRemap:
description: Defines a single VRL remap rule with its own filtering and transformation
logic.
@@ -35885,7 +35956,11 @@ components:
- config
type: object
ObservabilityPipelineDatadogAgentSource:
- description: The `datadog_agent` source collects logs from the Datadog Agent.
+ description: 'The `datadog_agent` source collects logs and metrics from the
+ Datadog Agent.
+
+
+ **Supported pipeline types:** logs, metrics'
properties:
id:
description: The unique identifier for this component. Used to reference
@@ -35901,6 +35976,9 @@ components:
- id
- type
type: object
+ x-pipeline-types:
+ - logs
+ - metrics
ObservabilityPipelineDatadogAgentSourceType:
default: datadog_agent
description: The source type. The value should always be `datadog_agent`.
@@ -35911,7 +35989,10 @@ components:
x-enum-varnames:
- DATADOG_AGENT
ObservabilityPipelineDatadogLogsDestination:
- description: The `datadog_logs` destination forwards logs to Datadog Log Management.
+ description: 'The `datadog_logs` destination forwards logs to Datadog Log Management.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component.
@@ -35932,6 +36013,8 @@ components:
- type
- inputs
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineDatadogLogsDestinationType:
default: datadog_logs
description: The destination type. The value should always be `datadog_logs`.
@@ -35941,9 +36024,48 @@ components:
type: string
x-enum-varnames:
- DATADOG_LOGS
+ ObservabilityPipelineDatadogMetricsDestination:
+ description: 'The `datadog_metrics` destination forwards metrics to Datadog.
+
+
+ **Supported pipeline types:** metrics'
+ properties:
+ id:
+ description: The unique identifier for this component.
+ example: datadog-metrics-destination
+ type: string
+ inputs:
+ description: A list of component IDs whose output is used as the input for
+ this component.
+ example:
+ - metric-tags-processor
+ items:
+ type: string
+ type: array
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestinationType'
+ required:
+ - id
+ - type
+ - inputs
+ type: object
+ x-pipeline-types:
+ - metrics
+ ObservabilityPipelineDatadogMetricsDestinationType:
+ default: datadog_metrics
+ description: The destination type. The value should always be `datadog_metrics`.
+ enum:
+ - datadog_metrics
+ example: datadog_metrics
+ type: string
+ x-enum-varnames:
+ - DATADOG_METRICS
ObservabilityPipelineDatadogTagsProcessor:
- description: The `datadog_tags` processor includes or excludes specific Datadog
+ description: 'The `datadog_tags` processor includes or excludes specific Datadog
tags in your logs.
+
+
+ **Supported pipeline types:** logs'
properties:
action:
$ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessorAction'
@@ -35986,6 +36108,8 @@ components:
- keys
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineDatadogTagsProcessorAction:
description: The action to take on tags with matching keys.
enum:
@@ -36028,7 +36152,10 @@ components:
- DECODE_JSON
- DECODE_SYSLOG
ObservabilityPipelineDedupeProcessor:
- description: The `dedupe` processor removes duplicate fields in log events.
+ description: 'The `dedupe` processor removes duplicate fields in log events.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -36065,6 +36192,8 @@ components:
- mode
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineDedupeProcessorMode:
description: The deduplication mode to apply to the fields.
enum:
@@ -36085,8 +36214,11 @@ components:
x-enum-varnames:
- DEDUPE
ObservabilityPipelineElasticsearchDestination:
- description: The `elasticsearch` destination writes logs to an Elasticsearch
+ description: 'The `elasticsearch` destination writes logs to an Elasticsearch
cluster.
+
+
+ **Supported pipeline types:** logs'
properties:
api_version:
$ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion'
@@ -36113,6 +36245,8 @@ components:
- type
- inputs
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineElasticsearchDestinationApiVersion:
description: The Elasticsearch API version to use. Set to `auto` to auto-detect.
enum:
@@ -36266,8 +36400,11 @@ components:
- path
type: object
ObservabilityPipelineEnrichmentTableProcessor:
- description: The `enrichment_table` processor enriches logs using a static CSV
- file or GeoIP database.
+ description: 'The `enrichment_table` processor enriches logs using a static
+ CSV file or GeoIP database.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -36301,6 +36438,8 @@ components:
- target
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineEnrichmentTableProcessorType:
default: enrichment_table
description: The processor type. The value should always be `enrichment_table`.
@@ -36326,9 +36465,12 @@ components:
- value
type: object
ObservabilityPipelineFilterProcessor:
- description: The `filter` processor allows conditional processing of logs based
- on a Datadog search query. Logs that match the `include` query are passed
- through; others are discarded.
+ description: 'The `filter` processor allows conditional processing of logs
+ and metrics based on a Datadog search query. Logs and metrics that match
+ the `include` query are passed through; others are discarded.
+
+
+ **Supported pipeline types:** logs, metrics'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -36343,9 +36485,9 @@ components:
example: filter-processor
type: string
include:
- description: A Datadog search query used to determine which logs should
- pass through the filter. Logs that match this query continue to downstream
- components; others are dropped.
+ description: A Datadog search query used to determine which logs and metrics
+ should pass through the filter. Logs and metrics that match this query
+ continue to downstream components; others are dropped.
example: service:my-service
type: string
type:
@@ -36356,6 +36498,9 @@ components:
- include
- enabled
type: object
+ x-pipeline-types:
+ - logs
+ - metrics
ObservabilityPipelineFilterProcessorType:
default: filter
description: The processor type. The value should always be `filter`.
@@ -36366,7 +36511,10 @@ components:
x-enum-varnames:
- FILTER
ObservabilityPipelineFluentBitSource:
- description: The `fluent_bit` source ingests logs from Fluent Bit.
+ description: 'The `fluent_bit` source ingests logs from Fluent Bit.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component. Used to reference
@@ -36382,6 +36530,8 @@ components:
- id
- type
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineFluentBitSourceType:
default: fluent_bit
description: The source type. The value should always be `fluent_bit`.
@@ -36392,7 +36542,10 @@ components:
x-enum-varnames:
- FLUENT_BIT
ObservabilityPipelineFluentdSource:
- description: The `fluentd` source ingests logs from a Fluentd-compatible service.
+ description: 'The `fluentd` source ingests logs from a Fluentd-compatible service.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component. Used to reference
@@ -36408,6 +36561,8 @@ components:
- id
- type
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineFluentdSourceType:
default: fluentd
description: The source type. The value should always be `fluentd`.
@@ -36432,7 +36587,10 @@ components:
from logs and sends them to Datadog.
Metrics can be counters, gauges, or distributions and optionally grouped by
- log fields.'
+ log fields.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -36462,6 +36620,8 @@ components:
- type
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineGenerateMetricsProcessorType:
default: generate_datadog_metrics
description: The processor type. Always `generate_datadog_metrics`.
@@ -36556,7 +36716,10 @@ components:
- GAUGE
- DISTRIBUTION
ObservabilityPipelineGoogleChronicleDestination:
- description: The `google_chronicle` destination sends logs to Google Chronicle.
+ description: 'The `google_chronicle` destination sends logs to Google Chronicle.
+
+
+ **Supported pipeline types:** logs'
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineGcpAuth'
@@ -36590,6 +36753,8 @@ components:
- inputs
- customer_id
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineGoogleChronicleDestinationEncoding:
description: The encoding format for the logs sent to Chronicle.
enum:
@@ -36613,7 +36778,10 @@ components:
description: 'The `google_cloud_storage` destination stores logs in a Google
Cloud Storage (GCS) bucket.
- It requires a bucket name, GCP authentication, and metadata fields.'
+ It requires a bucket name, GCP authentication, and metadata fields.
+
+
+ **Supported pipeline types:** logs'
properties:
acl:
$ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationAcl'
@@ -36655,6 +36823,8 @@ components:
- bucket
- storage_class
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineGoogleCloudStorageDestinationAcl:
description: Access control list setting for objects written to the bucket.
enum:
@@ -36697,8 +36867,11 @@ components:
x-enum-varnames:
- GOOGLE_CLOUD_STORAGE
ObservabilityPipelineGooglePubSubDestination:
- description: The `google_pubsub` destination publishes logs to a Google Cloud
+ description: 'The `google_pubsub` destination publishes logs to a Google Cloud
Pub/Sub topic.
+
+
+ **Supported pipeline types:** logs'
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineGcpAuth'
@@ -36736,6 +36909,8 @@ components:
- project
- topic
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineGooglePubSubDestinationEncoding:
description: Encoding format for log events.
enum:
@@ -36756,8 +36931,11 @@ components:
x-enum-varnames:
- GOOGLE_PUBSUB
ObservabilityPipelineGooglePubSubSource:
- description: The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub
+ description: 'The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub
subscription.
+
+
+ **Supported pipeline types:** logs'
properties:
auth:
$ref: '#/components/schemas/ObservabilityPipelineGcpAuth'
@@ -36788,6 +36966,8 @@ components:
- project
- subscription
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineGooglePubSubSourceType:
default: google_pubsub
description: The source type. The value should always be `google_pubsub`.
@@ -36797,9 +36977,92 @@ components:
type: string
x-enum-varnames:
- GOOGLE_PUBSUB
+ ObservabilityPipelineHttpClientDestination:
+ description: 'The `http_client` destination sends data to an HTTP endpoint.
+
+
+ **Supported pipeline types:** logs, metrics'
+ properties:
+ auth_strategy:
+ $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationAuthStrategy'
+ compression:
+ $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompression'
+ encoding:
+ $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationEncoding'
+ id:
+ description: The unique identifier for this component.
+ example: http-client-destination
+ type: string
+ inputs:
+ description: A list of component IDs whose output is used as the input for
+ this component.
+ example:
+ - filter-processor
+ items:
+ type: string
+ type: array
+ tls:
+ $ref: '#/components/schemas/ObservabilityPipelineTls'
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationType'
+ required:
+ - id
+ - type
+ - inputs
+ - encoding
+ type: object
+ x-pipeline-types:
+ - logs
+ - metrics
+ ObservabilityPipelineHttpClientDestinationAuthStrategy:
+ description: HTTP authentication strategy.
+ enum:
+ - basic
+ - bearer
+ example: basic
+ type: string
+ x-enum-varnames:
+ - BASIC
+ - BEARER
+ ObservabilityPipelineHttpClientDestinationCompression:
+ description: Compression configuration for HTTP requests.
+ properties:
+ algorithm:
+ $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm'
+ required:
+ - algorithm
+ type: object
+ ObservabilityPipelineHttpClientDestinationCompressionAlgorithm:
+ description: Compression algorithm.
+ enum:
+ - gzip
+ example: gzip
+ type: string
+ x-enum-varnames:
+ - GZIP
+ ObservabilityPipelineHttpClientDestinationEncoding:
+ description: Encoding format for events.
+ enum:
+ - json
+ example: json
+ type: string
+ x-enum-varnames:
+ - JSON
+ ObservabilityPipelineHttpClientDestinationType:
+ default: http_client
+ description: The destination type. The value should always be `http_client`.
+ enum:
+ - http_client
+ example: http_client
+ type: string
+ x-enum-varnames:
+ - HTTP_CLIENT
ObservabilityPipelineHttpClientSource:
- description: The `http_client` source scrapes logs from HTTP endpoints at regular
+ description: 'The `http_client` source scrapes logs from HTTP endpoints at regular
intervals.
+
+
+ **Supported pipeline types:** logs'
properties:
auth_strategy:
$ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceAuthStrategy'
@@ -36830,6 +37093,8 @@ components:
- type
- decoding
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineHttpClientSourceAuthStrategy:
description: Optional authentication strategy for HTTP requests.
enum:
@@ -36850,8 +37115,11 @@ components:
x-enum-varnames:
- HTTP_CLIENT
ObservabilityPipelineHttpServerSource:
- description: The `http_server` source collects logs over HTTP POST from external
+ description: 'The `http_server` source collects logs over HTTP POST from external
services.
+
+
+ **Supported pipeline types:** logs'
properties:
auth_strategy:
$ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceAuthStrategy'
@@ -36871,6 +37139,8 @@ components:
- auth_strategy
- decoding
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineHttpServerSourceAuthStrategy:
description: HTTP authentication method.
enum:
@@ -36891,7 +37161,10 @@ components:
x-enum-varnames:
- HTTP_SERVER
ObservabilityPipelineKafkaSource:
- description: The `kafka` source ingests data from Apache Kafka topics.
+ description: 'The `kafka` source ingests data from Apache Kafka topics.
+
+
+ **Supported pipeline types:** logs'
properties:
group_id:
description: Consumer group ID used by the Kafka client.
@@ -36930,6 +37203,8 @@ components:
- group_id
- topics
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineKafkaSourceLibrdkafkaOption:
description: Represents a key-value pair used to configure low-level `librdkafka`
client options for Kafka sources, such as timeouts, buffer sizes, and security
@@ -36964,7 +37239,10 @@ components:
x-enum-varnames:
- KAFKA
ObservabilityPipelineLogstashSource:
- description: The `logstash` source ingests logs from a Logstash forwarder.
+ description: 'The `logstash` source ingests logs from a Logstash forwarder.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component. Used to reference
@@ -36980,6 +37258,8 @@ components:
- id
- type
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineLogstashSourceType:
default: logstash
description: The source type. The value should always be `logstash`.
@@ -37004,13 +37284,104 @@ components:
- name
- value
type: object
+ ObservabilityPipelineMetricTagsProcessor:
+ description: 'The `metric_tags` processor filters metrics based on their tags
+ using Datadog tag key patterns.
+
+
+ **Supported pipeline types:** metrics'
+ properties:
+ enabled:
+ description: Whether this processor is enabled.
+ example: true
+ type: boolean
+ id:
+ description: The unique identifier for this component. Used to reference
+ this component in other parts of the pipeline (for example, as the `input`
+ to downstream components).
+ example: metric-tags-processor
+ type: string
+ include:
+ description: A Datadog search query used to determine which metrics this
+ processor targets.
+ example: '*'
+ type: string
+ rules:
+ description: A list of rules for filtering metric tags.
+ items:
+ $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRule'
+ maxItems: 100
+ minItems: 1
+ type: array
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorType'
+ required:
+ - id
+ - type
+ - include
+ - rules
+ - enabled
+ type: object
+ x-pipeline-types:
+ - metrics
+ ObservabilityPipelineMetricTagsProcessorRule:
+ description: Defines a rule for filtering metric tags based on key patterns.
+ properties:
+ action:
+ $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleAction'
+ keys:
+ description: A list of tag keys to include or exclude.
+ example:
+ - env
+ - service
+ - version
+ items:
+ type: string
+ type: array
+ mode:
+ $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleMode'
+ required:
+ - mode
+ - action
+ - keys
+ type: object
+ ObservabilityPipelineMetricTagsProcessorRuleAction:
+ description: The action to take on tags with matching keys.
+ enum:
+ - include
+ - exclude
+ example: include
+ type: string
+ x-enum-varnames:
+ - INCLUDE
+ - EXCLUDE
+ ObservabilityPipelineMetricTagsProcessorRuleMode:
+ description: The processing mode for tag filtering.
+ enum:
+ - filter
+ example: filter
+ type: string
+ x-enum-varnames:
+ - FILTER
+ ObservabilityPipelineMetricTagsProcessorType:
+ default: metric_tags
+ description: The processor type. The value should always be `metric_tags`.
+ enum:
+ - metric_tags
+ example: metric_tags
+ type: string
+ x-enum-varnames:
+ - METRIC_TAGS
ObservabilityPipelineMetricValue:
description: Specifies how the value of the generated metric is computed.
oneOf:
- $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOne'
- $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByField'
ObservabilityPipelineNewRelicDestination:
- description: The `new_relic` destination sends logs to the New Relic platform.
+ description: 'The `new_relic` destination sends logs to the New Relic platform.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component.
@@ -37034,6 +37405,8 @@ components:
- inputs
- region
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineNewRelicDestinationRegion:
description: The New Relic region.
enum:
@@ -37054,8 +37427,11 @@ components:
x-enum-varnames:
- NEW_RELIC
ObservabilityPipelineOcsfMapperProcessor:
- description: The `ocsf_mapper` processor transforms logs into the OCSF schema
+ description: 'The `ocsf_mapper` processor transforms logs into the OCSF schema
using a predefined mapping configuration.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -37087,6 +37463,8 @@ components:
- mappings
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineOcsfMapperProcessorMapping:
description: Defines how specific events are transformed to OCSF using a mapping
configuration.
@@ -37146,7 +37524,10 @@ components:
- OKTA_SYSTEM_LOG_AUTHENTICATION
- PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC
ObservabilityPipelineOpenSearchDestination:
- description: The `opensearch` destination writes logs to an OpenSearch cluster.
+ description: 'The `opensearch` destination writes logs to an OpenSearch cluster.
+
+
+ **Supported pipeline types:** logs'
properties:
bulk_index:
description: The index to write logs to.
@@ -37171,6 +37552,8 @@ components:
- type
- inputs
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineOpenSearchDestinationType:
default: opensearch
description: The destination type. The value should always be `opensearch`.
@@ -37181,8 +37564,11 @@ components:
x-enum-varnames:
- OPENSEARCH
ObservabilityPipelineParseGrokProcessor:
- description: The `parse_grok` processor extracts structured fields from unstructured
+ description: 'The `parse_grok` processor extracts structured fields from unstructured
log messages using Grok patterns.
+
+
+ **Supported pipeline types:** logs'
properties:
disable_library_rules:
default: false
@@ -37221,6 +37607,8 @@ components:
- rules
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineParseGrokProcessorRule:
description: 'A Grok parsing rule used in the `parse_grok` processor. Each rule
defines how to extract structured fields
@@ -37300,9 +37688,12 @@ components:
x-enum-varnames:
- PARSE_GROK
ObservabilityPipelineParseJSONProcessor:
- description: The `parse_json` processor extracts JSON from a specified field
+ description: 'The `parse_json` processor extracts JSON from a specified field
and flattens it into the event. This is useful when logs contain embedded
JSON as a string.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -37334,6 +37725,8 @@ components:
- field
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineParseJSONProcessorType:
default: parse_json
description: The processor type. The value should always be `parse_json`.
@@ -37355,9 +37748,12 @@ components:
- SCRAMNOT_SHANOT_256
- SCRAMNOT_SHANOT_512
ObservabilityPipelineQuotaProcessor:
- description: The Quota Processor measures logging traffic for logs that match
+ description: 'The `quota` processor measures logging traffic for logs that match
a specified filter. When the configured daily quota is met, the processor
can drop or alert.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -37418,6 +37814,8 @@ components:
- limit
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineQuotaProcessorLimit:
description: The maximum amount of data or number of events allowed before the
quota is enforced. Can be specified in bytes or events.
@@ -37489,8 +37887,11 @@ components:
x-enum-varnames:
- QUOTA
ObservabilityPipelineReduceProcessor:
- description: The `reduce` processor aggregates and merges logs based on matching
+ description: 'The `reduce` processor aggregates and merges logs based on matching
keys and merge strategies.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -37531,6 +37932,8 @@ components:
- merge_strategies
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineReduceProcessorMergeStrategy:
description: Defines how a specific field should be merged across grouped events.
properties:
@@ -37584,7 +37987,10 @@ components:
x-enum-varnames:
- REDUCE
ObservabilityPipelineRemoveFieldsProcessor:
- description: The `remove_fields` processor deletes specified fields from logs.
+ description: 'The `remove_fields` processor deletes specified fields from logs.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -37620,6 +38026,8 @@ components:
- fields
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineRemoveFieldsProcessorType:
default: remove_fields
description: The processor type. The value should always be `remove_fields`.
@@ -37630,7 +38038,10 @@ components:
x-enum-varnames:
- REMOVE_FIELDS
ObservabilityPipelineRenameFieldsProcessor:
- description: The `rename_fields` processor changes field names.
+ description: 'The `rename_fields` processor changes field names.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -37665,6 +38076,8 @@ components:
- fields
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineRenameFieldsProcessorField:
description: Defines how to rename a field in log events.
properties:
@@ -37696,8 +38109,11 @@ components:
x-enum-varnames:
- RENAME_FIELDS
ObservabilityPipelineRsyslogDestination:
- description: The `rsyslog` destination forwards logs to an external `rsyslog`
+ description: 'The `rsyslog` destination forwards logs to an external `rsyslog`
server over TCP or UDP using the syslog protocol.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component.
@@ -37726,6 +38142,8 @@ components:
- type
- inputs
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineRsyslogDestinationType:
default: rsyslog
description: The destination type. The value should always be `rsyslog`.
@@ -37736,8 +38154,11 @@ components:
x-enum-varnames:
- RSYSLOG
ObservabilityPipelineRsyslogSource:
- description: The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog`
- server using the syslog protocol.
+ description: 'The `rsyslog` source listens for logs over TCP or UDP from an
+ `rsyslog` server using the syslog protocol.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component. Used to reference
@@ -37756,6 +38177,8 @@ components:
- type
- mode
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineRsyslogSourceType:
default: rsyslog
description: The source type. The value should always be `rsyslog`.
@@ -37766,8 +38189,11 @@ components:
x-enum-varnames:
- RSYSLOG
ObservabilityPipelineSampleProcessor:
- description: The `sample` processor allows probabilistic sampling of logs at
+ description: 'The `sample` processor allows probabilistic sampling of logs at
a fixed rate.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -37805,6 +38231,8 @@ components:
- include
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSampleProcessorType:
default: sample
description: The processor type. The value should always be `sample`.
@@ -37815,8 +38243,11 @@ components:
x-enum-varnames:
- SAMPLE
ObservabilityPipelineSensitiveDataScannerProcessor:
- description: The `sensitive_data_scanner` processor detects and optionally redacts
- sensitive data in log events.
+ description: 'The `sensitive_data_scanner` processor detects and optionally
+ redacts sensitive data in log events.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -37850,6 +38281,8 @@ components:
- rules
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSensitiveDataScannerProcessorAction:
description: Defines what action to take when sensitive data is matched.
oneOf:
@@ -38167,7 +38600,10 @@ components:
x-enum-varnames:
- SENSITIVE_DATA_SCANNER
ObservabilityPipelineSentinelOneDestination:
- description: The `sentinel_one` destination sends logs to SentinelOne.
+ description: 'The `sentinel_one` destination sends logs to SentinelOne.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component.
@@ -38191,6 +38627,8 @@ components:
- inputs
- region
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSentinelOneDestinationRegion:
description: The SentinelOne region to send logs to.
enum:
@@ -38215,8 +38653,11 @@ components:
x-enum-varnames:
- SENTINEL_ONE
ObservabilityPipelineSocketDestination:
- description: The `socket` destination sends logs over TCP or UDP to a remote
+ description: 'The `socket` destination sends logs over TCP or UDP to a remote
server.
+
+
+ **Supported pipeline types:** logs'
properties:
encoding:
$ref: '#/components/schemas/ObservabilityPipelineSocketDestinationEncoding'
@@ -38249,6 +38690,8 @@ components:
- framing
- mode
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSocketDestinationEncoding:
description: Encoding format for log events.
enum:
@@ -38343,7 +38786,10 @@ components:
x-enum-varnames:
- SOCKET
ObservabilityPipelineSocketSource:
- description: The `socket` source ingests logs over TCP or UDP.
+ description: 'The `socket` source ingests logs over TCP or UDP.
+
+
+ **Supported pipeline types:** logs'
properties:
framing:
$ref: '#/components/schemas/ObservabilityPipelineSocketSourceFraming'
@@ -38366,6 +38812,8 @@ components:
- mode
- framing
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSocketSourceFraming:
description: Framing method configuration for the socket source.
oneOf:
@@ -38507,8 +38955,11 @@ components:
- attributes
type: object
ObservabilityPipelineSplunkHecDestination:
- description: The `splunk_hec` destination forwards logs to Splunk using the
+ description: 'The `splunk_hec` destination forwards logs to Splunk using the
HTTP Event Collector (HEC).
+
+
+ **Supported pipeline types:** logs'
properties:
auto_extract_timestamp:
description: 'If `true`, Splunk tries to extract timestamps from incoming
@@ -38548,6 +38999,8 @@ components:
- type
- inputs
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSplunkHecDestinationEncoding:
description: Encoding format for log events.
enum:
@@ -38568,8 +39021,11 @@ components:
x-enum-varnames:
- SPLUNK_HEC
ObservabilityPipelineSplunkHecSource:
- description: The `splunk_hec` source implements the Splunk HTTP Event Collector
+ description: 'The `splunk_hec` source implements the Splunk HTTP Event Collector
(HEC) API.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component. Used to reference
@@ -38585,6 +39041,8 @@ components:
- id
- type
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSplunkHecSourceType:
default: splunk_hec
description: The source type. Always `splunk_hec`.
@@ -38598,7 +39056,10 @@ components:
description: 'The `splunk_tcp` source receives logs from a Splunk Universal
Forwarder over TCP.
- TLS is supported for secure transmission.'
+ TLS is supported for secure transmission.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component. Used to reference
@@ -38614,6 +39075,8 @@ components:
- id
- type
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSplunkTcpSourceType:
default: splunk_tcp
description: The source type. Always `splunk_tcp`.
@@ -38624,7 +39087,10 @@ components:
x-enum-varnames:
- SPLUNK_TCP
ObservabilityPipelineSumoLogicDestination:
- description: The `sumo_logic` destination forwards logs to Sumo Logic.
+ description: 'The `sumo_logic` destination forwards logs to Sumo Logic.
+
+
+ **Supported pipeline types:** logs'
properties:
encoding:
$ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding'
@@ -38665,6 +39131,8 @@ components:
- type
- inputs
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSumoLogicDestinationEncoding:
description: The output encoding format.
enum:
@@ -38702,7 +39170,10 @@ components:
x-enum-varnames:
- SUMO_LOGIC
ObservabilityPipelineSumoLogicSource:
- description: The `sumo_logic` source receives logs from Sumo Logic collectors.
+ description: 'The `sumo_logic` source receives logs from Sumo Logic collectors.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component. Used to reference
@@ -38716,6 +39187,8 @@ components:
- id
- type
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSumoLogicSourceType:
default: sumo_logic
description: The source type. The value should always be `sumo_logic`.
@@ -38726,8 +39199,11 @@ components:
x-enum-varnames:
- SUMO_LOGIC
ObservabilityPipelineSyslogNgDestination:
- description: The `syslog_ng` destination forwards logs to an external `syslog-ng`
+ description: 'The `syslog_ng` destination forwards logs to an external `syslog-ng`
server over TCP or UDP using the syslog protocol.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component.
@@ -38756,6 +39232,8 @@ components:
- type
- inputs
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSyslogNgDestinationType:
default: syslog_ng
description: The destination type. The value should always be `syslog_ng`.
@@ -38766,8 +39244,11 @@ components:
x-enum-varnames:
- SYSLOG_NG
ObservabilityPipelineSyslogNgSource:
- description: The `syslog_ng` source listens for logs over TCP or UDP from a
+ description: 'The `syslog_ng` source listens for logs over TCP or UDP from a
`syslog-ng` server using the syslog protocol.
+
+
+ **Supported pipeline types:** logs'
properties:
id:
description: The unique identifier for this component. Used to reference
@@ -38786,6 +39267,8 @@ components:
- type
- mode
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineSyslogNgSourceType:
default: syslog_ng
description: The source type. The value should always be `syslog_ng`.
@@ -38806,8 +39289,11 @@ components:
- TCP
- UDP
ObservabilityPipelineThrottleProcessor:
- description: The `throttle` processor limits the number of events that pass
+ description: 'The `throttle` processor limits the number of events that pass
through over a given time window.
+
+
+ **Supported pipeline types:** logs'
properties:
display_name:
$ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName'
@@ -38853,6 +39339,8 @@ components:
- window
- enabled
type: object
+ x-pipeline-types:
+ - logs
ObservabilityPipelineThrottleProcessorType:
default: throttle
description: The processor type. The value should always be `throttle`.
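Taken together, the schema changes above introduce a metrics path through pipelines: a `pipeline_type` discriminator on the config, a metrics-capable `datadog_agent` source, a `metric_tags` processor, and `datadog_metrics`/`http_client` destinations. Below is a minimal sketch of a metrics pipeline config assembled from these schemas; note that the `ObservabilityPipelineConfigProcessorGroup` wrapper around processors is not shown in this diff, so the flat `processors` list here is an assumption for illustration only.

```yaml
config:
  pipeline_type: metrics              # new field; defaults to `logs` when omitted
  sources:
    - id: datadog-agent-source        # datadog_agent supports logs and metrics
      type: datadog_agent
  processors:                         # assumed flat list; the processor-group
    - id: metric-tags-processor       # nesting is defined elsewhere in the spec
      type: metric_tags
      include: '*'                    # target all metrics
      enabled: true
      rules:
        - mode: filter
          action: include             # keep only these tag keys; `exclude` drops them
          keys:
            - env
            - service
            - version
  destinations:
    - id: datadog-metrics-destination
      type: datadog_metrics           # metrics-only destination
      inputs:
        - metric-tags-processor
```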
diff --git a/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java b/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java
index 2404a627a2b..1687442550d 100644
--- a/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The azure_storage destination forwards logs to an Azure Blob Storage container. */
+/**
+ * The azure_storage destination forwards logs to an Azure Blob Storage container.
+ *
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
AzureStorageDestination.JSON_PROPERTY_BLOB_PREFIX,
AzureStorageDestination.JSON_PROPERTY_CONTAINER_NAME,
diff --git a/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java b/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java
index 62d3cfd1cbc..51571518f7b 100644
--- a/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The microsoft_sentinel destination forwards logs to Microsoft Sentinel. */
+/**
+ * The microsoft_sentinel destination forwards logs to Microsoft Sentinel.
+ *
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
MicrosoftSentinelDestination.JSON_PROPERTY_CLIENT_ID,
MicrosoftSentinelDestination.JSON_PROPERTY_DCR_IMMUTABLE_ID,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java
index 8acbbf172ae..95ea9fc0ff5 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The add_env_vars processor adds environment variable values to log events. */
+/**
+ * The add_env_vars processor adds environment variable values to log events.
+ *
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineAddEnvVarsProcessor.JSON_PROPERTY_DISPLAY_NAME,
ObservabilityPipelineAddEnvVarsProcessor.JSON_PROPERTY_ENABLED,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java
index f98ca98473b..3c3a4818ff2 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The add_fields processor adds static key-value fields to logs. */
+/**
+ * The add_fields processor adds static key-value fields to logs.
+ *
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_DISPLAY_NAME,
ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_ENABLED,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonDataFirehoseSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonDataFirehoseSource.java
index 961394d8736..5364cf4bd4f 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonDataFirehoseSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonDataFirehoseSource.java
@@ -17,7 +17,11 @@
import java.util.Map;
import java.util.Objects;
-/** The amazon_data_firehose source ingests logs from AWS Data Firehose. */
+/**
+ * The amazon_data_firehose source ingests logs from AWS Data Firehose.
+ *
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineAmazonDataFirehoseSource.JSON_PROPERTY_AUTH,
ObservabilityPipelineAmazonDataFirehoseSource.JSON_PROPERTY_ID,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java
index f8e615e74d8..57c9c89f0ca 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The amazon_opensearch destination writes logs to Amazon OpenSearch. */
+/**
+ * The amazon_opensearch destination writes logs to Amazon OpenSearch.
+ *
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_AUTH,
ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_BULK_INDEX,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java
index e7b0612ac30..6538942f2ef 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java
@@ -22,6 +22,8 @@
/**
* The amazon_s3 destination sends your logs in Datadog-rehydratable format to an
* Amazon S3 bucket for archiving.
+ *
+ *
+ * Supported pipeline types: logs
*/
@JsonPropertyOrder({
ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_AUTH,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java
index 2961c4fec42..ec612b3b88f 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java
@@ -20,6 +20,8 @@
/**
* The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS
* authentication and TLS encryption.
+ *
+ *
+ * Supported pipeline types: logs
*/
@JsonPropertyOrder({
ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_AUTH,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java
index 2ea8bb4a780..221bc4b2806 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The amazon_security_lake destination sends your logs to Amazon Security Lake. */
+/**
+ * The amazon_security_lake destination sends your logs to Amazon Security Lake.
+ *
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineAmazonSecurityLakeDestination.JSON_PROPERTY_AUTH,
ObservabilityPipelineAmazonSecurityLakeDestination.JSON_PROPERTY_BUCKET,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
index 4acddfa3a70..c1829473198 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
@@ -22,6 +22,7 @@
/** Specifies the pipeline's configuration, including its sources, processors, and destinations. */
@JsonPropertyOrder({
ObservabilityPipelineConfig.JSON_PROPERTY_DESTINATIONS,
+ ObservabilityPipelineConfig.JSON_PROPERTY_PIPELINE_TYPE,
ObservabilityPipelineConfig.JSON_PROPERTY_PROCESSORS,
ObservabilityPipelineConfig.JSON_PROPERTY_SOURCES
})
@@ -32,6 +33,10 @@ public class ObservabilityPipelineConfig {
public static final String JSON_PROPERTY_DESTINATIONS = "destinations";
private List<ObservabilityPipelineConfigDestinationItem> destinations = new ArrayList<>();
+ public static final String JSON_PROPERTY_PIPELINE_TYPE = "pipeline_type";
+ private ObservabilityPipelineConfigPipelineType pipelineType =
+ ObservabilityPipelineConfigPipelineType.LOGS;
+
public static final String JSON_PROPERTY_PROCESSORS = "processors";
private List<ObservabilityPipelineConfigProcessorGroup> processors = null;
@@ -81,6 +86,32 @@ public void setDestinations(List<ObservabilityPipelineConfigDestinationItem> des
this.destinations = destinations;
}
+ public ObservabilityPipelineConfig pipelineType(
+ ObservabilityPipelineConfigPipelineType pipelineType) {
+ this.pipelineType = pipelineType;
+ this.unparsed |= !pipelineType.isValid();
+ return this;
+ }
+
+ /**
+ * The type of data being ingested. Defaults to logs if not specified.
+ *
+ * @return pipelineType
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_PIPELINE_TYPE)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineConfigPipelineType getPipelineType() {
+ return pipelineType;
+ }
+
+ public void setPipelineType(ObservabilityPipelineConfigPipelineType pipelineType) {
+ if (!pipelineType.isValid()) {
+ this.unparsed = true;
+ }
+ this.pipelineType = pipelineType;
+ }
+
public ObservabilityPipelineConfig processors(
List<ObservabilityPipelineConfigProcessorItem> processors) {
this.processors = processors;
@@ -203,6 +234,7 @@ public boolean equals(Object o) {
}
ObservabilityPipelineConfig observabilityPipelineConfig = (ObservabilityPipelineConfig) o;
return Objects.equals(this.destinations, observabilityPipelineConfig.destinations)
+ && Objects.equals(this.pipelineType, observabilityPipelineConfig.pipelineType)
&& Objects.equals(this.processors, observabilityPipelineConfig.processors)
&& Objects.equals(this.sources, observabilityPipelineConfig.sources)
&& Objects.equals(
@@ -211,7 +243,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(destinations, processors, sources, additionalProperties);
+ return Objects.hash(destinations, pipelineType, processors, sources, additionalProperties);
}
@Override
@@ -219,6 +251,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class ObservabilityPipelineConfig {\n");
sb.append(" destinations: ").append(toIndentedString(destinations)).append("\n");
+ sb.append(" pipelineType: ").append(toIndentedString(pipelineType)).append("\n");
sb.append(" processors: ").append(toIndentedString(processors)).append("\n");
sb.append(" sources: ").append(toIndentedString(sources)).append("\n");
sb.append(" additionalProperties: ")
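The pipeline_type wiring above follows the client's usual fluent pattern. A minimal sketch of setting it explicitly, which is equivalent to the LOGS default (this assumes the standard no-arg constructor and fluent setters of the generated models; it is not taken from this diff):

    import com.datadog.api.client.v2.model.ObservabilityPipelineConfig;
    import com.datadog.api.client.v2.model.ObservabilityPipelineConfigPipelineType;

    public class PipelineTypeExample {
      public static void main(String[] args) {
        // pipeline_type defaults to LOGS when omitted; setting it explicitly
        // here is therefore a no-op, shown only to illustrate the new setter.
        ObservabilityPipelineConfig config =
            new ObservabilityPipelineConfig()
                .pipelineType(ObservabilityPipelineConfigPipelineType.LOGS);
        // The serialized payload now carries "pipeline_type": "logs" next to
        // the sources, processors, and destinations arrays.
        System.out.println(config.getPipelineType());
      }
    }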
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
index 2f4e68bdef4..e6b4a173710 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
@@ -90,55 +90,109 @@ public ObservabilityPipelineConfigDestinationItem deserialize(
boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS);
int match = 0;
JsonToken token = tree.traverse(jp.getCodec()).nextToken();
- // deserialize ObservabilityPipelineDatadogLogsDestination
+ // deserialize ObservabilityPipelineHttpClientDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class)
- || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class)
- || ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class)
- || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class)
- || ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class)
- || ObservabilityPipelineDatadogLogsDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineHttpClientDestination.class.equals(Integer.class)
+ || ObservabilityPipelineHttpClientDestination.class.equals(Long.class)
+ || ObservabilityPipelineHttpClientDestination.class.equals(Float.class)
+ || ObservabilityPipelineHttpClientDestination.class.equals(Double.class)
+ || ObservabilityPipelineHttpClientDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineHttpClientDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class)
- || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class))
+ ((ObservabilityPipelineHttpClientDestination.class.equals(Integer.class)
+ || ObservabilityPipelineHttpClientDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class)
- || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class))
+ ((ObservabilityPipelineHttpClientDestination.class.equals(Float.class)
+ || ObservabilityPipelineHttpClientDestination.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineHttpClientDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineDatadogLogsDestination.class.equals(String.class)
+ (ObservabilityPipelineHttpClientDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineDatadogLogsDestination.class);
+ .readValueAs(ObservabilityPipelineHttpClientDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineDatadogLogsDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineHttpClientDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
Level.FINER,
- "Input data matches schema 'ObservabilityPipelineDatadogLogsDestination'");
+ "Input data matches schema 'ObservabilityPipelineHttpClientDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineDatadogLogsDestination'",
+ "Input data does not match schema 'ObservabilityPipelineHttpClientDestination'",
+ e);
+ }
+
+ // deserialize ObservabilityPipelineAmazonOpenSearchDestination
+ try {
+ boolean attemptParsing = true;
+ // ensure that we respect type coercion as set on the client ObjectMapper
+ if (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Integer.class)
+ || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Long.class)
+ || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Float.class)
+ || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Double.class)
+ || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(String.class)) {
+ attemptParsing = typeCoercion;
+ if (!attemptParsing) {
+ attemptParsing |=
+ ((ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Integer.class)
+ || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(
+ Long.class))
+ && token == JsonToken.VALUE_NUMBER_INT);
+ attemptParsing |=
+ ((ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Float.class)
+ || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(
+ Double.class))
+ && (token == JsonToken.VALUE_NUMBER_FLOAT
+ || token == JsonToken.VALUE_NUMBER_INT));
+ attemptParsing |=
+ (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Boolean.class)
+ && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+ attemptParsing |=
+ (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(String.class)
+ && token == JsonToken.VALUE_STRING);
+ }
+ }
+ if (attemptParsing) {
+ tmp =
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineAmazonOpenSearchDestination.class);
+ // TODO: there is no validation against JSON schema constraints
+ // (min, max, enum, pattern...), this does not perform a strict JSON
+ // validation, which means the 'match' count may be higher than it should be.
+ if (!((ObservabilityPipelineAmazonOpenSearchDestination) tmp).unparsed) {
+ deserialized = tmp;
+ match++;
+ }
+ log.log(
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineAmazonOpenSearchDestination'");
+ }
+ } catch (Exception e) {
+ // deserialization failed, continue
+ log.log(
+ Level.FINER,
+ "Input data does not match schema 'ObservabilityPipelineAmazonOpenSearchDestination'",
e);
}
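Every candidate block in this deserializer repeats the same probe: attempt a parse, and count a match only when the result has no unparsed fields, so ambiguous payloads can be detected via the match counter. A hedged sketch of driving it with Jackson — the payload fields are hypothetical, and it assumes a plain ObjectMapper picks up the custom deserializer through the model's @JsonDeserialize annotation, as is standard for these generated oneOf wrappers:

    import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class DestinationItemReadExample {
      public static void main(String[] args) throws Exception {
        // Hypothetical datadog_logs destination payload; the "type" enum is what
        // should leave exactly one oneOf branch with zero unparsed fields.
        String json =
            "{\"id\":\"dest-1\",\"type\":\"datadog_logs\",\"inputs\":[\"source-1\"]}";
        ObservabilityPipelineConfigDestinationItem item =
            new ObjectMapper()
                .readValue(json, ObservabilityPipelineConfigDestinationItem.class);
        // Expected to print the concrete destination class that matched.
        System.out.println(item.getActualInstance().getClass().getSimpleName());
      }
    }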
@@ -193,159 +247,209 @@ public ObservabilityPipelineConfigDestinationItem deserialize(
e);
}
- // deserialize ObservabilityPipelineGoogleCloudStorageDestination
+ // deserialize ObservabilityPipelineAmazonSecurityLakeDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class)
- || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Long.class)
- || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class)
- || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Double.class)
- || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class)
- || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Integer.class)
+ || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Long.class)
+ || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Float.class)
+ || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Double.class)
+ || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class)
- || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(
+ ((ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Integer.class)
+ || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(
Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class)
- || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(
+ ((ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Float.class)
+ || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(
Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class)
+ (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineGoogleCloudStorageDestination.class);
+ .readValueAs(ObservabilityPipelineAmazonSecurityLakeDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineGoogleCloudStorageDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineAmazonSecurityLakeDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
Level.FINER,
- "Input data matches schema 'ObservabilityPipelineGoogleCloudStorageDestination'");
+ "Input data matches schema 'ObservabilityPipelineAmazonSecurityLakeDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineGoogleCloudStorageDestination'",
+ "Input data does not match schema 'ObservabilityPipelineAmazonSecurityLakeDestination'",
e);
}
- // deserialize ObservabilityPipelineSplunkHecDestination
+ // deserialize AzureStorageDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class)
- || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class)
- || ObservabilityPipelineSplunkHecDestination.class.equals(Float.class)
- || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class)
- || ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class)
- || ObservabilityPipelineSplunkHecDestination.class.equals(String.class)) {
+ if (AzureStorageDestination.class.equals(Integer.class)
+ || AzureStorageDestination.class.equals(Long.class)
+ || AzureStorageDestination.class.equals(Float.class)
+ || AzureStorageDestination.class.equals(Double.class)
+ || AzureStorageDestination.class.equals(Boolean.class)
+ || AzureStorageDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class)
- || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class))
+ ((AzureStorageDestination.class.equals(Integer.class)
+ || AzureStorageDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSplunkHecDestination.class.equals(Float.class)
- || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class))
+ ((AzureStorageDestination.class.equals(Float.class)
+ || AzureStorageDestination.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class)
+ (AzureStorageDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSplunkHecDestination.class.equals(String.class)
+ (AzureStorageDestination.class.equals(String.class)
+ && token == JsonToken.VALUE_STRING);
+ }
+ }
+ if (attemptParsing) {
+ tmp = tree.traverse(jp.getCodec()).readValueAs(AzureStorageDestination.class);
+ // TODO: there is no validation against JSON schema constraints
+ // (min, max, enum, pattern...), this does not perform a strict JSON
+ // validation, which means the 'match' count may be higher than it should be.
+ if (!((AzureStorageDestination) tmp).unparsed) {
+ deserialized = tmp;
+ match++;
+ }
+ log.log(Level.FINER, "Input data matches schema 'AzureStorageDestination'");
+ }
+ } catch (Exception e) {
+ // deserialization failed, continue
+ log.log(Level.FINER, "Input data does not match schema 'AzureStorageDestination'", e);
+ }
+
+ // deserialize ObservabilityPipelineCrowdStrikeNextGenSiemDestination
+ try {
+ boolean attemptParsing = true;
+ // ensure that we respect type coercion as set on the client ObjectMapper
+ if (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Integer.class)
+ || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Long.class)
+ || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Float.class)
+ || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Double.class)
+ || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(String.class)) {
+ attemptParsing = typeCoercion;
+ if (!attemptParsing) {
+ attemptParsing |=
+ ((ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Integer.class)
+ || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(
+ Long.class))
+ && token == JsonToken.VALUE_NUMBER_INT);
+ attemptParsing |=
+ ((ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Float.class)
+ || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(
+ Double.class))
+ && (token == JsonToken.VALUE_NUMBER_FLOAT
+ || token == JsonToken.VALUE_NUMBER_INT));
+ attemptParsing |=
+ (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Boolean.class)
+ && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+ attemptParsing |=
+ (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineSplunkHecDestination.class);
+ .readValueAs(ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSplunkHecDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineCrowdStrikeNextGenSiemDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecDestination'");
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineCrowdStrikeNextGenSiemDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineSplunkHecDestination'",
+ "Input data does not match schema"
+ + " 'ObservabilityPipelineCrowdStrikeNextGenSiemDestination'",
e);
}
- // deserialize ObservabilityPipelineSumoLogicDestination
+ // deserialize ObservabilityPipelineDatadogLogsDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSumoLogicDestination.class.equals(Integer.class)
- || ObservabilityPipelineSumoLogicDestination.class.equals(Long.class)
- || ObservabilityPipelineSumoLogicDestination.class.equals(Float.class)
- || ObservabilityPipelineSumoLogicDestination.class.equals(Double.class)
- || ObservabilityPipelineSumoLogicDestination.class.equals(Boolean.class)
- || ObservabilityPipelineSumoLogicDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSumoLogicDestination.class.equals(Integer.class)
- || ObservabilityPipelineSumoLogicDestination.class.equals(Long.class))
+ ((ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSumoLogicDestination.class.equals(Float.class)
- || ObservabilityPipelineSumoLogicDestination.class.equals(Double.class))
+ ((ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSumoLogicDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSumoLogicDestination.class.equals(String.class)
+ (ObservabilityPipelineDatadogLogsDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineSumoLogicDestination.class);
+ .readValueAs(ObservabilityPipelineDatadogLogsDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSumoLogicDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineDatadogLogsDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineSumoLogicDestination'");
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineDatadogLogsDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineSumoLogicDestination'",
+ "Input data does not match schema 'ObservabilityPipelineDatadogLogsDestination'",
e);
}
@@ -401,151 +505,163 @@ public ObservabilityPipelineConfigDestinationItem deserialize(
e);
}
- // deserialize ObservabilityPipelineRsyslogDestination
+ // deserialize ObservabilityPipelineGoogleChronicleDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineRsyslogDestination.class.equals(Integer.class)
- || ObservabilityPipelineRsyslogDestination.class.equals(Long.class)
- || ObservabilityPipelineRsyslogDestination.class.equals(Float.class)
- || ObservabilityPipelineRsyslogDestination.class.equals(Double.class)
- || ObservabilityPipelineRsyslogDestination.class.equals(Boolean.class)
- || ObservabilityPipelineRsyslogDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineGoogleChronicleDestination.class.equals(Integer.class)
+ || ObservabilityPipelineGoogleChronicleDestination.class.equals(Long.class)
+ || ObservabilityPipelineGoogleChronicleDestination.class.equals(Float.class)
+ || ObservabilityPipelineGoogleChronicleDestination.class.equals(Double.class)
+ || ObservabilityPipelineGoogleChronicleDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineGoogleChronicleDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineRsyslogDestination.class.equals(Integer.class)
- || ObservabilityPipelineRsyslogDestination.class.equals(Long.class))
+ ((ObservabilityPipelineGoogleChronicleDestination.class.equals(Integer.class)
+ || ObservabilityPipelineGoogleChronicleDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineRsyslogDestination.class.equals(Float.class)
- || ObservabilityPipelineRsyslogDestination.class.equals(Double.class))
+ ((ObservabilityPipelineGoogleChronicleDestination.class.equals(Float.class)
+ || ObservabilityPipelineGoogleChronicleDestination.class.equals(
+ Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineRsyslogDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineGoogleChronicleDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineRsyslogDestination.class.equals(String.class)
+ (ObservabilityPipelineGoogleChronicleDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineRsyslogDestination.class);
+ .readValueAs(ObservabilityPipelineGoogleChronicleDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineRsyslogDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineGoogleChronicleDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineRsyslogDestination'");
- }
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineGoogleChronicleDestination'");
+ }
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineRsyslogDestination'",
+ "Input data does not match schema 'ObservabilityPipelineGoogleChronicleDestination'",
e);
}
- // deserialize ObservabilityPipelineSyslogNgDestination
+ // deserialize ObservabilityPipelineGoogleCloudStorageDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSyslogNgDestination.class.equals(Integer.class)
- || ObservabilityPipelineSyslogNgDestination.class.equals(Long.class)
- || ObservabilityPipelineSyslogNgDestination.class.equals(Float.class)
- || ObservabilityPipelineSyslogNgDestination.class.equals(Double.class)
- || ObservabilityPipelineSyslogNgDestination.class.equals(Boolean.class)
- || ObservabilityPipelineSyslogNgDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class)
+ || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Long.class)
+ || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class)
+ || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Double.class)
+ || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSyslogNgDestination.class.equals(Integer.class)
- || ObservabilityPipelineSyslogNgDestination.class.equals(Long.class))
+ ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class)
+ || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(
+ Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSyslogNgDestination.class.equals(Float.class)
- || ObservabilityPipelineSyslogNgDestination.class.equals(Double.class))
+ ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class)
+ || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(
+ Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSyslogNgDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSyslogNgDestination.class.equals(String.class)
+ (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineSyslogNgDestination.class);
+ .readValueAs(ObservabilityPipelineGoogleCloudStorageDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSyslogNgDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineGoogleCloudStorageDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineSyslogNgDestination'");
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineGoogleCloudStorageDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineSyslogNgDestination'",
+ "Input data does not match schema 'ObservabilityPipelineGoogleCloudStorageDestination'",
e);
}
- // deserialize AzureStorageDestination
+ // deserialize ObservabilityPipelineGooglePubSubDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (AzureStorageDestination.class.equals(Integer.class)
- || AzureStorageDestination.class.equals(Long.class)
- || AzureStorageDestination.class.equals(Float.class)
- || AzureStorageDestination.class.equals(Double.class)
- || AzureStorageDestination.class.equals(Boolean.class)
- || AzureStorageDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineGooglePubSubDestination.class.equals(Integer.class)
+ || ObservabilityPipelineGooglePubSubDestination.class.equals(Long.class)
+ || ObservabilityPipelineGooglePubSubDestination.class.equals(Float.class)
+ || ObservabilityPipelineGooglePubSubDestination.class.equals(Double.class)
+ || ObservabilityPipelineGooglePubSubDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineGooglePubSubDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((AzureStorageDestination.class.equals(Integer.class)
- || AzureStorageDestination.class.equals(Long.class))
+ ((ObservabilityPipelineGooglePubSubDestination.class.equals(Integer.class)
+ || ObservabilityPipelineGooglePubSubDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((AzureStorageDestination.class.equals(Float.class)
- || AzureStorageDestination.class.equals(Double.class))
+ ((ObservabilityPipelineGooglePubSubDestination.class.equals(Float.class)
+ || ObservabilityPipelineGooglePubSubDestination.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (AzureStorageDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineGooglePubSubDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (AzureStorageDestination.class.equals(String.class)
+ (ObservabilityPipelineGooglePubSubDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
- tmp = tree.traverse(jp.getCodec()).readValueAs(AzureStorageDestination.class);
+ tmp =
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineGooglePubSubDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((AzureStorageDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineGooglePubSubDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'AzureStorageDestination'");
+ log.log(
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineGooglePubSubDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
- log.log(Level.FINER, "Input data does not match schema 'AzureStorageDestination'", e);
+ log.log(
+ Level.FINER,
+ "Input data does not match schema 'ObservabilityPipelineGooglePubSubDestination'",
+ e);
}
// deserialize MicrosoftSentinelDestination
@@ -593,59 +709,6 @@ public ObservabilityPipelineConfigDestinationItem deserialize(
log.log(Level.FINER, "Input data does not match schema 'MicrosoftSentinelDestination'", e);
}
- // deserialize ObservabilityPipelineGoogleChronicleDestination
- try {
- boolean attemptParsing = true;
- // ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineGoogleChronicleDestination.class.equals(Integer.class)
- || ObservabilityPipelineGoogleChronicleDestination.class.equals(Long.class)
- || ObservabilityPipelineGoogleChronicleDestination.class.equals(Float.class)
- || ObservabilityPipelineGoogleChronicleDestination.class.equals(Double.class)
- || ObservabilityPipelineGoogleChronicleDestination.class.equals(Boolean.class)
- || ObservabilityPipelineGoogleChronicleDestination.class.equals(String.class)) {
- attemptParsing = typeCoercion;
- if (!attemptParsing) {
- attemptParsing |=
- ((ObservabilityPipelineGoogleChronicleDestination.class.equals(Integer.class)
- || ObservabilityPipelineGoogleChronicleDestination.class.equals(Long.class))
- && token == JsonToken.VALUE_NUMBER_INT);
- attemptParsing |=
- ((ObservabilityPipelineGoogleChronicleDestination.class.equals(Float.class)
- || ObservabilityPipelineGoogleChronicleDestination.class.equals(
- Double.class))
- && (token == JsonToken.VALUE_NUMBER_FLOAT
- || token == JsonToken.VALUE_NUMBER_INT));
- attemptParsing |=
- (ObservabilityPipelineGoogleChronicleDestination.class.equals(Boolean.class)
- && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
- attemptParsing |=
- (ObservabilityPipelineGoogleChronicleDestination.class.equals(String.class)
- && token == JsonToken.VALUE_STRING);
- }
- }
- if (attemptParsing) {
- tmp =
- tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineGoogleChronicleDestination.class);
- // TODO: there is no validation against JSON schema constraints
- // (min, max, enum, pattern...), this does not perform a strict JSON
- // validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineGoogleChronicleDestination) tmp).unparsed) {
- deserialized = tmp;
- match++;
- }
- log.log(
- Level.FINER,
- "Input data matches schema 'ObservabilityPipelineGoogleChronicleDestination'");
- }
- } catch (Exception e) {
- // deserialization failed, continue
- log.log(
- Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineGoogleChronicleDestination'",
- e);
- }
-
// deserialize ObservabilityPipelineNewRelicDestination
try {
boolean attemptParsing = true;
@@ -697,161 +760,158 @@ public ObservabilityPipelineConfigDestinationItem deserialize(
e);
}
- // deserialize ObservabilityPipelineSentinelOneDestination
+ // deserialize ObservabilityPipelineOpenSearchDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSentinelOneDestination.class.equals(Integer.class)
- || ObservabilityPipelineSentinelOneDestination.class.equals(Long.class)
- || ObservabilityPipelineSentinelOneDestination.class.equals(Float.class)
- || ObservabilityPipelineSentinelOneDestination.class.equals(Double.class)
- || ObservabilityPipelineSentinelOneDestination.class.equals(Boolean.class)
- || ObservabilityPipelineSentinelOneDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineOpenSearchDestination.class.equals(Integer.class)
+ || ObservabilityPipelineOpenSearchDestination.class.equals(Long.class)
+ || ObservabilityPipelineOpenSearchDestination.class.equals(Float.class)
+ || ObservabilityPipelineOpenSearchDestination.class.equals(Double.class)
+ || ObservabilityPipelineOpenSearchDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineOpenSearchDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSentinelOneDestination.class.equals(Integer.class)
- || ObservabilityPipelineSentinelOneDestination.class.equals(Long.class))
+ ((ObservabilityPipelineOpenSearchDestination.class.equals(Integer.class)
+ || ObservabilityPipelineOpenSearchDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSentinelOneDestination.class.equals(Float.class)
- || ObservabilityPipelineSentinelOneDestination.class.equals(Double.class))
+ ((ObservabilityPipelineOpenSearchDestination.class.equals(Float.class)
+ || ObservabilityPipelineOpenSearchDestination.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSentinelOneDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineOpenSearchDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSentinelOneDestination.class.equals(String.class)
+ (ObservabilityPipelineOpenSearchDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineSentinelOneDestination.class);
+ .readValueAs(ObservabilityPipelineOpenSearchDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSentinelOneDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineOpenSearchDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
Level.FINER,
- "Input data matches schema 'ObservabilityPipelineSentinelOneDestination'");
+ "Input data matches schema 'ObservabilityPipelineOpenSearchDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineSentinelOneDestination'",
+ "Input data does not match schema 'ObservabilityPipelineOpenSearchDestination'",
e);
}
- // deserialize ObservabilityPipelineOpenSearchDestination
+ // deserialize ObservabilityPipelineRsyslogDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineOpenSearchDestination.class.equals(Integer.class)
- || ObservabilityPipelineOpenSearchDestination.class.equals(Long.class)
- || ObservabilityPipelineOpenSearchDestination.class.equals(Float.class)
- || ObservabilityPipelineOpenSearchDestination.class.equals(Double.class)
- || ObservabilityPipelineOpenSearchDestination.class.equals(Boolean.class)
- || ObservabilityPipelineOpenSearchDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineRsyslogDestination.class.equals(Integer.class)
+ || ObservabilityPipelineRsyslogDestination.class.equals(Long.class)
+ || ObservabilityPipelineRsyslogDestination.class.equals(Float.class)
+ || ObservabilityPipelineRsyslogDestination.class.equals(Double.class)
+ || ObservabilityPipelineRsyslogDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineRsyslogDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineOpenSearchDestination.class.equals(Integer.class)
- || ObservabilityPipelineOpenSearchDestination.class.equals(Long.class))
+ ((ObservabilityPipelineRsyslogDestination.class.equals(Integer.class)
+ || ObservabilityPipelineRsyslogDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineOpenSearchDestination.class.equals(Float.class)
- || ObservabilityPipelineOpenSearchDestination.class.equals(Double.class))
+ ((ObservabilityPipelineRsyslogDestination.class.equals(Float.class)
+ || ObservabilityPipelineRsyslogDestination.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineOpenSearchDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineRsyslogDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineOpenSearchDestination.class.equals(String.class)
+ (ObservabilityPipelineRsyslogDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineOpenSearchDestination.class);
+ .readValueAs(ObservabilityPipelineRsyslogDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineOpenSearchDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineRsyslogDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER,
- "Input data matches schema 'ObservabilityPipelineOpenSearchDestination'");
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineRsyslogDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineOpenSearchDestination'",
+ "Input data does not match schema 'ObservabilityPipelineRsyslogDestination'",
e);
}
- // deserialize ObservabilityPipelineAmazonOpenSearchDestination
+ // deserialize ObservabilityPipelineSentinelOneDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Integer.class)
- || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Long.class)
- || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Float.class)
- || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Double.class)
- || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Boolean.class)
- || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineSentinelOneDestination.class.equals(Integer.class)
+ || ObservabilityPipelineSentinelOneDestination.class.equals(Long.class)
+ || ObservabilityPipelineSentinelOneDestination.class.equals(Float.class)
+ || ObservabilityPipelineSentinelOneDestination.class.equals(Double.class)
+ || ObservabilityPipelineSentinelOneDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineSentinelOneDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Integer.class)
- || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(
- Long.class))
+ ((ObservabilityPipelineSentinelOneDestination.class.equals(Integer.class)
+ || ObservabilityPipelineSentinelOneDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Float.class)
- || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(
- Double.class))
+ ((ObservabilityPipelineSentinelOneDestination.class.equals(Float.class)
+ || ObservabilityPipelineSentinelOneDestination.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineSentinelOneDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(String.class)
+ (ObservabilityPipelineSentinelOneDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineAmazonOpenSearchDestination.class);
+ .readValueAs(ObservabilityPipelineSentinelOneDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineAmazonOpenSearchDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineSentinelOneDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
Level.FINER,
- "Input data matches schema 'ObservabilityPipelineAmazonOpenSearchDestination'");
+ "Input data matches schema 'ObservabilityPipelineSentinelOneDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineAmazonOpenSearchDestination'",
+ "Input data does not match schema 'ObservabilityPipelineSentinelOneDestination'",
e);
}
@@ -906,164 +966,209 @@ public ObservabilityPipelineConfigDestinationItem deserialize(
e);
}
- // deserialize ObservabilityPipelineAmazonSecurityLakeDestination
+ // deserialize ObservabilityPipelineSplunkHecDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Integer.class)
- || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Long.class)
- || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Float.class)
- || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Double.class)
- || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Boolean.class)
- || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class)
+ || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class)
+ || ObservabilityPipelineSplunkHecDestination.class.equals(Float.class)
+ || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class)
+ || ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineSplunkHecDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Integer.class)
- || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(
- Long.class))
+ ((ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class)
+ || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Float.class)
- || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(
- Double.class))
+ ((ObservabilityPipelineSplunkHecDestination.class.equals(Float.class)
+ || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(String.class)
+ (ObservabilityPipelineSplunkHecDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineAmazonSecurityLakeDestination.class);
+ .readValueAs(ObservabilityPipelineSplunkHecDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineAmazonSecurityLakeDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineSplunkHecDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER,
- "Input data matches schema 'ObservabilityPipelineAmazonSecurityLakeDestination'");
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineAmazonSecurityLakeDestination'",
+ "Input data does not match schema 'ObservabilityPipelineSplunkHecDestination'",
e);
}
- // deserialize ObservabilityPipelineCrowdStrikeNextGenSiemDestination
+ // deserialize ObservabilityPipelineSumoLogicDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Integer.class)
- || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Long.class)
- || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Float.class)
- || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Double.class)
- || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Boolean.class)
- || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineSumoLogicDestination.class.equals(Integer.class)
+ || ObservabilityPipelineSumoLogicDestination.class.equals(Long.class)
+ || ObservabilityPipelineSumoLogicDestination.class.equals(Float.class)
+ || ObservabilityPipelineSumoLogicDestination.class.equals(Double.class)
+ || ObservabilityPipelineSumoLogicDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineSumoLogicDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Integer.class)
- || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(
- Long.class))
+ ((ObservabilityPipelineSumoLogicDestination.class.equals(Integer.class)
+ || ObservabilityPipelineSumoLogicDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Float.class)
- || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(
- Double.class))
+ ((ObservabilityPipelineSumoLogicDestination.class.equals(Float.class)
+ || ObservabilityPipelineSumoLogicDestination.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineSumoLogicDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(String.class)
+ (ObservabilityPipelineSumoLogicDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class);
+ .readValueAs(ObservabilityPipelineSumoLogicDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineCrowdStrikeNextGenSiemDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineSumoLogicDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER,
- "Input data matches schema 'ObservabilityPipelineCrowdStrikeNextGenSiemDestination'");
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineSumoLogicDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema"
- + " 'ObservabilityPipelineCrowdStrikeNextGenSiemDestination'",
+ "Input data does not match schema 'ObservabilityPipelineSumoLogicDestination'",
e);
}
- // deserialize ObservabilityPipelineGooglePubSubDestination
+ // deserialize ObservabilityPipelineSyslogNgDestination
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineGooglePubSubDestination.class.equals(Integer.class)
- || ObservabilityPipelineGooglePubSubDestination.class.equals(Long.class)
- || ObservabilityPipelineGooglePubSubDestination.class.equals(Float.class)
- || ObservabilityPipelineGooglePubSubDestination.class.equals(Double.class)
- || ObservabilityPipelineGooglePubSubDestination.class.equals(Boolean.class)
- || ObservabilityPipelineGooglePubSubDestination.class.equals(String.class)) {
+ if (ObservabilityPipelineSyslogNgDestination.class.equals(Integer.class)
+ || ObservabilityPipelineSyslogNgDestination.class.equals(Long.class)
+ || ObservabilityPipelineSyslogNgDestination.class.equals(Float.class)
+ || ObservabilityPipelineSyslogNgDestination.class.equals(Double.class)
+ || ObservabilityPipelineSyslogNgDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineSyslogNgDestination.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineGooglePubSubDestination.class.equals(Integer.class)
- || ObservabilityPipelineGooglePubSubDestination.class.equals(Long.class))
+ ((ObservabilityPipelineSyslogNgDestination.class.equals(Integer.class)
+ || ObservabilityPipelineSyslogNgDestination.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineGooglePubSubDestination.class.equals(Float.class)
- || ObservabilityPipelineGooglePubSubDestination.class.equals(Double.class))
+ ((ObservabilityPipelineSyslogNgDestination.class.equals(Float.class)
+ || ObservabilityPipelineSyslogNgDestination.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineGooglePubSubDestination.class.equals(Boolean.class)
+ (ObservabilityPipelineSyslogNgDestination.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineGooglePubSubDestination.class.equals(String.class)
+ (ObservabilityPipelineSyslogNgDestination.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineGooglePubSubDestination.class);
+ .readValueAs(ObservabilityPipelineSyslogNgDestination.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineGooglePubSubDestination) tmp).unparsed) {
+ if (!((ObservabilityPipelineSyslogNgDestination) tmp).unparsed) {
+ deserialized = tmp;
+ match++;
+ }
+ log.log(
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineSyslogNgDestination'");
+ }
+ } catch (Exception e) {
+ // deserialization failed, continue
+ log.log(
+ Level.FINER,
+ "Input data does not match schema 'ObservabilityPipelineSyslogNgDestination'",
+ e);
+ }
+
+ // deserialize ObservabilityPipelineDatadogMetricsDestination
+ try {
+ boolean attemptParsing = true;
+ // ensure that we respect type coercion as set on the client ObjectMapper
+ if (ObservabilityPipelineDatadogMetricsDestination.class.equals(Integer.class)
+ || ObservabilityPipelineDatadogMetricsDestination.class.equals(Long.class)
+ || ObservabilityPipelineDatadogMetricsDestination.class.equals(Float.class)
+ || ObservabilityPipelineDatadogMetricsDestination.class.equals(Double.class)
+ || ObservabilityPipelineDatadogMetricsDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineDatadogMetricsDestination.class.equals(String.class)) {
+ attemptParsing = typeCoercion;
+ if (!attemptParsing) {
+ attemptParsing |=
+ ((ObservabilityPipelineDatadogMetricsDestination.class.equals(Integer.class)
+ || ObservabilityPipelineDatadogMetricsDestination.class.equals(Long.class))
+ && token == JsonToken.VALUE_NUMBER_INT);
+ attemptParsing |=
+ ((ObservabilityPipelineDatadogMetricsDestination.class.equals(Float.class)
+ || ObservabilityPipelineDatadogMetricsDestination.class.equals(
+ Double.class))
+ && (token == JsonToken.VALUE_NUMBER_FLOAT
+ || token == JsonToken.VALUE_NUMBER_INT));
+ attemptParsing |=
+ (ObservabilityPipelineDatadogMetricsDestination.class.equals(Boolean.class)
+ && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+ attemptParsing |=
+ (ObservabilityPipelineDatadogMetricsDestination.class.equals(String.class)
+ && token == JsonToken.VALUE_STRING);
+ }
+ }
+ if (attemptParsing) {
+ tmp =
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineDatadogMetricsDestination.class);
+ // TODO: there is no validation against JSON schema constraints
+ // (min, max, enum, pattern...), this does not perform a strict JSON
+ // validation, which means the 'match' count may be higher than it should be.
+ if (!((ObservabilityPipelineDatadogMetricsDestination) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
Level.FINER,
- "Input data matches schema 'ObservabilityPipelineGooglePubSubDestination'");
+ "Input data matches schema 'ObservabilityPipelineDatadogMetricsDestination'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineGooglePubSubDestination'",
+ "Input data does not match schema 'ObservabilityPipelineDatadogMetricsDestination'",
e);
}
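Reviewer note: every `// deserialize ...` block in this file instantiates the same generated template, so the large hunks here amount to alphabetical reordering plus the new ObservabilityPipelineDatadogMetricsDestination branch. The template in outline (a paraphrase of the generated code, not its source):

    // for each candidate oneOf schema S:
    //   1. if S is a primitive wrapper (Integer, Long, Float, Double, Boolean,
    //      String), attempt parsing only when type coercion or the current JSON
    //      token kind allows it;
    //   2. re-read the buffered tree as S inside a try/catch;
    //   3. count a match only if no property was left unparsed -- per the TODOs,
    //      JSON schema constraints (min, max, enum, pattern) are not validated,
    //      so the final 'match' count can overcount and is resolved only after
    //      all candidates have been tried.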
@@ -1098,7 +1203,13 @@ public ObservabilityPipelineConfigDestinationItem() {
super("oneOf", Boolean.FALSE);
}
- public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineDatadogLogsDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineHttpClientDestination o) {
+ super("oneOf", Boolean.FALSE);
+ setActualInstance(o);
+ }
+
+ public ObservabilityPipelineConfigDestinationItem(
+ ObservabilityPipelineAmazonOpenSearchDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
@@ -1109,17 +1220,23 @@ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineAmazonS3D
}
public ObservabilityPipelineConfigDestinationItem(
- ObservabilityPipelineGoogleCloudStorageDestination o) {
+ ObservabilityPipelineAmazonSecurityLakeDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSplunkHecDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(AzureStorageDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSumoLogicDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(
+ ObservabilityPipelineCrowdStrikeNextGenSiemDestination o) {
+ super("oneOf", Boolean.FALSE);
+ setActualInstance(o);
+ }
+
+ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineDatadogLogsDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
@@ -1130,17 +1247,20 @@ public ObservabilityPipelineConfigDestinationItem(
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineRsyslogDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(
+ ObservabilityPipelineGoogleChronicleDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSyslogNgDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(
+ ObservabilityPipelineGoogleCloudStorageDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(AzureStorageDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(
+ ObservabilityPipelineGooglePubSubDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
@@ -1150,110 +1270,112 @@ public ObservabilityPipelineConfigDestinationItem(MicrosoftSentinelDestination o
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(
- ObservabilityPipelineGoogleChronicleDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineNewRelicDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineNewRelicDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineOpenSearchDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSentinelOneDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineRsyslogDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineOpenSearchDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSentinelOneDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(
- ObservabilityPipelineAmazonOpenSearchDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSocketDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSocketDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSplunkHecDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(
- ObservabilityPipelineAmazonSecurityLakeDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSumoLogicDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigDestinationItem(
- ObservabilityPipelineCrowdStrikeNextGenSiemDestination o) {
+ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSyslogNgDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
public ObservabilityPipelineConfigDestinationItem(
- ObservabilityPipelineGooglePubSubDestination o) {
+ ObservabilityPipelineDatadogMetricsDestination o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
static {
    schemas.put(
-        "ObservabilityPipelineDatadogLogsDestination",
-        new GenericType<ObservabilityPipelineDatadogLogsDestination>() {});
+        "ObservabilityPipelineHttpClientDestination",
+        new GenericType<ObservabilityPipelineHttpClientDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineAmazonOpenSearchDestination",
+        new GenericType<ObservabilityPipelineAmazonOpenSearchDestination>() {});
    schemas.put(
        "ObservabilityPipelineAmazonS3Destination",
        new GenericType<ObservabilityPipelineAmazonS3Destination>() {});
    schemas.put(
-        "ObservabilityPipelineGoogleCloudStorageDestination",
-        new GenericType<ObservabilityPipelineGoogleCloudStorageDestination>() {});
+        "ObservabilityPipelineAmazonSecurityLakeDestination",
+        new GenericType<ObservabilityPipelineAmazonSecurityLakeDestination>() {});
+    schemas.put("AzureStorageDestination", new GenericType<AzureStorageDestination>() {});
    schemas.put(
-        "ObservabilityPipelineSplunkHecDestination",
-        new GenericType<ObservabilityPipelineSplunkHecDestination>() {});
+        "ObservabilityPipelineCrowdStrikeNextGenSiemDestination",
+        new GenericType<ObservabilityPipelineCrowdStrikeNextGenSiemDestination>() {});
    schemas.put(
-        "ObservabilityPipelineSumoLogicDestination",
-        new GenericType<ObservabilityPipelineSumoLogicDestination>() {});
+        "ObservabilityPipelineDatadogLogsDestination",
+        new GenericType<ObservabilityPipelineDatadogLogsDestination>() {});
    schemas.put(
        "ObservabilityPipelineElasticsearchDestination",
        new GenericType<ObservabilityPipelineElasticsearchDestination>() {});
-    schemas.put(
-        "ObservabilityPipelineRsyslogDestination",
-        new GenericType<ObservabilityPipelineRsyslogDestination>() {});
-    schemas.put(
-        "ObservabilityPipelineSyslogNgDestination",
-        new GenericType<ObservabilityPipelineSyslogNgDestination>() {});
-    schemas.put("AzureStorageDestination", new GenericType<AzureStorageDestination>() {});
-    schemas.put("MicrosoftSentinelDestination", new GenericType<MicrosoftSentinelDestination>() {});
    schemas.put(
        "ObservabilityPipelineGoogleChronicleDestination",
        new GenericType<ObservabilityPipelineGoogleChronicleDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineGoogleCloudStorageDestination",
+        new GenericType<ObservabilityPipelineGoogleCloudStorageDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineGooglePubSubDestination",
+        new GenericType<ObservabilityPipelineGooglePubSubDestination>() {});
+    schemas.put("MicrosoftSentinelDestination", new GenericType<MicrosoftSentinelDestination>() {});
    schemas.put(
        "ObservabilityPipelineNewRelicDestination",
        new GenericType<ObservabilityPipelineNewRelicDestination>() {});
-    schemas.put(
-        "ObservabilityPipelineSentinelOneDestination",
-        new GenericType<ObservabilityPipelineSentinelOneDestination>() {});
    schemas.put(
        "ObservabilityPipelineOpenSearchDestination",
        new GenericType<ObservabilityPipelineOpenSearchDestination>() {});
    schemas.put(
-        "ObservabilityPipelineAmazonOpenSearchDestination",
-        new GenericType<ObservabilityPipelineAmazonOpenSearchDestination>() {});
+        "ObservabilityPipelineRsyslogDestination",
+        new GenericType<ObservabilityPipelineRsyslogDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineSentinelOneDestination",
+        new GenericType<ObservabilityPipelineSentinelOneDestination>() {});
    schemas.put(
        "ObservabilityPipelineSocketDestination",
        new GenericType<ObservabilityPipelineSocketDestination>() {});
    schemas.put(
-        "ObservabilityPipelineAmazonSecurityLakeDestination",
-        new GenericType<ObservabilityPipelineAmazonSecurityLakeDestination>() {});
+        "ObservabilityPipelineSplunkHecDestination",
+        new GenericType<ObservabilityPipelineSplunkHecDestination>() {});
    schemas.put(
-        "ObservabilityPipelineCrowdStrikeNextGenSiemDestination",
-        new GenericType<ObservabilityPipelineCrowdStrikeNextGenSiemDestination>() {});
+        "ObservabilityPipelineSumoLogicDestination",
+        new GenericType<ObservabilityPipelineSumoLogicDestination>() {});
    schemas.put(
-        "ObservabilityPipelineGooglePubSubDestination",
-        new GenericType<ObservabilityPipelineGooglePubSubDestination>() {});
+        "ObservabilityPipelineSyslogNgDestination",
+        new GenericType<ObservabilityPipelineSyslogNgDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineDatadogMetricsDestination",
+        new GenericType<ObservabilityPipelineDatadogMetricsDestination>() {});
JSON.registerDescendants(
ObservabilityPipelineConfigDestinationItem.class, Collections.unmodifiableMap(schemas));
}
@@ -1265,17 +1387,19 @@ public Map<String, GenericType> getSchemas() {
/**
* Set the instance that matches the oneOf child schema, check the instance parameter is valid
- * against the oneOf child schemas: ObservabilityPipelineDatadogLogsDestination,
- * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination,
- * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
- * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination,
- * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination,
- * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination,
- * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination,
- * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineAmazonOpenSearchDestination,
- * ObservabilityPipelineSocketDestination, ObservabilityPipelineAmazonSecurityLakeDestination,
+ * against the oneOf child schemas: ObservabilityPipelineHttpClientDestination,
+ * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineAmazonS3Destination,
+ * ObservabilityPipelineAmazonSecurityLakeDestination, AzureStorageDestination,
* ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
- * ObservabilityPipelineGooglePubSubDestination
+ * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineElasticsearchDestination,
+ * ObservabilityPipelineGoogleChronicleDestination,
+ * ObservabilityPipelineGoogleCloudStorageDestination,
+ * ObservabilityPipelineGooglePubSubDestination, MicrosoftSentinelDestination,
+ * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineOpenSearchDestination,
+ * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSentinelOneDestination,
+ * ObservabilityPipelineSocketDestination, ObservabilityPipelineSplunkHecDestination,
+ * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineSyslogNgDestination,
+ * ObservabilityPipelineDatadogMetricsDestination
*
* It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a
* composed schema (allOf, anyOf, oneOf).
@@ -1283,7 +1407,14 @@ public Map<String, GenericType> getSchemas() {
@Override
public void setActualInstance(Object instance) {
if (JSON.isInstanceOf(
-        ObservabilityPipelineDatadogLogsDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineHttpClientDestination.class, instance, new HashSet<Class<?>>())) {
+ super.setActualInstance(instance);
+ return;
+ }
+ if (JSON.isInstanceOf(
+ ObservabilityPipelineAmazonOpenSearchDestination.class,
+ instance,
+        new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
@@ -1293,19 +1424,25 @@ public void setActualInstance(Object instance) {
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineGoogleCloudStorageDestination.class,
+ ObservabilityPipelineAmazonSecurityLakeDestination.class,
instance,
        new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
+    if (JSON.isInstanceOf(AzureStorageDestination.class, instance, new HashSet<Class<?>>())) {
+ super.setActualInstance(instance);
+ return;
+ }
if (JSON.isInstanceOf(
-        ObservabilityPipelineSplunkHecDestination.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class,
+ instance,
+        new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
-        ObservabilityPipelineSumoLogicDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineDatadogLogsDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
@@ -1315,16 +1452,19 @@ public void setActualInstance(Object instance) {
return;
}
if (JSON.isInstanceOf(
-        ObservabilityPipelineRsyslogDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineGoogleChronicleDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
-        ObservabilityPipelineSyslogNgDestination.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineGoogleCloudStorageDestination.class,
+ instance,
+        new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
-    if (JSON.isInstanceOf(AzureStorageDestination.class, instance, new HashSet<Class<?>>())) {
+    if (JSON.isInstanceOf(
+        ObservabilityPipelineGooglePubSubDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
@@ -1333,53 +1473,47 @@ public void setActualInstance(Object instance) {
return;
}
if (JSON.isInstanceOf(
-        ObservabilityPipelineGoogleChronicleDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineNewRelicDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
-        ObservabilityPipelineNewRelicDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineOpenSearchDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
-        ObservabilityPipelineSentinelOneDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineRsyslogDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
-        ObservabilityPipelineOpenSearchDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineSentinelOneDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineAmazonOpenSearchDestination.class,
- instance,
-        new HashSet<Class<?>>())) {
+        ObservabilityPipelineSocketDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
-        ObservabilityPipelineSocketDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineSplunkHecDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineAmazonSecurityLakeDestination.class,
- instance,
-        new HashSet<Class<?>>())) {
+        ObservabilityPipelineSumoLogicDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class,
- instance,
-        new HashSet<Class<?>>())) {
+        ObservabilityPipelineSyslogNgDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
-        ObservabilityPipelineGooglePubSubDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineDatadogMetricsDestination.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
@@ -1389,52 +1523,54 @@ public void setActualInstance(Object instance) {
return;
}
throw new RuntimeException(
- "Invalid instance type. Must be ObservabilityPipelineDatadogLogsDestination,"
+ "Invalid instance type. Must be ObservabilityPipelineHttpClientDestination,"
+ + " ObservabilityPipelineAmazonOpenSearchDestination,"
+ " ObservabilityPipelineAmazonS3Destination,"
- + " ObservabilityPipelineGoogleCloudStorageDestination,"
- + " ObservabilityPipelineSplunkHecDestination,"
- + " ObservabilityPipelineSumoLogicDestination,"
+ + " ObservabilityPipelineAmazonSecurityLakeDestination, AzureStorageDestination,"
+ + " ObservabilityPipelineCrowdStrikeNextGenSiemDestination,"
+ + " ObservabilityPipelineDatadogLogsDestination,"
+ " ObservabilityPipelineElasticsearchDestination,"
- + " ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination,"
- + " AzureStorageDestination, MicrosoftSentinelDestination,"
+ " ObservabilityPipelineGoogleChronicleDestination,"
+ + " ObservabilityPipelineGoogleCloudStorageDestination,"
+ + " ObservabilityPipelineGooglePubSubDestination, MicrosoftSentinelDestination,"
+ " ObservabilityPipelineNewRelicDestination,"
- + " ObservabilityPipelineSentinelOneDestination,"
+ " ObservabilityPipelineOpenSearchDestination,"
- + " ObservabilityPipelineAmazonOpenSearchDestination,"
- + " ObservabilityPipelineSocketDestination,"
- + " ObservabilityPipelineAmazonSecurityLakeDestination,"
- + " ObservabilityPipelineCrowdStrikeNextGenSiemDestination,"
- + " ObservabilityPipelineGooglePubSubDestination");
+ + " ObservabilityPipelineRsyslogDestination,"
+ + " ObservabilityPipelineSentinelOneDestination,"
+ + " ObservabilityPipelineSocketDestination, ObservabilityPipelineSplunkHecDestination,"
+ + " ObservabilityPipelineSumoLogicDestination,"
+ + " ObservabilityPipelineSyslogNgDestination,"
+ + " ObservabilityPipelineDatadogMetricsDestination");
}
/**
* Get the actual instance, which can be the following:
- * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineAmazonS3Destination,
- * ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination,
- * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineElasticsearchDestination,
- * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination,
- * AzureStorageDestination, MicrosoftSentinelDestination,
- * ObservabilityPipelineGoogleChronicleDestination, ObservabilityPipelineNewRelicDestination,
- * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination,
- * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination,
- * ObservabilityPipelineAmazonSecurityLakeDestination,
- * ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
- * ObservabilityPipelineGooglePubSubDestination
+ * ObservabilityPipelineHttpClientDestination, ObservabilityPipelineAmazonOpenSearchDestination,
+ * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineAmazonSecurityLakeDestination,
+ * AzureStorageDestination, ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
+ * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineElasticsearchDestination,
+ * ObservabilityPipelineGoogleChronicleDestination,
+ * ObservabilityPipelineGoogleCloudStorageDestination,
+ * ObservabilityPipelineGooglePubSubDestination, MicrosoftSentinelDestination,
+ * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineOpenSearchDestination,
+ * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSentinelOneDestination,
+ * ObservabilityPipelineSocketDestination, ObservabilityPipelineSplunkHecDestination,
+ * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineSyslogNgDestination,
+ * ObservabilityPipelineDatadogMetricsDestination
*
- * @return The actual instance (ObservabilityPipelineDatadogLogsDestination,
- * ObservabilityPipelineAmazonS3Destination,
- * ObservabilityPipelineGoogleCloudStorageDestination,
- * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
- * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination,
- * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination,
- * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination,
- * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination,
- * ObservabilityPipelineOpenSearchDestination,
- * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination,
- * ObservabilityPipelineAmazonSecurityLakeDestination,
+ * @return The actual instance (ObservabilityPipelineHttpClientDestination,
+ * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineAmazonS3Destination,
+ * ObservabilityPipelineAmazonSecurityLakeDestination, AzureStorageDestination,
* ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
- * ObservabilityPipelineGooglePubSubDestination)
+ * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineElasticsearchDestination,
+ * ObservabilityPipelineGoogleChronicleDestination,
+ * ObservabilityPipelineGoogleCloudStorageDestination,
+ * ObservabilityPipelineGooglePubSubDestination, MicrosoftSentinelDestination,
+ * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineOpenSearchDestination,
+ * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSentinelOneDestination,
+ * ObservabilityPipelineSocketDestination, ObservabilityPipelineSplunkHecDestination,
+ * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineSyslogNgDestination,
+ * ObservabilityPipelineDatadogMetricsDestination)
*/
@Override
public Object getActualInstance() {
@@ -1442,16 +1578,29 @@ public Object getActualInstance() {
}
/**
- * Get the actual instance of `ObservabilityPipelineDatadogLogsDestination`. If the actual
- * instance is not `ObservabilityPipelineDatadogLogsDestination`, the ClassCastException will be
- * thrown.
+ * Get the actual instance of `ObservabilityPipelineHttpClientDestination`. If the actual instance
+ * is not `ObservabilityPipelineHttpClientDestination`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineDatadogLogsDestination`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogLogsDestination`
+ * @return The actual instance of `ObservabilityPipelineHttpClientDestination`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineHttpClientDestination`
*/
- public ObservabilityPipelineDatadogLogsDestination
- getObservabilityPipelineDatadogLogsDestination() throws ClassCastException {
- return (ObservabilityPipelineDatadogLogsDestination) super.getActualInstance();
+ public ObservabilityPipelineHttpClientDestination getObservabilityPipelineHttpClientDestination()
+ throws ClassCastException {
+ return (ObservabilityPipelineHttpClientDestination) super.getActualInstance();
+ }
+
+ /**
+ * Get the actual instance of `ObservabilityPipelineAmazonOpenSearchDestination`. If the actual
+ * instance is not `ObservabilityPipelineAmazonOpenSearchDestination`, the ClassCastException will
+ * be thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineAmazonOpenSearchDestination`
+ * @throws ClassCastException if the instance is not
+ * `ObservabilityPipelineAmazonOpenSearchDestination`
+ */
+ public ObservabilityPipelineAmazonOpenSearchDestination
+ getObservabilityPipelineAmazonOpenSearchDestination() throws ClassCastException {
+ return (ObservabilityPipelineAmazonOpenSearchDestination) super.getActualInstance();
}
/**
@@ -1467,41 +1616,55 @@ public ObservabilityPipelineAmazonS3Destination getObservabilityPipelineAmazonS3
}
/**
- * Get the actual instance of `ObservabilityPipelineGoogleCloudStorageDestination`. If the actual
- * instance is not `ObservabilityPipelineGoogleCloudStorageDestination`, the ClassCastException
+ * Get the actual instance of `ObservabilityPipelineAmazonSecurityLakeDestination`. If the actual
+ * instance is not `ObservabilityPipelineAmazonSecurityLakeDestination`, the ClassCastException
* will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineGoogleCloudStorageDestination`
+ * @return The actual instance of `ObservabilityPipelineAmazonSecurityLakeDestination`
* @throws ClassCastException if the instance is not
- * `ObservabilityPipelineGoogleCloudStorageDestination`
+ * `ObservabilityPipelineAmazonSecurityLakeDestination`
*/
- public ObservabilityPipelineGoogleCloudStorageDestination
- getObservabilityPipelineGoogleCloudStorageDestination() throws ClassCastException {
- return (ObservabilityPipelineGoogleCloudStorageDestination) super.getActualInstance();
+ public ObservabilityPipelineAmazonSecurityLakeDestination
+ getObservabilityPipelineAmazonSecurityLakeDestination() throws ClassCastException {
+ return (ObservabilityPipelineAmazonSecurityLakeDestination) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineSplunkHecDestination`. If the actual instance
- * is not `ObservabilityPipelineSplunkHecDestination`, the ClassCastException will be thrown.
+ * Get the actual instance of `AzureStorageDestination`. If the actual instance is not
+ * `AzureStorageDestination`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineSplunkHecDestination`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecDestination`
+ * @return The actual instance of `AzureStorageDestination`
+ * @throws ClassCastException if the instance is not `AzureStorageDestination`
*/
- public ObservabilityPipelineSplunkHecDestination getObservabilityPipelineSplunkHecDestination()
- throws ClassCastException {
- return (ObservabilityPipelineSplunkHecDestination) super.getActualInstance();
+ public AzureStorageDestination getAzureStorageDestination() throws ClassCastException {
+ return (AzureStorageDestination) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineSumoLogicDestination`. If the actual instance
- * is not `ObservabilityPipelineSumoLogicDestination`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`. If the
+ * actual instance is not `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`, the
+ * ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineSumoLogicDestination`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineSumoLogicDestination`
+ * @return The actual instance of `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`
+ * @throws ClassCastException if the instance is not
+ * `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`
*/
- public ObservabilityPipelineSumoLogicDestination getObservabilityPipelineSumoLogicDestination()
- throws ClassCastException {
- return (ObservabilityPipelineSumoLogicDestination) super.getActualInstance();
+ public ObservabilityPipelineCrowdStrikeNextGenSiemDestination
+ getObservabilityPipelineCrowdStrikeNextGenSiemDestination() throws ClassCastException {
+ return (ObservabilityPipelineCrowdStrikeNextGenSiemDestination) super.getActualInstance();
+ }
+
+ /**
+ * Get the actual instance of `ObservabilityPipelineDatadogLogsDestination`. If the actual
+ * instance is not `ObservabilityPipelineDatadogLogsDestination`, the ClassCastException will be
+ * thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineDatadogLogsDestination`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogLogsDestination`
+ */
+ public ObservabilityPipelineDatadogLogsDestination
+ getObservabilityPipelineDatadogLogsDestination() throws ClassCastException {
+ return (ObservabilityPipelineDatadogLogsDestination) super.getActualInstance();
}
/**
@@ -1519,38 +1682,45 @@ public ObservabilityPipelineSumoLogicDestination getObservabilityPipelineSumoLog
}
/**
- * Get the actual instance of `ObservabilityPipelineRsyslogDestination`. If the actual instance is
- * not `ObservabilityPipelineRsyslogDestination`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineGoogleChronicleDestination`. If the actual
+ * instance is not `ObservabilityPipelineGoogleChronicleDestination`, the ClassCastException will
+ * be thrown.
*
- * @return The actual instance of `ObservabilityPipelineRsyslogDestination`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineRsyslogDestination`
+ * @return The actual instance of `ObservabilityPipelineGoogleChronicleDestination`
+ * @throws ClassCastException if the instance is not
+ * `ObservabilityPipelineGoogleChronicleDestination`
*/
- public ObservabilityPipelineRsyslogDestination getObservabilityPipelineRsyslogDestination()
- throws ClassCastException {
- return (ObservabilityPipelineRsyslogDestination) super.getActualInstance();
+ public ObservabilityPipelineGoogleChronicleDestination
+ getObservabilityPipelineGoogleChronicleDestination() throws ClassCastException {
+ return (ObservabilityPipelineGoogleChronicleDestination) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineSyslogNgDestination`. If the actual instance
- * is not `ObservabilityPipelineSyslogNgDestination`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineGoogleCloudStorageDestination`. If the actual
+ * instance is not `ObservabilityPipelineGoogleCloudStorageDestination`, the ClassCastException
+ * will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineSyslogNgDestination`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineSyslogNgDestination`
+ * @return The actual instance of `ObservabilityPipelineGoogleCloudStorageDestination`
+ * @throws ClassCastException if the instance is not
+ * `ObservabilityPipelineGoogleCloudStorageDestination`
*/
- public ObservabilityPipelineSyslogNgDestination getObservabilityPipelineSyslogNgDestination()
- throws ClassCastException {
- return (ObservabilityPipelineSyslogNgDestination) super.getActualInstance();
+ public ObservabilityPipelineGoogleCloudStorageDestination
+ getObservabilityPipelineGoogleCloudStorageDestination() throws ClassCastException {
+ return (ObservabilityPipelineGoogleCloudStorageDestination) super.getActualInstance();
}
/**
- * Get the actual instance of `AzureStorageDestination`. If the actual instance is not
- * `AzureStorageDestination`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineGooglePubSubDestination`. If the actual
+ * instance is not `ObservabilityPipelineGooglePubSubDestination`, the ClassCastException will be
+ * thrown.
*
- * @return The actual instance of `AzureStorageDestination`
- * @throws ClassCastException if the instance is not `AzureStorageDestination`
+ * @return The actual instance of `ObservabilityPipelineGooglePubSubDestination`
+ * @throws ClassCastException if the instance is not
+ * `ObservabilityPipelineGooglePubSubDestination`
*/
- public AzureStorageDestination getAzureStorageDestination() throws ClassCastException {
- return (AzureStorageDestination) super.getActualInstance();
+ public ObservabilityPipelineGooglePubSubDestination
+ getObservabilityPipelineGooglePubSubDestination() throws ClassCastException {
+ return (ObservabilityPipelineGooglePubSubDestination) super.getActualInstance();
}
/**
@@ -1564,20 +1734,6 @@ public MicrosoftSentinelDestination getMicrosoftSentinelDestination() throws Cla
return (MicrosoftSentinelDestination) super.getActualInstance();
}
- /**
- * Get the actual instance of `ObservabilityPipelineGoogleChronicleDestination`. If the actual
- * instance is not `ObservabilityPipelineGoogleChronicleDestination`, the ClassCastException will
- * be thrown.
- *
- * @return The actual instance of `ObservabilityPipelineGoogleChronicleDestination`
- * @throws ClassCastException if the instance is not
- * `ObservabilityPipelineGoogleChronicleDestination`
- */
- public ObservabilityPipelineGoogleChronicleDestination
- getObservabilityPipelineGoogleChronicleDestination() throws ClassCastException {
- return (ObservabilityPipelineGoogleChronicleDestination) super.getActualInstance();
- }
-
/**
* Get the actual instance of `ObservabilityPipelineNewRelicDestination`. If the actual instance
* is not `ObservabilityPipelineNewRelicDestination`, the ClassCastException will be thrown.
@@ -1590,19 +1746,6 @@ public ObservabilityPipelineNewRelicDestination getObservabilityPipelineNewRelic
return (ObservabilityPipelineNewRelicDestination) super.getActualInstance();
}
- /**
- * Get the actual instance of `ObservabilityPipelineSentinelOneDestination`. If the actual
- * instance is not `ObservabilityPipelineSentinelOneDestination`, the ClassCastException will be
- * thrown.
- *
- * @return The actual instance of `ObservabilityPipelineSentinelOneDestination`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineSentinelOneDestination`
- */
- public ObservabilityPipelineSentinelOneDestination
- getObservabilityPipelineSentinelOneDestination() throws ClassCastException {
- return (ObservabilityPipelineSentinelOneDestination) super.getActualInstance();
- }
-
/**
* Get the actual instance of `ObservabilityPipelineOpenSearchDestination`. If the actual instance
* is not `ObservabilityPipelineOpenSearchDestination`, the ClassCastException will be thrown.
@@ -1616,17 +1759,28 @@ public ObservabilityPipelineOpenSearchDestination getObservabilityPipelineOpenSe
}
/**
- * Get the actual instance of `ObservabilityPipelineAmazonOpenSearchDestination`. If the actual
- * instance is not `ObservabilityPipelineAmazonOpenSearchDestination`, the ClassCastException will
- * be thrown.
+ * Get the actual instance of `ObservabilityPipelineRsyslogDestination`. If the actual instance is
+ * not `ObservabilityPipelineRsyslogDestination`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineAmazonOpenSearchDestination`
- * @throws ClassCastException if the instance is not
- * `ObservabilityPipelineAmazonOpenSearchDestination`
+ * @return The actual instance of `ObservabilityPipelineRsyslogDestination`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineRsyslogDestination`
*/
- public ObservabilityPipelineAmazonOpenSearchDestination
- getObservabilityPipelineAmazonOpenSearchDestination() throws ClassCastException {
- return (ObservabilityPipelineAmazonOpenSearchDestination) super.getActualInstance();
+ public ObservabilityPipelineRsyslogDestination getObservabilityPipelineRsyslogDestination()
+ throws ClassCastException {
+ return (ObservabilityPipelineRsyslogDestination) super.getActualInstance();
+ }
+
+ /**
+ * Get the actual instance of `ObservabilityPipelineSentinelOneDestination`. If the actual
+ * instance is not `ObservabilityPipelineSentinelOneDestination`, the ClassCastException will be
+ * thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineSentinelOneDestination`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineSentinelOneDestination`
+ */
+ public ObservabilityPipelineSentinelOneDestination
+ getObservabilityPipelineSentinelOneDestination() throws ClassCastException {
+ return (ObservabilityPipelineSentinelOneDestination) super.getActualInstance();
}
/**
@@ -1642,44 +1796,52 @@ public ObservabilityPipelineSocketDestination getObservabilityPipelineSocketDest
}
/**
- * Get the actual instance of `ObservabilityPipelineAmazonSecurityLakeDestination`. If the actual
- * instance is not `ObservabilityPipelineAmazonSecurityLakeDestination`, the ClassCastException
- * will be thrown.
+ * Get the actual instance of `ObservabilityPipelineSplunkHecDestination`. If the actual instance
+ * is not `ObservabilityPipelineSplunkHecDestination`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineAmazonSecurityLakeDestination`
- * @throws ClassCastException if the instance is not
- * `ObservabilityPipelineAmazonSecurityLakeDestination`
+ * @return The actual instance of `ObservabilityPipelineSplunkHecDestination`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecDestination`
*/
- public ObservabilityPipelineAmazonSecurityLakeDestination
- getObservabilityPipelineAmazonSecurityLakeDestination() throws ClassCastException {
- return (ObservabilityPipelineAmazonSecurityLakeDestination) super.getActualInstance();
+ public ObservabilityPipelineSplunkHecDestination getObservabilityPipelineSplunkHecDestination()
+ throws ClassCastException {
+ return (ObservabilityPipelineSplunkHecDestination) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`. If the
- * actual instance is not `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`, the
- * ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineSumoLogicDestination`. If the actual instance
+ * is not `ObservabilityPipelineSumoLogicDestination`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`
- * @throws ClassCastException if the instance is not
- * `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`
+ * @return The actual instance of `ObservabilityPipelineSumoLogicDestination`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineSumoLogicDestination`
*/
- public ObservabilityPipelineCrowdStrikeNextGenSiemDestination
- getObservabilityPipelineCrowdStrikeNextGenSiemDestination() throws ClassCastException {
- return (ObservabilityPipelineCrowdStrikeNextGenSiemDestination) super.getActualInstance();
+ public ObservabilityPipelineSumoLogicDestination getObservabilityPipelineSumoLogicDestination()
+ throws ClassCastException {
+ return (ObservabilityPipelineSumoLogicDestination) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineGooglePubSubDestination`. If the actual
- * instance is not `ObservabilityPipelineGooglePubSubDestination`, the ClassCastException will be
- * thrown.
+ * Get the actual instance of `ObservabilityPipelineSyslogNgDestination`. If the actual instance
+ * is not `ObservabilityPipelineSyslogNgDestination`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineGooglePubSubDestination`
+ * @return The actual instance of `ObservabilityPipelineSyslogNgDestination`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineSyslogNgDestination`
+ */
+ public ObservabilityPipelineSyslogNgDestination getObservabilityPipelineSyslogNgDestination()
+ throws ClassCastException {
+ return (ObservabilityPipelineSyslogNgDestination) super.getActualInstance();
+ }
+
+ /**
+ * Get the actual instance of `ObservabilityPipelineDatadogMetricsDestination`. If the actual
+ * instance is not `ObservabilityPipelineDatadogMetricsDestination`, the ClassCastException will
+ * be thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineDatadogMetricsDestination`
* @throws ClassCastException if the instance is not
- * `ObservabilityPipelineGooglePubSubDestination`
+ * `ObservabilityPipelineDatadogMetricsDestination`
*/
- public ObservabilityPipelineGooglePubSubDestination
- getObservabilityPipelineGooglePubSubDestination() throws ClassCastException {
- return (ObservabilityPipelineGooglePubSubDestination) super.getActualInstance();
+ public ObservabilityPipelineDatadogMetricsDestination
+ getObservabilityPipelineDatadogMetricsDestination() throws ClassCastException {
+ return (ObservabilityPipelineDatadogMetricsDestination) super.getActualInstance();
}
}
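Reviewer note: a minimal sketch of how the reshuffled oneOf wrapper is exercised with the new branch. It only uses members visible in this diff; the no-arg model constructor is assumed, as is usual for these generated classes.

    ObservabilityPipelineDatadogMetricsDestination metrics =
        new ObservabilityPipelineDatadogMetricsDestination(); // assumed no-arg ctor
    ObservabilityPipelineConfigDestinationItem item =
        new ObservabilityPipelineConfigDestinationItem(metrics); // new overload above

    // The typed accessor casts the held instance and throws ClassCastException
    // when a different oneOf branch is stored.
    ObservabilityPipelineDatadogMetricsDestination back =
        item.getObservabilityPipelineDatadogMetricsDestination();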
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigPipelineType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigPipelineType.java
new file mode 100644
index 00000000000..7020de23586
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigPipelineType.java
@@ -0,0 +1,65 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The type of data being ingested. Defaults to logs if not specified. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineConfigPipelineType.ObservabilityPipelineConfigPipelineTypeSerializer
+ .class)
+public class ObservabilityPipelineConfigPipelineType extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues =
+      new HashSet<String>(Arrays.asList("logs", "metrics"));
+
+ public static final ObservabilityPipelineConfigPipelineType LOGS =
+ new ObservabilityPipelineConfigPipelineType("logs");
+ public static final ObservabilityPipelineConfigPipelineType METRICS =
+ new ObservabilityPipelineConfigPipelineType("metrics");
+
+ ObservabilityPipelineConfigPipelineType(String value) {
+ super(value, allowedValues);
+ }
+
+  public static class ObservabilityPipelineConfigPipelineTypeSerializer
+      extends StdSerializer<ObservabilityPipelineConfigPipelineType> {
+    public ObservabilityPipelineConfigPipelineTypeSerializer(
+        Class<ObservabilityPipelineConfigPipelineType> t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineConfigPipelineTypeSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineConfigPipelineType value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineConfigPipelineType fromValue(String value) {
+ return new ObservabilityPipelineConfigPipelineType(value);
+ }
+}
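Reviewer note: a short sketch of the new pipeline-type wrapper. Both calls are defined in the file above; the round-trip behavior follows from the custom serializer, which writes the bare string value.

    // The predefined constants cover the two allowed values...
    ObservabilityPipelineConfigPipelineType logs =
        ObservabilityPipelineConfigPipelineType.LOGS;
    // ...and fromValue is the @JsonCreator entry point Jackson uses when parsing,
    // so these serialize as the plain strings "logs" and "metrics" in payloads.
    ObservabilityPipelineConfigPipelineType metrics =
        ObservabilityPipelineConfigPipelineType.fromValue("metrics");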
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java
index 3ec77e02e20..f857d7a96b8 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java
@@ -139,257 +139,306 @@ public ObservabilityPipelineConfigProcessorItem deserialize(
e);
}
- // deserialize ObservabilityPipelineParseJSONProcessor
+ // deserialize ObservabilityPipelineAddEnvVarsProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class)
- || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class)
- || ObservabilityPipelineParseJSONProcessor.class.equals(Float.class)
- || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class)
- || ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineParseJSONProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class)
+ || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class)
+ || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class)
- || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineParseJSONProcessor.class.equals(Float.class)
- || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineParseJSONProcessor.class.equals(String.class)
+ (ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineParseJSONProcessor.class);
+ .readValueAs(ObservabilityPipelineAddEnvVarsProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineParseJSONProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineAddEnvVarsProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineParseJSONProcessor'");
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineAddEnvVarsProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineParseJSONProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineAddEnvVarsProcessor'",
e);
}
- // deserialize ObservabilityPipelineQuotaProcessor
+ // deserialize ObservabilityPipelineAddFieldsProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineQuotaProcessor.class.equals(Integer.class)
- || ObservabilityPipelineQuotaProcessor.class.equals(Long.class)
- || ObservabilityPipelineQuotaProcessor.class.equals(Float.class)
- || ObservabilityPipelineQuotaProcessor.class.equals(Double.class)
- || ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineQuotaProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class)
+ || ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class)
+ || ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineAddFieldsProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineQuotaProcessor.class.equals(Integer.class)
- || ObservabilityPipelineQuotaProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineQuotaProcessor.class.equals(Float.class)
- || ObservabilityPipelineQuotaProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineQuotaProcessor.class.equals(String.class)
+ (ObservabilityPipelineAddFieldsProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
- tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineQuotaProcessor.class);
+ tmp =
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineAddFieldsProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineQuotaProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineAddFieldsProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineQuotaProcessor'");
+ log.log(
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineAddFieldsProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineQuotaProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineAddFieldsProcessor'",
e);
}
- // deserialize ObservabilityPipelineAddFieldsProcessor
+ // deserialize ObservabilityPipelineCustomProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class)
- || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class)
- || ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class)
- || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class)
- || ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineAddFieldsProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineCustomProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineCustomProcessor.class.equals(Long.class)
+ || ObservabilityPipelineCustomProcessor.class.equals(Float.class)
+ || ObservabilityPipelineCustomProcessor.class.equals(Double.class)
+ || ObservabilityPipelineCustomProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineCustomProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class)
- || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineCustomProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineCustomProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class)
- || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineCustomProcessor.class.equals(Float.class)
+ || ObservabilityPipelineCustomProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineCustomProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineAddFieldsProcessor.class.equals(String.class)
+ (ObservabilityPipelineCustomProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineAddFieldsProcessor.class);
+ tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineCustomProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineAddFieldsProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineCustomProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineAddFieldsProcessor'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineCustomProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineAddFieldsProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineCustomProcessor'",
e);
}
- // deserialize ObservabilityPipelineRemoveFieldsProcessor
+ // deserialize ObservabilityPipelineDatadogTagsProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class)
- || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class)
- || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class)
- || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class)
- || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class)
+ || ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class)
+ || ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class)
- || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class)
- || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)
+ (ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineRemoveFieldsProcessor.class);
+ .readValueAs(ObservabilityPipelineDatadogTagsProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineRemoveFieldsProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineDatadogTagsProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER,
- "Input data matches schema 'ObservabilityPipelineRemoveFieldsProcessor'");
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineDatadogTagsProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineRemoveFieldsProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineDatadogTagsProcessor'",
e);
}
- // deserialize ObservabilityPipelineRenameFieldsProcessor
+ // deserialize ObservabilityPipelineDedupeProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class)
- || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class)
- || ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class)
- || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class)
- || ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineDedupeProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineDedupeProcessor.class.equals(Long.class)
+ || ObservabilityPipelineDedupeProcessor.class.equals(Float.class)
+ || ObservabilityPipelineDedupeProcessor.class.equals(Double.class)
+ || ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineDedupeProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class)
- || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineDedupeProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineDedupeProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class)
- || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineDedupeProcessor.class.equals(Float.class)
+ || ObservabilityPipelineDedupeProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)
+ (ObservabilityPipelineDedupeProcessor.class.equals(String.class)
+ && token == JsonToken.VALUE_STRING);
+ }
+ }
+ if (attemptParsing) {
+ tmp =
+ tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineDedupeProcessor.class);
+ // TODO: there is no validation against JSON schema constraints
+ // (min, max, enum, pattern...), this does not perform a strict JSON
+ // validation, which means the 'match' count may be higher than it should be.
+ if (!((ObservabilityPipelineDedupeProcessor) tmp).unparsed) {
+ deserialized = tmp;
+ match++;
+ }
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineDedupeProcessor'");
+ }
+ } catch (Exception e) {
+ // deserialization failed, continue
+ log.log(
+ Level.FINER,
+ "Input data does not match schema 'ObservabilityPipelineDedupeProcessor'",
+ e);
+ }
+
+ // deserialize ObservabilityPipelineEnrichmentTableProcessor
+ try {
+ boolean attemptParsing = true;
+ // ensure that we respect type coercion as set on the client ObjectMapper
+ if (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class)
+ || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class)
+ || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class)
+ || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)) {
+ attemptParsing = typeCoercion;
+ if (!attemptParsing) {
+ attemptParsing |=
+ ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class))
+ && token == JsonToken.VALUE_NUMBER_INT);
+ attemptParsing |=
+ ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class)
+ || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class))
+ && (token == JsonToken.VALUE_NUMBER_FLOAT
+ || token == JsonToken.VALUE_NUMBER_INT));
+ attemptParsing |=
+ (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class)
+ && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+ attemptParsing |=
+ (ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineRenameFieldsProcessor.class);
+ .readValueAs(ObservabilityPipelineEnrichmentTableProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineRenameFieldsProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineEnrichmentTableProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
Level.FINER,
- "Input data matches schema 'ObservabilityPipelineRenameFieldsProcessor'");
+ "Input data matches schema 'ObservabilityPipelineEnrichmentTableProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineRenameFieldsProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineEnrichmentTableProcessor'",
e);
}
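
Every `deserialize` block in this generated file follows the same pattern: tentatively parse the tree as one candidate schema, keep the result only if it parsed cleanly, and increment a `match` counter instead of returning early so ambiguous payloads can be detected. A minimal sketch of that strategy in plain Jackson, with a hypothetical candidate list (an illustration, not the generated code itself):

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    // Sketch of the oneOf resolution above: try every candidate, count matches,
    // and require exactly one so ambiguous or unmatched payloads are rejected.
    final class OneOfSketch {
      static Object resolve(ObjectMapper mapper, JsonNode tree, Class<?>... candidates) {
        Object deserialized = null;
        int match = 0;
        for (Class<?> candidate : candidates) {
          try {
            deserialized = mapper.treeToValue(tree, candidate);
            match++;
          } catch (Exception e) {
            // deserialization failed, continue with the next candidate
          }
        }
        if (match != 1) {
          throw new IllegalStateException("Expected exactly 1 matching schema, got " + match);
        }
        return deserialized;
      }
    }
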
@@ -445,52 +494,54 @@ public ObservabilityPipelineConfigProcessorItem deserialize(
e);
}
- // deserialize ObservabilityPipelineSampleProcessor
+ // deserialize ObservabilityPipelineOcsfMapperProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSampleProcessor.class.equals(Integer.class)
- || ObservabilityPipelineSampleProcessor.class.equals(Long.class)
- || ObservabilityPipelineSampleProcessor.class.equals(Float.class)
- || ObservabilityPipelineSampleProcessor.class.equals(Double.class)
- || ObservabilityPipelineSampleProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineSampleProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class)
+ || ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class)
+ || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class)
+ || ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSampleProcessor.class.equals(Integer.class)
- || ObservabilityPipelineSampleProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSampleProcessor.class.equals(Float.class)
- || ObservabilityPipelineSampleProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class)
+ || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSampleProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSampleProcessor.class.equals(String.class)
+ (ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSampleProcessor.class);
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineOcsfMapperProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSampleProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineOcsfMapperProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSampleProcessor'");
+ log.log(
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineOcsfMapperProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineSampleProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineOcsfMapperProcessor'",
e);
}
@@ -545,460 +596,460 @@ public ObservabilityPipelineConfigProcessorItem deserialize(
e);
}
- // deserialize ObservabilityPipelineSensitiveDataScannerProcessor
+ // deserialize ObservabilityPipelineParseJSONProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class)
- || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Long.class)
- || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class)
- || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Double.class)
- || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class)
+ || ObservabilityPipelineParseJSONProcessor.class.equals(Float.class)
+ || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class)
+ || ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineParseJSONProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class)
- || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(
- Long.class))
+ ((ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class)
- || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(
- Double.class))
+ ((ObservabilityPipelineParseJSONProcessor.class.equals(Float.class)
+ || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)
+ (ObservabilityPipelineParseJSONProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessor.class);
+ .readValueAs(ObservabilityPipelineParseJSONProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSensitiveDataScannerProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineParseJSONProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER,
- "Input data matches schema 'ObservabilityPipelineSensitiveDataScannerProcessor'");
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineParseJSONProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineSensitiveDataScannerProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineParseJSONProcessor'",
e);
}
- // deserialize ObservabilityPipelineOcsfMapperProcessor
+ // deserialize ObservabilityPipelineQuotaProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class)
- || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class)
- || ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class)
- || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class)
- || ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineQuotaProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineQuotaProcessor.class.equals(Long.class)
+ || ObservabilityPipelineQuotaProcessor.class.equals(Float.class)
+ || ObservabilityPipelineQuotaProcessor.class.equals(Double.class)
+ || ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineQuotaProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class)
- || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineQuotaProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineQuotaProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class)
- || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineQuotaProcessor.class.equals(Float.class)
+ || ObservabilityPipelineQuotaProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class)
+ (ObservabilityPipelineQuotaProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
- tmp =
- tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineOcsfMapperProcessor.class);
+ tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineQuotaProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineOcsfMapperProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineQuotaProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineOcsfMapperProcessor'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineQuotaProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineOcsfMapperProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineQuotaProcessor'",
e);
}
- // deserialize ObservabilityPipelineAddEnvVarsProcessor
+ // deserialize ObservabilityPipelineReduceProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class)
- || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class)
- || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class)
- || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class)
- || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineReduceProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineReduceProcessor.class.equals(Long.class)
+ || ObservabilityPipelineReduceProcessor.class.equals(Float.class)
+ || ObservabilityPipelineReduceProcessor.class.equals(Double.class)
+ || ObservabilityPipelineReduceProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineReduceProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class)
- || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineReduceProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineReduceProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class)
- || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineReduceProcessor.class.equals(Float.class)
+ || ObservabilityPipelineReduceProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineReduceProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)
+ (ObservabilityPipelineReduceProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineAddEnvVarsProcessor.class);
+ tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineReduceProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineAddEnvVarsProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineReduceProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineAddEnvVarsProcessor'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineReduceProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineAddEnvVarsProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineReduceProcessor'",
e);
}
- // deserialize ObservabilityPipelineDedupeProcessor
+ // deserialize ObservabilityPipelineRemoveFieldsProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineDedupeProcessor.class.equals(Integer.class)
- || ObservabilityPipelineDedupeProcessor.class.equals(Long.class)
- || ObservabilityPipelineDedupeProcessor.class.equals(Float.class)
- || ObservabilityPipelineDedupeProcessor.class.equals(Double.class)
- || ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineDedupeProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class)
+ || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class)
+ || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineDedupeProcessor.class.equals(Integer.class)
- || ObservabilityPipelineDedupeProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineDedupeProcessor.class.equals(Float.class)
- || ObservabilityPipelineDedupeProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineDedupeProcessor.class.equals(String.class)
+ (ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineDedupeProcessor.class);
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineRemoveFieldsProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineDedupeProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineRemoveFieldsProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineDedupeProcessor'");
+ log.log(
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineRemoveFieldsProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineDedupeProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineRemoveFieldsProcessor'",
e);
}
- // deserialize ObservabilityPipelineEnrichmentTableProcessor
+ // deserialize ObservabilityPipelineRenameFieldsProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class)
- || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class)
- || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class)
- || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class)
- || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class)
+ || ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class)
+ || ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class)
- || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class)
- || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)
+ (ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineEnrichmentTableProcessor.class);
+ .readValueAs(ObservabilityPipelineRenameFieldsProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineEnrichmentTableProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineRenameFieldsProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
Level.FINER,
- "Input data matches schema 'ObservabilityPipelineEnrichmentTableProcessor'");
+ "Input data matches schema 'ObservabilityPipelineRenameFieldsProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineEnrichmentTableProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineRenameFieldsProcessor'",
e);
}
- // deserialize ObservabilityPipelineReduceProcessor
+ // deserialize ObservabilityPipelineSampleProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineReduceProcessor.class.equals(Integer.class)
- || ObservabilityPipelineReduceProcessor.class.equals(Long.class)
- || ObservabilityPipelineReduceProcessor.class.equals(Float.class)
- || ObservabilityPipelineReduceProcessor.class.equals(Double.class)
- || ObservabilityPipelineReduceProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineReduceProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineSampleProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineSampleProcessor.class.equals(Long.class)
+ || ObservabilityPipelineSampleProcessor.class.equals(Float.class)
+ || ObservabilityPipelineSampleProcessor.class.equals(Double.class)
+ || ObservabilityPipelineSampleProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineSampleProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineReduceProcessor.class.equals(Integer.class)
- || ObservabilityPipelineReduceProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineSampleProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineSampleProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineReduceProcessor.class.equals(Float.class)
- || ObservabilityPipelineReduceProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineSampleProcessor.class.equals(Float.class)
+ || ObservabilityPipelineSampleProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineReduceProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineSampleProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineReduceProcessor.class.equals(String.class)
+ (ObservabilityPipelineSampleProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineReduceProcessor.class);
+ tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSampleProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineReduceProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineSampleProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineReduceProcessor'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSampleProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineReduceProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineSampleProcessor'",
e);
}
- // deserialize ObservabilityPipelineThrottleProcessor
+ // deserialize ObservabilityPipelineSensitiveDataScannerProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineThrottleProcessor.class.equals(Integer.class)
- || ObservabilityPipelineThrottleProcessor.class.equals(Long.class)
- || ObservabilityPipelineThrottleProcessor.class.equals(Float.class)
- || ObservabilityPipelineThrottleProcessor.class.equals(Double.class)
- || ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineThrottleProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Long.class)
+ || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class)
+ || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Double.class)
+ || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineThrottleProcessor.class.equals(Integer.class)
- || ObservabilityPipelineThrottleProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(
+ Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineThrottleProcessor.class.equals(Float.class)
- || ObservabilityPipelineThrottleProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class)
+ || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(
+ Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineThrottleProcessor.class.equals(String.class)
+ (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineThrottleProcessor.class);
+ .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineThrottleProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineSensitiveDataScannerProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineThrottleProcessor'");
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineSensitiveDataScannerProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineThrottleProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineSensitiveDataScannerProcessor'",
e);
}
- // deserialize ObservabilityPipelineCustomProcessor
+ // deserialize ObservabilityPipelineThrottleProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineCustomProcessor.class.equals(Integer.class)
- || ObservabilityPipelineCustomProcessor.class.equals(Long.class)
- || ObservabilityPipelineCustomProcessor.class.equals(Float.class)
- || ObservabilityPipelineCustomProcessor.class.equals(Double.class)
- || ObservabilityPipelineCustomProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineCustomProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineThrottleProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineThrottleProcessor.class.equals(Long.class)
+ || ObservabilityPipelineThrottleProcessor.class.equals(Float.class)
+ || ObservabilityPipelineThrottleProcessor.class.equals(Double.class)
+ || ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineThrottleProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineCustomProcessor.class.equals(Integer.class)
- || ObservabilityPipelineCustomProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineThrottleProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineThrottleProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineCustomProcessor.class.equals(Float.class)
- || ObservabilityPipelineCustomProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineThrottleProcessor.class.equals(Float.class)
+ || ObservabilityPipelineThrottleProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineCustomProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineCustomProcessor.class.equals(String.class)
+ (ObservabilityPipelineThrottleProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineCustomProcessor.class);
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineThrottleProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineCustomProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineThrottleProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineCustomProcessor'");
+ log.log(
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineThrottleProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineCustomProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineThrottleProcessor'",
e);
}
- // deserialize ObservabilityPipelineDatadogTagsProcessor
+ // deserialize ObservabilityPipelineMetricTagsProcessor
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class)
- || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class)
- || ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class)
- || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class)
- || ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class)
- || ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class)) {
+ if (ObservabilityPipelineMetricTagsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineMetricTagsProcessor.class.equals(Long.class)
+ || ObservabilityPipelineMetricTagsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineMetricTagsProcessor.class.equals(Double.class)
+ || ObservabilityPipelineMetricTagsProcessor.class.equals(Boolean.class)
+ || ObservabilityPipelineMetricTagsProcessor.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class)
- || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class))
+ ((ObservabilityPipelineMetricTagsProcessor.class.equals(Integer.class)
+ || ObservabilityPipelineMetricTagsProcessor.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class)
- || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class))
+ ((ObservabilityPipelineMetricTagsProcessor.class.equals(Float.class)
+ || ObservabilityPipelineMetricTagsProcessor.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class)
+ (ObservabilityPipelineMetricTagsProcessor.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class)
+ (ObservabilityPipelineMetricTagsProcessor.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineDatadogTagsProcessor.class);
+ .readValueAs(ObservabilityPipelineMetricTagsProcessor.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineDatadogTagsProcessor) tmp).unparsed) {
+ if (!((ObservabilityPipelineMetricTagsProcessor) tmp).unparsed) {
deserialized = tmp;
match++;
}
log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineDatadogTagsProcessor'");
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineMetricTagsProcessor'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineDatadogTagsProcessor'",
+ "Input data does not match schema 'ObservabilityPipelineMetricTagsProcessor'",
e);
}
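
Note that the type-coercion guard repeated in each block above is inert for these candidates: a generated processor class can never `equals` a primitive wrapper class, so `attemptParsing` always stays true and the `JsonToken` checks are skipped. The branch exists because the same template also serves oneOf members that are primitive types. A two-line demonstration of why the guard cannot fire for POJO candidates:

    // Class.equals compares Class identity, so a model class never equals
    // String.class or Integer.class; the token checks above are dead code here.
    public final class GuardDemo {
      public static void main(String[] args) {
        Class<?> candidate = java.util.ArrayList.class; // stand-in for a processor model
        System.out.println(candidate.equals(String.class));  // false
        System.out.println(candidate.equals(Integer.class)); // false
      }
    }
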
@@ -1037,27 +1088,32 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineFilterProce
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseJSONProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddEnvVarsProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineQuotaProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddFieldsProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddFieldsProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineCustomProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRemoveFieldsProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDatadogTagsProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameFieldsProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDedupeProcessor o) {
+ super("oneOf", Boolean.FALSE);
+ setActualInstance(o);
+ }
+
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineEnrichmentTableProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
@@ -1067,7 +1123,7 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineGenerateMet
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSampleProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineOcsfMapperProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
@@ -1077,48 +1133,48 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseGrokPr
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(
- ObservabilityPipelineSensitiveDataScannerProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseJSONProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineOcsfMapperProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineQuotaProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddEnvVarsProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineReduceProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDedupeProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRemoveFieldsProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineEnrichmentTableProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameFieldsProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineReduceProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSampleProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineThrottleProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(
+ ObservabilityPipelineSensitiveDataScannerProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineCustomProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineThrottleProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDatadogTagsProcessor o) {
+ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineMetricTagsProcessor o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
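
The constructor changes above are a reordering into the new alphabetical layout plus one genuinely new overload, for `ObservabilityPipelineMetricTagsProcessor`; each overload still just delegates to `setActualInstance`. A short usage sketch, assuming the generated models' public no-arg constructors (field population elided):

    // Wrap a concrete processor in the oneOf container; the overloaded
    // constructor routes through setActualInstance, which validates the type.
    ObservabilityPipelineDedupeProcessor dedupe = new ObservabilityPipelineDedupeProcessor();
    ObservabilityPipelineConfigProcessorItem item =
        new ObservabilityPipelineConfigProcessorItem(dedupe);
    Object actual = item.getActualInstance(); // same dedupe instance
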
@@ -1128,56 +1184,59 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDatadogTags
"ObservabilityPipelineFilterProcessor",
new GenericType<ObservabilityPipelineFilterProcessor>() {});
schemas.put(
- "ObservabilityPipelineParseJSONProcessor",
- new GenericType<ObservabilityPipelineParseJSONProcessor>() {});
- schemas.put(
- "ObservabilityPipelineQuotaProcessor",
- new GenericType<ObservabilityPipelineQuotaProcessor>() {});
+ "ObservabilityPipelineAddEnvVarsProcessor",
+ new GenericType<ObservabilityPipelineAddEnvVarsProcessor>() {});
schemas.put(
"ObservabilityPipelineAddFieldsProcessor",
new GenericType<ObservabilityPipelineAddFieldsProcessor>() {});
schemas.put(
- "ObservabilityPipelineRemoveFieldsProcessor",
- new GenericType<ObservabilityPipelineRemoveFieldsProcessor>() {});
+ "ObservabilityPipelineCustomProcessor",
+ new GenericType<ObservabilityPipelineCustomProcessor>() {});
schemas.put(
- "ObservabilityPipelineRenameFieldsProcessor",
- new GenericType<ObservabilityPipelineRenameFieldsProcessor>() {});
+ "ObservabilityPipelineDatadogTagsProcessor",
+ new GenericType<ObservabilityPipelineDatadogTagsProcessor>() {});
+ schemas.put(
+ "ObservabilityPipelineDedupeProcessor",
+ new GenericType<ObservabilityPipelineDedupeProcessor>() {});
+ schemas.put(
+ "ObservabilityPipelineEnrichmentTableProcessor",
+ new GenericType<ObservabilityPipelineEnrichmentTableProcessor>() {});
schemas.put(
"ObservabilityPipelineGenerateMetricsProcessor",
new GenericType<ObservabilityPipelineGenerateMetricsProcessor>() {});
schemas.put(
- "ObservabilityPipelineSampleProcessor",
- new GenericType<ObservabilityPipelineSampleProcessor>() {});
+ "ObservabilityPipelineOcsfMapperProcessor",
+ new GenericType<ObservabilityPipelineOcsfMapperProcessor>() {});
schemas.put(
"ObservabilityPipelineParseGrokProcessor",
new GenericType<ObservabilityPipelineParseGrokProcessor>() {});
schemas.put(
- "ObservabilityPipelineSensitiveDataScannerProcessor",
- new GenericType<ObservabilityPipelineSensitiveDataScannerProcessor>() {});
+ "ObservabilityPipelineParseJSONProcessor",
+ new GenericType<ObservabilityPipelineParseJSONProcessor>() {});
schemas.put(
- "ObservabilityPipelineOcsfMapperProcessor",
- new GenericType<ObservabilityPipelineOcsfMapperProcessor>() {});
+ "ObservabilityPipelineQuotaProcessor",
+ new GenericType<ObservabilityPipelineQuotaProcessor>() {});
schemas.put(
- "ObservabilityPipelineAddEnvVarsProcessor",
- new GenericType<ObservabilityPipelineAddEnvVarsProcessor>() {});
+ "ObservabilityPipelineReduceProcessor",
+ new GenericType<ObservabilityPipelineReduceProcessor>() {});
schemas.put(
- "ObservabilityPipelineDedupeProcessor",
- new GenericType<ObservabilityPipelineDedupeProcessor>() {});
+ "ObservabilityPipelineRemoveFieldsProcessor",
+ new GenericType<ObservabilityPipelineRemoveFieldsProcessor>() {});
+ schemas.put(
+ "ObservabilityPipelineRenameFieldsProcessor",
+ new GenericType<ObservabilityPipelineRenameFieldsProcessor>() {});
schemas.put(
- "ObservabilityPipelineEnrichmentTableProcessor",
- new GenericType<ObservabilityPipelineEnrichmentTableProcessor>() {});
+ "ObservabilityPipelineSampleProcessor",
+ new GenericType<ObservabilityPipelineSampleProcessor>() {});
schemas.put(
- "ObservabilityPipelineReduceProcessor",
- new GenericType<ObservabilityPipelineReduceProcessor>() {});
+ "ObservabilityPipelineSensitiveDataScannerProcessor",
+ new GenericType<ObservabilityPipelineSensitiveDataScannerProcessor>() {});
schemas.put(
"ObservabilityPipelineThrottleProcessor",
new GenericType<ObservabilityPipelineThrottleProcessor>() {});
schemas.put(
- "ObservabilityPipelineCustomProcessor",
- new GenericType<ObservabilityPipelineCustomProcessor>() {});
- schemas.put(
- "ObservabilityPipelineDatadogTagsProcessor",
- new GenericType<ObservabilityPipelineDatadogTagsProcessor>() {});
+ "ObservabilityPipelineMetricTagsProcessor",
+ new GenericType<ObservabilityPipelineMetricTagsProcessor>() {});
JSON.registerDescendants(
ObservabilityPipelineConfigProcessorItem.class, Collections.unmodifiableMap(schemas));
}
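
The static block rebuilds the schema registry in the same new order and registers the one genuinely new entry, `ObservabilityPipelineMetricTagsProcessor`. A quick inspection sketch, assuming the inherited `getSchemas()` accessor exposed by the generated base class:

    // Both the surviving DatadogTags entry and the new MetricTags entry are
    // registered under their schema names.
    Map<String, GenericType> schemas =
        new ObservabilityPipelineConfigProcessorItem().getSchemas();
    System.out.println(schemas.containsKey("ObservabilityPipelineDatadogTagsProcessor")); // true
    System.out.println(schemas.containsKey("ObservabilityPipelineMetricTagsProcessor"));  // true
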
@@ -1190,15 +1249,15 @@ public Map<String, GenericType> getSchemas() {
/**
* Set the instance that matches the oneOf child schema, check the instance parameter is valid
* against the oneOf child schemas: ObservabilityPipelineFilterProcessor,
- * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor,
- * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor,
- * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor,
- * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor,
- * ObservabilityPipelineSensitiveDataScannerProcessor, ObservabilityPipelineOcsfMapperProcessor,
- * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineDedupeProcessor,
- * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineReduceProcessor,
- * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineCustomProcessor,
- * ObservabilityPipelineDatadogTagsProcessor
+ * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineAddFieldsProcessor,
+ * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor,
+ * ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor,
+ * ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineOcsfMapperProcessor,
+ * ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor,
+ * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor,
+ * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor,
+ * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor,
+ * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineMetricTagsProcessor
*
* It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a
* composed schema (allOf, anyOf, oneOf).
@@ -1211,27 +1270,32 @@ public void setActualInstance(Object instance) {
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineParseJSONProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineAddEnvVarsProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineQuotaProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineAddFieldsProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineAddFieldsProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineCustomProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineRemoveFieldsProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineDatadogTagsProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineRenameFieldsProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineDedupeProcessor.class, instance, new HashSet<Class<?>>())) {
+ super.setActualInstance(instance);
+ return;
+ }
+ if (JSON.isInstanceOf(
+ ObservabilityPipelineEnrichmentTableProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
@@ -1241,7 +1305,7 @@ public void setActualInstance(Object instance) {
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineSampleProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineOcsfMapperProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
@@ -1251,49 +1315,49 @@ public void setActualInstance(Object instance) {
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineSensitiveDataScannerProcessor.class,
- instance,
- new HashSet<Class<?>>())) {
+ ObservabilityPipelineParseJSONProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineOcsfMapperProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineQuotaProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineAddEnvVarsProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineReduceProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineDedupeProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineRemoveFieldsProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineEnrichmentTableProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineRenameFieldsProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineReduceProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineSampleProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineThrottleProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineSensitiveDataScannerProcessor.class,
+ instance,
+ new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineCustomProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineThrottleProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineDatadogTagsProcessor.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineMetricTagsProcessor.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
@@ -1304,42 +1368,41 @@ public void setActualInstance(Object instance) {
}
throw new RuntimeException(
"Invalid instance type. Must be ObservabilityPipelineFilterProcessor,"
- + " ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor,"
- + " ObservabilityPipelineAddFieldsProcessor,"
- + " ObservabilityPipelineRemoveFieldsProcessor,"
- + " ObservabilityPipelineRenameFieldsProcessor,"
- + " ObservabilityPipelineGenerateMetricsProcessor,"
- + " ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor,"
- + " ObservabilityPipelineSensitiveDataScannerProcessor,"
- + " ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineAddEnvVarsProcessor,"
+ + " ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineAddFieldsProcessor,"
+ + " ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor,"
+ " ObservabilityPipelineDedupeProcessor,"
+ " ObservabilityPipelineEnrichmentTableProcessor,"
- + " ObservabilityPipelineReduceProcessor, ObservabilityPipelineThrottleProcessor,"
- + " ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor");
+ + " ObservabilityPipelineGenerateMetricsProcessor,"
+ + " ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor,"
+ + " ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor,"
+ + " ObservabilityPipelineReduceProcessor, ObservabilityPipelineRemoveFieldsProcessor,"
+ + " ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineSampleProcessor,"
+ + " ObservabilityPipelineSensitiveDataScannerProcessor,"
+ + " ObservabilityPipelineThrottleProcessor, ObservabilityPipelineMetricTagsProcessor");
}
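
`setActualInstance` accepts only the registered member types; anything else falls through every `JSON.isInstanceOf` check and reaches the `RuntimeException` assembled above, whose message now lists the reordered member set. A hypothetical negative example:

    // A plain String matches no registered member, so the call throws.
    ObservabilityPipelineConfigProcessorItem bad = new ObservabilityPipelineConfigProcessorItem();
    try {
      bad.setActualInstance("not a processor");
    } catch (RuntimeException e) {
      System.out.println(e.getMessage()); // "Invalid instance type. Must be ..."
    }
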
/**
* Get the actual instance, which can be the following: ObservabilityPipelineFilterProcessor,
- * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor,
- * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor,
- * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor,
- * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor,
- * ObservabilityPipelineSensitiveDataScannerProcessor, ObservabilityPipelineOcsfMapperProcessor,
- * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineDedupeProcessor,
- * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineReduceProcessor,
- * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineCustomProcessor,
- * ObservabilityPipelineDatadogTagsProcessor
+ * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineAddFieldsProcessor,
+ * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor,
+ * ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor,
+ * ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineOcsfMapperProcessor,
+ * ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor,
+ * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor,
+ * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor,
+ * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor,
+ * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineMetricTagsProcessor
*
* @return The actual instance (ObservabilityPipelineFilterProcessor,
- * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor,
- * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor,
- * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor,
- * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor,
- * ObservabilityPipelineSensitiveDataScannerProcessor,
- * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineAddEnvVarsProcessor,
+ * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineAddFieldsProcessor,
+ * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor,
* ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor,
- * ObservabilityPipelineReduceProcessor, ObservabilityPipelineThrottleProcessor,
- * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor)
+ * ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineOcsfMapperProcessor,
+ * ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor,
+ * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor,
+ * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor,
+ * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor,
+ * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineMetricTagsProcessor)
*/
@Override
public Object getActualInstance() {
@@ -1359,27 +1422,15 @@ public ObservabilityPipelineFilterProcessor getObservabilityPipelineFilterProces
}
/**
- * Get the actual instance of `ObservabilityPipelineParseJSONProcessor`. If the actual instance is
- * not `ObservabilityPipelineParseJSONProcessor`, the ClassCastException will be thrown.
- *
- * @return The actual instance of `ObservabilityPipelineParseJSONProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineParseJSONProcessor`
- */
- public ObservabilityPipelineParseJSONProcessor getObservabilityPipelineParseJSONProcessor()
- throws ClassCastException {
- return (ObservabilityPipelineParseJSONProcessor) super.getActualInstance();
- }
-
- /**
- * Get the actual instance of `ObservabilityPipelineQuotaProcessor`. If the actual instance is not
- * `ObservabilityPipelineQuotaProcessor`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineAddEnvVarsProcessor`. If the actual instance
+ * is not `ObservabilityPipelineAddEnvVarsProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineQuotaProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineQuotaProcessor`
+ * @return The actual instance of `ObservabilityPipelineAddEnvVarsProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineAddEnvVarsProcessor`
*/
- public ObservabilityPipelineQuotaProcessor getObservabilityPipelineQuotaProcessor()
+ public ObservabilityPipelineAddEnvVarsProcessor getObservabilityPipelineAddEnvVarsProcessor()
throws ClassCastException {
- return (ObservabilityPipelineQuotaProcessor) super.getActualInstance();
+ return (ObservabilityPipelineAddEnvVarsProcessor) super.getActualInstance();
}
/**
@@ -1395,27 +1446,53 @@ public ObservabilityPipelineAddFieldsProcessor getObservabilityPipelineAddFields
}
/**
- * Get the actual instance of `ObservabilityPipelineRemoveFieldsProcessor`. If the actual instance
- * is not `ObservabilityPipelineRemoveFieldsProcessor`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineCustomProcessor`. If the actual instance is
+ * not `ObservabilityPipelineCustomProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineRemoveFieldsProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineRemoveFieldsProcessor`
+ * @return The actual instance of `ObservabilityPipelineCustomProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineCustomProcessor`
*/
- public ObservabilityPipelineRemoveFieldsProcessor getObservabilityPipelineRemoveFieldsProcessor()
+ public ObservabilityPipelineCustomProcessor getObservabilityPipelineCustomProcessor()
throws ClassCastException {
- return (ObservabilityPipelineRemoveFieldsProcessor) super.getActualInstance();
+ return (ObservabilityPipelineCustomProcessor) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineRenameFieldsProcessor`. If the actual instance
- * is not `ObservabilityPipelineRenameFieldsProcessor`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineDatadogTagsProcessor`. If the actual instance
+ * is not `ObservabilityPipelineDatadogTagsProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineRenameFieldsProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineRenameFieldsProcessor`
+ * @return The actual instance of `ObservabilityPipelineDatadogTagsProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogTagsProcessor`
*/
- public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRenameFieldsProcessor()
+ public ObservabilityPipelineDatadogTagsProcessor getObservabilityPipelineDatadogTagsProcessor()
throws ClassCastException {
- return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance();
+ return (ObservabilityPipelineDatadogTagsProcessor) super.getActualInstance();
+ }
+
+ /**
+ * Get the actual instance of `ObservabilityPipelineDedupeProcessor`. If the actual instance is
+ * not `ObservabilityPipelineDedupeProcessor`, the ClassCastException will be thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineDedupeProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineDedupeProcessor`
+ */
+ public ObservabilityPipelineDedupeProcessor getObservabilityPipelineDedupeProcessor()
+ throws ClassCastException {
+ return (ObservabilityPipelineDedupeProcessor) super.getActualInstance();
+ }
+
+ /**
+ * Get the actual instance of `ObservabilityPipelineEnrichmentTableProcessor`. If the actual
+ * instance is not `ObservabilityPipelineEnrichmentTableProcessor`, the ClassCastException will be
+ * thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineEnrichmentTableProcessor`
+ * @throws ClassCastException if the instance is not
+ * `ObservabilityPipelineEnrichmentTableProcessor`
+ */
+ public ObservabilityPipelineEnrichmentTableProcessor
+ getObservabilityPipelineEnrichmentTableProcessor() throws ClassCastException {
+ return (ObservabilityPipelineEnrichmentTableProcessor) super.getActualInstance();
}
/**
@@ -1433,15 +1510,15 @@ public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRename
}
/**
- * Get the actual instance of `ObservabilityPipelineSampleProcessor`. If the actual instance is
- * not `ObservabilityPipelineSampleProcessor`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineOcsfMapperProcessor`. If the actual instance
+ * is not `ObservabilityPipelineOcsfMapperProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineSampleProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineSampleProcessor`
+ * @return The actual instance of `ObservabilityPipelineOcsfMapperProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineOcsfMapperProcessor`
*/
- public ObservabilityPipelineSampleProcessor getObservabilityPipelineSampleProcessor()
+ public ObservabilityPipelineOcsfMapperProcessor getObservabilityPipelineOcsfMapperProcessor()
throws ClassCastException {
- return (ObservabilityPipelineSampleProcessor) super.getActualInstance();
+ return (ObservabilityPipelineOcsfMapperProcessor) super.getActualInstance();
}
/**
@@ -1457,79 +1534,89 @@ public ObservabilityPipelineParseGrokProcessor getObservabilityPipelineParseGrok
}
/**
- * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`. If the actual
- * instance is not `ObservabilityPipelineSensitiveDataScannerProcessor`, the ClassCastException
- * will be thrown.
+ * Get the actual instance of `ObservabilityPipelineParseJSONProcessor`. If the actual instance is
+ * not `ObservabilityPipelineParseJSONProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`
- * @throws ClassCastException if the instance is not
- * `ObservabilityPipelineSensitiveDataScannerProcessor`
+ * @return The actual instance of `ObservabilityPipelineParseJSONProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineParseJSONProcessor`
*/
- public ObservabilityPipelineSensitiveDataScannerProcessor
- getObservabilityPipelineSensitiveDataScannerProcessor() throws ClassCastException {
- return (ObservabilityPipelineSensitiveDataScannerProcessor) super.getActualInstance();
+ public ObservabilityPipelineParseJSONProcessor getObservabilityPipelineParseJSONProcessor()
+ throws ClassCastException {
+ return (ObservabilityPipelineParseJSONProcessor) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineOcsfMapperProcessor`. If the actual instance
- * is not `ObservabilityPipelineOcsfMapperProcessor`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineQuotaProcessor`. If the actual instance is not
+ * `ObservabilityPipelineQuotaProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineOcsfMapperProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineOcsfMapperProcessor`
+ * @return The actual instance of `ObservabilityPipelineQuotaProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineQuotaProcessor`
*/
- public ObservabilityPipelineOcsfMapperProcessor getObservabilityPipelineOcsfMapperProcessor()
+ public ObservabilityPipelineQuotaProcessor getObservabilityPipelineQuotaProcessor()
throws ClassCastException {
- return (ObservabilityPipelineOcsfMapperProcessor) super.getActualInstance();
+ return (ObservabilityPipelineQuotaProcessor) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineAddEnvVarsProcessor`. If the actual instance
- * is not `ObservabilityPipelineAddEnvVarsProcessor`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineReduceProcessor`. If the actual instance is
+ * not `ObservabilityPipelineReduceProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineAddEnvVarsProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineAddEnvVarsProcessor`
+ * @return The actual instance of `ObservabilityPipelineReduceProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineReduceProcessor`
*/
- public ObservabilityPipelineAddEnvVarsProcessor getObservabilityPipelineAddEnvVarsProcessor()
+ public ObservabilityPipelineReduceProcessor getObservabilityPipelineReduceProcessor()
throws ClassCastException {
- return (ObservabilityPipelineAddEnvVarsProcessor) super.getActualInstance();
+ return (ObservabilityPipelineReduceProcessor) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineDedupeProcessor`. If the actual instance is
- * not `ObservabilityPipelineDedupeProcessor`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineRemoveFieldsProcessor`. If the actual instance
+ * is not `ObservabilityPipelineRemoveFieldsProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineDedupeProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineDedupeProcessor`
+ * @return The actual instance of `ObservabilityPipelineRemoveFieldsProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineRemoveFieldsProcessor`
*/
- public ObservabilityPipelineDedupeProcessor getObservabilityPipelineDedupeProcessor()
+ public ObservabilityPipelineRemoveFieldsProcessor getObservabilityPipelineRemoveFieldsProcessor()
throws ClassCastException {
- return (ObservabilityPipelineDedupeProcessor) super.getActualInstance();
+ return (ObservabilityPipelineRemoveFieldsProcessor) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineEnrichmentTableProcessor`. If the actual
- * instance is not `ObservabilityPipelineEnrichmentTableProcessor`, the ClassCastException will be
- * thrown.
+ * Get the actual instance of `ObservabilityPipelineRenameFieldsProcessor`. If the actual instance
+ * is not `ObservabilityPipelineRenameFieldsProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineEnrichmentTableProcessor`
- * @throws ClassCastException if the instance is not
- * `ObservabilityPipelineEnrichmentTableProcessor`
+ * @return The actual instance of `ObservabilityPipelineRenameFieldsProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineRenameFieldsProcessor`
*/
- public ObservabilityPipelineEnrichmentTableProcessor
- getObservabilityPipelineEnrichmentTableProcessor() throws ClassCastException {
- return (ObservabilityPipelineEnrichmentTableProcessor) super.getActualInstance();
+ public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRenameFieldsProcessor()
+ throws ClassCastException {
+ return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineReduceProcessor`. If the actual instance is
- * not `ObservabilityPipelineReduceProcessor`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineSampleProcessor`. If the actual instance is
+ * not `ObservabilityPipelineSampleProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineReduceProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineReduceProcessor`
+ * @return The actual instance of `ObservabilityPipelineSampleProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineSampleProcessor`
*/
- public ObservabilityPipelineReduceProcessor getObservabilityPipelineReduceProcessor()
+ public ObservabilityPipelineSampleProcessor getObservabilityPipelineSampleProcessor()
throws ClassCastException {
- return (ObservabilityPipelineReduceProcessor) super.getActualInstance();
+ return (ObservabilityPipelineSampleProcessor) super.getActualInstance();
+ }
+
+ /**
+ * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`. If the actual
+ * instance is not `ObservabilityPipelineSensitiveDataScannerProcessor`, the ClassCastException
+ * will be thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`
+ * @throws ClassCastException if the instance is not
+ * `ObservabilityPipelineSensitiveDataScannerProcessor`
+ */
+ public ObservabilityPipelineSensitiveDataScannerProcessor
+ getObservabilityPipelineSensitiveDataScannerProcessor() throws ClassCastException {
+ return (ObservabilityPipelineSensitiveDataScannerProcessor) super.getActualInstance();
}
/**
@@ -1545,26 +1632,14 @@ public ObservabilityPipelineThrottleProcessor getObservabilityPipelineThrottlePr
}
/**
- * Get the actual instance of `ObservabilityPipelineCustomProcessor`. If the actual instance is
- * not `ObservabilityPipelineCustomProcessor`, the ClassCastException will be thrown.
- *
- * @return The actual instance of `ObservabilityPipelineCustomProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineCustomProcessor`
- */
- public ObservabilityPipelineCustomProcessor getObservabilityPipelineCustomProcessor()
- throws ClassCastException {
- return (ObservabilityPipelineCustomProcessor) super.getActualInstance();
- }
-
- /**
- * Get the actual instance of `ObservabilityPipelineDatadogTagsProcessor`. If the actual instance
- * is not `ObservabilityPipelineDatadogTagsProcessor`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineMetricTagsProcessor`. If the actual instance
+ * is not `ObservabilityPipelineMetricTagsProcessor`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineDatadogTagsProcessor`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogTagsProcessor`
+ * @return The actual instance of `ObservabilityPipelineMetricTagsProcessor`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineMetricTagsProcessor`
*/
- public ObservabilityPipelineDatadogTagsProcessor getObservabilityPipelineDatadogTagsProcessor()
+ public ObservabilityPipelineMetricTagsProcessor getObservabilityPipelineMetricTagsProcessor()
throws ClassCastException {
- return (ObservabilityPipelineDatadogTagsProcessor) super.getActualInstance();
+ return (ObservabilityPipelineMetricTagsProcessor) super.getActualInstance();
}
}
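
The block above is the tail of the generated oneOf wrapper for processor items: `getActualInstance()` returns the untyped payload, and each typed getter merely casts it, so calling the wrong getter throws the documented `ClassCastException`. A minimal usage sketch, assuming the wrapper type is `ObservabilityPipelineConfigProcessorItem` (the file name sits outside this hunk) and guarding each cast with `instanceof`:

```java
import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorItem;
import com.datadog.api.client.v2.model.ObservabilityPipelineMetricTagsProcessor;
import com.datadog.api.client.v2.model.ObservabilityPipelineQuotaProcessor;

public class ProcessorItemDispatchExample {
  // Branch on the wrapper's runtime payload instead of calling a typed getter
  // blindly; each getter only casts getActualInstance(), so a mismatched call
  // throws the ClassCastException described in the Javadoc above.
  static String describe(ObservabilityPipelineConfigProcessorItem item) {
    Object actual = item.getActualInstance();
    if (actual instanceof ObservabilityPipelineQuotaProcessor) {
      ObservabilityPipelineQuotaProcessor quota =
          item.getObservabilityPipelineQuotaProcessor();
      return "quota processor: " + quota;
    }
    if (actual instanceof ObservabilityPipelineMetricTagsProcessor) {
      ObservabilityPipelineMetricTagsProcessor tags =
          item.getObservabilityPipelineMetricTagsProcessor();
      return "metric tags processor: " + tags;
    }
    return "other processor variant: " + actual;
  }
}
```

The same dispatch works for any of the variants listed in the `getActualInstance()` Javadoc above.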
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java
index 58c17babac0..600031dac67 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java
@@ -89,52 +89,6 @@ public ObservabilityPipelineConfigSourceItem deserialize(
boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS);
int match = 0;
JsonToken token = tree.traverse(jp.getCodec()).nextToken();
- // deserialize ObservabilityPipelineKafkaSource
- try {
- boolean attemptParsing = true;
- // ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineKafkaSource.class.equals(Integer.class)
- || ObservabilityPipelineKafkaSource.class.equals(Long.class)
- || ObservabilityPipelineKafkaSource.class.equals(Float.class)
- || ObservabilityPipelineKafkaSource.class.equals(Double.class)
- || ObservabilityPipelineKafkaSource.class.equals(Boolean.class)
- || ObservabilityPipelineKafkaSource.class.equals(String.class)) {
- attemptParsing = typeCoercion;
- if (!attemptParsing) {
- attemptParsing |=
- ((ObservabilityPipelineKafkaSource.class.equals(Integer.class)
- || ObservabilityPipelineKafkaSource.class.equals(Long.class))
- && token == JsonToken.VALUE_NUMBER_INT);
- attemptParsing |=
- ((ObservabilityPipelineKafkaSource.class.equals(Float.class)
- || ObservabilityPipelineKafkaSource.class.equals(Double.class))
- && (token == JsonToken.VALUE_NUMBER_FLOAT
- || token == JsonToken.VALUE_NUMBER_INT));
- attemptParsing |=
- (ObservabilityPipelineKafkaSource.class.equals(Boolean.class)
- && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
- attemptParsing |=
- (ObservabilityPipelineKafkaSource.class.equals(String.class)
- && token == JsonToken.VALUE_STRING);
- }
- }
- if (attemptParsing) {
- tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineKafkaSource.class);
- // TODO: there is no validation against JSON schema constraints
- // (min, max, enum, pattern...), this does not perform a strict JSON
- // validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineKafkaSource) tmp).unparsed) {
- deserialized = tmp;
- match++;
- }
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineKafkaSource'");
- }
- } catch (Exception e) {
- // deserialization failed, continue
- log.log(
- Level.FINER, "Input data does not match schema 'ObservabilityPipelineKafkaSource'", e);
- }
-
// deserialize ObservabilityPipelineDatadogAgentSource
try {
boolean attemptParsing = true;
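
Every candidate block in this deserializer opens with the same scalar guard, `X.class.equals(Integer.class) || ... || X.class.equals(String.class)`. For the model classes in this file that guard is always false (generator boilerplate that only matters when a oneOf branch is a primitive type); when it does apply, parsing is only attempted if the JSON token already has the matching shape or `MapperFeature.ALLOW_COERCION_OF_SCALARS` is enabled on the client mapper. A small sketch of the coercion behavior the guard accounts for, using plain Jackson (the payload and setup here are illustrative, not part of the generated client):

```java
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ScalarCoercionSketch {
  public static void main(String[] args) throws Exception {
    // With scalar coercion enabled (Jackson's default), a JSON string can be
    // read as an Integer, so a primitive oneOf branch would match too eagerly.
    ObjectMapper lenient = new ObjectMapper();
    System.out.println(lenient.readValue("\"123\"", Integer.class)); // 123

    // With coercion disabled, only a real JSON number token is accepted,
    // which is the token check the generated guard performs by hand.
    ObjectMapper strict =
        new ObjectMapper().disable(MapperFeature.ALLOW_COERCION_OF_SCALARS);
    try {
      strict.readValue("\"123\"", Integer.class);
    } catch (Exception e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}
```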
@@ -186,149 +140,152 @@ public ObservabilityPipelineConfigSourceItem deserialize(
e);
}
- // deserialize ObservabilityPipelineSplunkTcpSource
+ // deserialize ObservabilityPipelineAmazonDataFirehoseSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class)
- || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class)
- || ObservabilityPipelineSplunkTcpSource.class.equals(Float.class)
- || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class)
- || ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class)
- || ObservabilityPipelineSplunkTcpSource.class.equals(String.class)) {
+ if (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Integer.class)
+ || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Long.class)
+ || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Float.class)
+ || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Double.class)
+ || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Boolean.class)
+ || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class)
- || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class))
+ ((ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Integer.class)
+ || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSplunkTcpSource.class.equals(Float.class)
- || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class))
+ ((ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Float.class)
+ || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class)
+ (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSplunkTcpSource.class.equals(String.class)
+ (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkTcpSource.class);
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineAmazonDataFirehoseSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSplunkTcpSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineAmazonDataFirehoseSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkTcpSource'");
+ log.log(
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineAmazonDataFirehoseSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineSplunkTcpSource'",
+ "Input data does not match schema 'ObservabilityPipelineAmazonDataFirehoseSource'",
e);
}
- // deserialize ObservabilityPipelineSplunkHecSource
+ // deserialize ObservabilityPipelineAmazonS3Source
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSplunkHecSource.class.equals(Integer.class)
- || ObservabilityPipelineSplunkHecSource.class.equals(Long.class)
- || ObservabilityPipelineSplunkHecSource.class.equals(Float.class)
- || ObservabilityPipelineSplunkHecSource.class.equals(Double.class)
- || ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class)
- || ObservabilityPipelineSplunkHecSource.class.equals(String.class)) {
+ if (ObservabilityPipelineAmazonS3Source.class.equals(Integer.class)
+ || ObservabilityPipelineAmazonS3Source.class.equals(Long.class)
+ || ObservabilityPipelineAmazonS3Source.class.equals(Float.class)
+ || ObservabilityPipelineAmazonS3Source.class.equals(Double.class)
+ || ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class)
+ || ObservabilityPipelineAmazonS3Source.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSplunkHecSource.class.equals(Integer.class)
- || ObservabilityPipelineSplunkHecSource.class.equals(Long.class))
+ ((ObservabilityPipelineAmazonS3Source.class.equals(Integer.class)
+ || ObservabilityPipelineAmazonS3Source.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSplunkHecSource.class.equals(Float.class)
- || ObservabilityPipelineSplunkHecSource.class.equals(Double.class))
+ ((ObservabilityPipelineAmazonS3Source.class.equals(Float.class)
+ || ObservabilityPipelineAmazonS3Source.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class)
+ (ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSplunkHecSource.class.equals(String.class)
+ (ObservabilityPipelineAmazonS3Source.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
- tmp =
- tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkHecSource.class);
+ tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineAmazonS3Source.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSplunkHecSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineAmazonS3Source) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecSource'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineAmazonS3Source'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineSplunkHecSource'",
+ "Input data does not match schema 'ObservabilityPipelineAmazonS3Source'",
e);
}
- // deserialize ObservabilityPipelineAmazonS3Source
+ // deserialize ObservabilityPipelineFluentBitSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineAmazonS3Source.class.equals(Integer.class)
- || ObservabilityPipelineAmazonS3Source.class.equals(Long.class)
- || ObservabilityPipelineAmazonS3Source.class.equals(Float.class)
- || ObservabilityPipelineAmazonS3Source.class.equals(Double.class)
- || ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class)
- || ObservabilityPipelineAmazonS3Source.class.equals(String.class)) {
+ if (ObservabilityPipelineFluentBitSource.class.equals(Integer.class)
+ || ObservabilityPipelineFluentBitSource.class.equals(Long.class)
+ || ObservabilityPipelineFluentBitSource.class.equals(Float.class)
+ || ObservabilityPipelineFluentBitSource.class.equals(Double.class)
+ || ObservabilityPipelineFluentBitSource.class.equals(Boolean.class)
+ || ObservabilityPipelineFluentBitSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineAmazonS3Source.class.equals(Integer.class)
- || ObservabilityPipelineAmazonS3Source.class.equals(Long.class))
+ ((ObservabilityPipelineFluentBitSource.class.equals(Integer.class)
+ || ObservabilityPipelineFluentBitSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineAmazonS3Source.class.equals(Float.class)
- || ObservabilityPipelineAmazonS3Source.class.equals(Double.class))
+ ((ObservabilityPipelineFluentBitSource.class.equals(Float.class)
+ || ObservabilityPipelineFluentBitSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class)
+ (ObservabilityPipelineFluentBitSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineAmazonS3Source.class.equals(String.class)
+ (ObservabilityPipelineFluentBitSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
- tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineAmazonS3Source.class);
+ tmp =
+ tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFluentBitSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineAmazonS3Source) tmp).unparsed) {
+ if (!((ObservabilityPipelineFluentBitSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineAmazonS3Source'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFluentBitSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineAmazonS3Source'",
+ "Input data does not match schema 'ObservabilityPipelineFluentBitSource'",
e);
}
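
Reordering aside, each candidate block follows one pattern: parse the same JSON tree as one schema, count a match only when the object comes back without unparsed leftovers, and continue to the next candidate; the final count lets the tail of the method (outside these hunks) detect ambiguous or unmatched payloads, which is also what the repeated TODO comments caution about. A standalone sketch of that match-counting idea, using two hypothetical stand-in schemas rather than the generated models (which track leftovers via their `unparsed` flag) and a strict exactly-one-match rule:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;

public class OneOfMatchCountingSketch {
  // Hypothetical candidate schemas standing in for the generated source models.
  static class AgentSource { public String id; public String type; }
  static class KafkaSource { public String id; public String type; public String group_id; }

  // Try every candidate against the same JSON tree and count the successes,
  // mirroring the generated deserializer's match counter. Here Jackson's
  // default FAIL_ON_UNKNOWN_PROPERTIES plays the role of the models'
  // `unparsed` flag: a payload with leftover fields rejects the candidate.
  static Object deserializeOneOf(ObjectMapper mapper, String json) throws Exception {
    JsonNode tree = mapper.readTree(json);
    Object deserialized = null;
    int match = 0;
    for (Class<?> candidate : List.of(AgentSource.class, KafkaSource.class)) {
      try {
        deserialized = mapper.treeToValue(tree, candidate);
        match++;
      } catch (Exception e) {
        // candidate rejected the payload; continue, as the generated code does
      }
    }
    // Lenient parsing can push the count above one (the generated TODO's
    // caveat); a strict oneOf demands exactly one match.
    if (match != 1) {
      throw new IllegalArgumentException("payload matched " + match + " candidate schemas");
    }
    return deserialized;
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Matches only KafkaSource: AgentSource rejects the unknown `group_id`.
    System.out.println(
        deserializeOneOf(mapper, "{\"id\":\"s1\",\"type\":\"kafka\",\"group_id\":\"g\"}"));
  }
}
```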
@@ -380,493 +337,536 @@ public ObservabilityPipelineConfigSourceItem deserialize(
e);
}
- // deserialize ObservabilityPipelineFluentBitSource
+ // deserialize ObservabilityPipelineGooglePubSubSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineFluentBitSource.class.equals(Integer.class)
- || ObservabilityPipelineFluentBitSource.class.equals(Long.class)
- || ObservabilityPipelineFluentBitSource.class.equals(Float.class)
- || ObservabilityPipelineFluentBitSource.class.equals(Double.class)
- || ObservabilityPipelineFluentBitSource.class.equals(Boolean.class)
- || ObservabilityPipelineFluentBitSource.class.equals(String.class)) {
+ if (ObservabilityPipelineGooglePubSubSource.class.equals(Integer.class)
+ || ObservabilityPipelineGooglePubSubSource.class.equals(Long.class)
+ || ObservabilityPipelineGooglePubSubSource.class.equals(Float.class)
+ || ObservabilityPipelineGooglePubSubSource.class.equals(Double.class)
+ || ObservabilityPipelineGooglePubSubSource.class.equals(Boolean.class)
+ || ObservabilityPipelineGooglePubSubSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineFluentBitSource.class.equals(Integer.class)
- || ObservabilityPipelineFluentBitSource.class.equals(Long.class))
+ ((ObservabilityPipelineGooglePubSubSource.class.equals(Integer.class)
+ || ObservabilityPipelineGooglePubSubSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineFluentBitSource.class.equals(Float.class)
- || ObservabilityPipelineFluentBitSource.class.equals(Double.class))
+ ((ObservabilityPipelineGooglePubSubSource.class.equals(Float.class)
+ || ObservabilityPipelineGooglePubSubSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineFluentBitSource.class.equals(Boolean.class)
+ (ObservabilityPipelineGooglePubSubSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineFluentBitSource.class.equals(String.class)
+ (ObservabilityPipelineGooglePubSubSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFluentBitSource.class);
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineGooglePubSubSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineFluentBitSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineGooglePubSubSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFluentBitSource'");
+ log.log(
+ Level.FINER, "Input data matches schema 'ObservabilityPipelineGooglePubSubSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineFluentBitSource'",
+ "Input data does not match schema 'ObservabilityPipelineGooglePubSubSource'",
e);
}
- // deserialize ObservabilityPipelineHttpServerSource
+ // deserialize ObservabilityPipelineHttpClientSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineHttpServerSource.class.equals(Integer.class)
- || ObservabilityPipelineHttpServerSource.class.equals(Long.class)
- || ObservabilityPipelineHttpServerSource.class.equals(Float.class)
- || ObservabilityPipelineHttpServerSource.class.equals(Double.class)
- || ObservabilityPipelineHttpServerSource.class.equals(Boolean.class)
- || ObservabilityPipelineHttpServerSource.class.equals(String.class)) {
+ if (ObservabilityPipelineHttpClientSource.class.equals(Integer.class)
+ || ObservabilityPipelineHttpClientSource.class.equals(Long.class)
+ || ObservabilityPipelineHttpClientSource.class.equals(Float.class)
+ || ObservabilityPipelineHttpClientSource.class.equals(Double.class)
+ || ObservabilityPipelineHttpClientSource.class.equals(Boolean.class)
+ || ObservabilityPipelineHttpClientSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineHttpServerSource.class.equals(Integer.class)
- || ObservabilityPipelineHttpServerSource.class.equals(Long.class))
+ ((ObservabilityPipelineHttpClientSource.class.equals(Integer.class)
+ || ObservabilityPipelineHttpClientSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineHttpServerSource.class.equals(Float.class)
- || ObservabilityPipelineHttpServerSource.class.equals(Double.class))
+ ((ObservabilityPipelineHttpClientSource.class.equals(Float.class)
+ || ObservabilityPipelineHttpClientSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineHttpServerSource.class.equals(Boolean.class)
+ (ObservabilityPipelineHttpClientSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineHttpServerSource.class.equals(String.class)
+ (ObservabilityPipelineHttpClientSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineHttpServerSource.class);
+ tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineHttpClientSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineHttpServerSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineHttpClientSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineHttpServerSource'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineHttpClientSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineHttpServerSource'",
+ "Input data does not match schema 'ObservabilityPipelineHttpClientSource'",
e);
}
- // deserialize ObservabilityPipelineSumoLogicSource
+ // deserialize ObservabilityPipelineHttpServerSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSumoLogicSource.class.equals(Integer.class)
- || ObservabilityPipelineSumoLogicSource.class.equals(Long.class)
- || ObservabilityPipelineSumoLogicSource.class.equals(Float.class)
- || ObservabilityPipelineSumoLogicSource.class.equals(Double.class)
- || ObservabilityPipelineSumoLogicSource.class.equals(Boolean.class)
- || ObservabilityPipelineSumoLogicSource.class.equals(String.class)) {
+ if (ObservabilityPipelineHttpServerSource.class.equals(Integer.class)
+ || ObservabilityPipelineHttpServerSource.class.equals(Long.class)
+ || ObservabilityPipelineHttpServerSource.class.equals(Float.class)
+ || ObservabilityPipelineHttpServerSource.class.equals(Double.class)
+ || ObservabilityPipelineHttpServerSource.class.equals(Boolean.class)
+ || ObservabilityPipelineHttpServerSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSumoLogicSource.class.equals(Integer.class)
- || ObservabilityPipelineSumoLogicSource.class.equals(Long.class))
+ ((ObservabilityPipelineHttpServerSource.class.equals(Integer.class)
+ || ObservabilityPipelineHttpServerSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSumoLogicSource.class.equals(Float.class)
- || ObservabilityPipelineSumoLogicSource.class.equals(Double.class))
+ ((ObservabilityPipelineHttpServerSource.class.equals(Float.class)
+ || ObservabilityPipelineHttpServerSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSumoLogicSource.class.equals(Boolean.class)
+ (ObservabilityPipelineHttpServerSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSumoLogicSource.class.equals(String.class)
+ (ObservabilityPipelineHttpServerSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSumoLogicSource.class);
+ tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineHttpServerSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSumoLogicSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineHttpServerSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSumoLogicSource'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineHttpServerSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineSumoLogicSource'",
+ "Input data does not match schema 'ObservabilityPipelineHttpServerSource'",
e);
}
- // deserialize ObservabilityPipelineRsyslogSource
+ // deserialize ObservabilityPipelineKafkaSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineRsyslogSource.class.equals(Integer.class)
- || ObservabilityPipelineRsyslogSource.class.equals(Long.class)
- || ObservabilityPipelineRsyslogSource.class.equals(Float.class)
- || ObservabilityPipelineRsyslogSource.class.equals(Double.class)
- || ObservabilityPipelineRsyslogSource.class.equals(Boolean.class)
- || ObservabilityPipelineRsyslogSource.class.equals(String.class)) {
+ if (ObservabilityPipelineKafkaSource.class.equals(Integer.class)
+ || ObservabilityPipelineKafkaSource.class.equals(Long.class)
+ || ObservabilityPipelineKafkaSource.class.equals(Float.class)
+ || ObservabilityPipelineKafkaSource.class.equals(Double.class)
+ || ObservabilityPipelineKafkaSource.class.equals(Boolean.class)
+ || ObservabilityPipelineKafkaSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineRsyslogSource.class.equals(Integer.class)
- || ObservabilityPipelineRsyslogSource.class.equals(Long.class))
+ ((ObservabilityPipelineKafkaSource.class.equals(Integer.class)
+ || ObservabilityPipelineKafkaSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineRsyslogSource.class.equals(Float.class)
- || ObservabilityPipelineRsyslogSource.class.equals(Double.class))
+ ((ObservabilityPipelineKafkaSource.class.equals(Float.class)
+ || ObservabilityPipelineKafkaSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineRsyslogSource.class.equals(Boolean.class)
+ (ObservabilityPipelineKafkaSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineRsyslogSource.class.equals(String.class)
+ (ObservabilityPipelineKafkaSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
- tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineRsyslogSource.class);
+ tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineKafkaSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineRsyslogSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineKafkaSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineRsyslogSource'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineKafkaSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
- Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineRsyslogSource'",
- e);
+ Level.FINER, "Input data does not match schema 'ObservabilityPipelineKafkaSource'", e);
}
- // deserialize ObservabilityPipelineSyslogNgSource
+ // deserialize ObservabilityPipelineLogstashSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSyslogNgSource.class.equals(Integer.class)
- || ObservabilityPipelineSyslogNgSource.class.equals(Long.class)
- || ObservabilityPipelineSyslogNgSource.class.equals(Float.class)
- || ObservabilityPipelineSyslogNgSource.class.equals(Double.class)
- || ObservabilityPipelineSyslogNgSource.class.equals(Boolean.class)
- || ObservabilityPipelineSyslogNgSource.class.equals(String.class)) {
+ if (ObservabilityPipelineLogstashSource.class.equals(Integer.class)
+ || ObservabilityPipelineLogstashSource.class.equals(Long.class)
+ || ObservabilityPipelineLogstashSource.class.equals(Float.class)
+ || ObservabilityPipelineLogstashSource.class.equals(Double.class)
+ || ObservabilityPipelineLogstashSource.class.equals(Boolean.class)
+ || ObservabilityPipelineLogstashSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSyslogNgSource.class.equals(Integer.class)
- || ObservabilityPipelineSyslogNgSource.class.equals(Long.class))
+ ((ObservabilityPipelineLogstashSource.class.equals(Integer.class)
+ || ObservabilityPipelineLogstashSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSyslogNgSource.class.equals(Float.class)
- || ObservabilityPipelineSyslogNgSource.class.equals(Double.class))
+ ((ObservabilityPipelineLogstashSource.class.equals(Float.class)
+ || ObservabilityPipelineLogstashSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSyslogNgSource.class.equals(Boolean.class)
+ (ObservabilityPipelineLogstashSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSyslogNgSource.class.equals(String.class)
+ (ObservabilityPipelineLogstashSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
- tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSyslogNgSource.class);
+ tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineLogstashSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSyslogNgSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineLogstashSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSyslogNgSource'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineLogstashSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineSyslogNgSource'",
+ "Input data does not match schema 'ObservabilityPipelineLogstashSource'",
e);
}
- // deserialize ObservabilityPipelineAmazonDataFirehoseSource
+ // deserialize ObservabilityPipelineRsyslogSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Integer.class)
- || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Long.class)
- || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Float.class)
- || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Double.class)
- || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Boolean.class)
- || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(String.class)) {
+ if (ObservabilityPipelineRsyslogSource.class.equals(Integer.class)
+ || ObservabilityPipelineRsyslogSource.class.equals(Long.class)
+ || ObservabilityPipelineRsyslogSource.class.equals(Float.class)
+ || ObservabilityPipelineRsyslogSource.class.equals(Double.class)
+ || ObservabilityPipelineRsyslogSource.class.equals(Boolean.class)
+ || ObservabilityPipelineRsyslogSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Integer.class)
- || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Long.class))
+ ((ObservabilityPipelineRsyslogSource.class.equals(Integer.class)
+ || ObservabilityPipelineRsyslogSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Float.class)
- || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Double.class))
+ ((ObservabilityPipelineRsyslogSource.class.equals(Float.class)
+ || ObservabilityPipelineRsyslogSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Boolean.class)
+ (ObservabilityPipelineRsyslogSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(String.class)
+ (ObservabilityPipelineRsyslogSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
- tmp =
- tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineAmazonDataFirehoseSource.class);
+ tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineRsyslogSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineAmazonDataFirehoseSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineRsyslogSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(
- Level.FINER,
- "Input data matches schema 'ObservabilityPipelineAmazonDataFirehoseSource'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineRsyslogSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineAmazonDataFirehoseSource'",
+ "Input data does not match schema 'ObservabilityPipelineRsyslogSource'",
e);
}
- // deserialize ObservabilityPipelineGooglePubSubSource
+ // deserialize ObservabilityPipelineSocketSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineGooglePubSubSource.class.equals(Integer.class)
- || ObservabilityPipelineGooglePubSubSource.class.equals(Long.class)
- || ObservabilityPipelineGooglePubSubSource.class.equals(Float.class)
- || ObservabilityPipelineGooglePubSubSource.class.equals(Double.class)
- || ObservabilityPipelineGooglePubSubSource.class.equals(Boolean.class)
- || ObservabilityPipelineGooglePubSubSource.class.equals(String.class)) {
+ if (ObservabilityPipelineSocketSource.class.equals(Integer.class)
+ || ObservabilityPipelineSocketSource.class.equals(Long.class)
+ || ObservabilityPipelineSocketSource.class.equals(Float.class)
+ || ObservabilityPipelineSocketSource.class.equals(Double.class)
+ || ObservabilityPipelineSocketSource.class.equals(Boolean.class)
+ || ObservabilityPipelineSocketSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineGooglePubSubSource.class.equals(Integer.class)
- || ObservabilityPipelineGooglePubSubSource.class.equals(Long.class))
+ ((ObservabilityPipelineSocketSource.class.equals(Integer.class)
+ || ObservabilityPipelineSocketSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineGooglePubSubSource.class.equals(Float.class)
- || ObservabilityPipelineGooglePubSubSource.class.equals(Double.class))
+ ((ObservabilityPipelineSocketSource.class.equals(Float.class)
+ || ObservabilityPipelineSocketSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineGooglePubSubSource.class.equals(Boolean.class)
+ (ObservabilityPipelineSocketSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineGooglePubSubSource.class.equals(String.class)
+ (ObservabilityPipelineSocketSource.class.equals(String.class)
+ && token == JsonToken.VALUE_STRING);
+ }
+ }
+ if (attemptParsing) {
+ tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSocketSource.class);
+ // TODO: there is no validation against JSON schema constraints
+ // (min, max, enum, pattern...), this does not perform a strict JSON
+ // validation, which means the 'match' count may be higher than it should be.
+ if (!((ObservabilityPipelineSocketSource) tmp).unparsed) {
+ deserialized = tmp;
+ match++;
+ }
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSocketSource'");
+ }
+ } catch (Exception e) {
+ // deserialization failed, continue
+ log.log(
+ Level.FINER, "Input data does not match schema 'ObservabilityPipelineSocketSource'", e);
+ }
+
+ // deserialize ObservabilityPipelineSplunkHecSource
+ try {
+ boolean attemptParsing = true;
+ // ensure that we respect type coercion as set on the client ObjectMapper
+ if (ObservabilityPipelineSplunkHecSource.class.equals(Integer.class)
+ || ObservabilityPipelineSplunkHecSource.class.equals(Long.class)
+ || ObservabilityPipelineSplunkHecSource.class.equals(Float.class)
+ || ObservabilityPipelineSplunkHecSource.class.equals(Double.class)
+ || ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class)
+ || ObservabilityPipelineSplunkHecSource.class.equals(String.class)) {
+ attemptParsing = typeCoercion;
+ if (!attemptParsing) {
+ attemptParsing |=
+ ((ObservabilityPipelineSplunkHecSource.class.equals(Integer.class)
+ || ObservabilityPipelineSplunkHecSource.class.equals(Long.class))
+ && token == JsonToken.VALUE_NUMBER_INT);
+ attemptParsing |=
+ ((ObservabilityPipelineSplunkHecSource.class.equals(Float.class)
+ || ObservabilityPipelineSplunkHecSource.class.equals(Double.class))
+ && (token == JsonToken.VALUE_NUMBER_FLOAT
+ || token == JsonToken.VALUE_NUMBER_INT));
+ attemptParsing |=
+ (ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class)
+ && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+ attemptParsing |=
+ (ObservabilityPipelineSplunkHecSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec())
- .readValueAs(ObservabilityPipelineGooglePubSubSource.class);
+ tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkHecSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineGooglePubSubSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineSplunkHecSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(
- Level.FINER, "Input data matches schema 'ObservabilityPipelineGooglePubSubSource'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineGooglePubSubSource'",
+ "Input data does not match schema 'ObservabilityPipelineSplunkHecSource'",
e);
}
- // deserialize ObservabilityPipelineHttpClientSource
+ // deserialize ObservabilityPipelineSplunkTcpSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineHttpClientSource.class.equals(Integer.class)
- || ObservabilityPipelineHttpClientSource.class.equals(Long.class)
- || ObservabilityPipelineHttpClientSource.class.equals(Float.class)
- || ObservabilityPipelineHttpClientSource.class.equals(Double.class)
- || ObservabilityPipelineHttpClientSource.class.equals(Boolean.class)
- || ObservabilityPipelineHttpClientSource.class.equals(String.class)) {
+ if (ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class)
+ || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class)
+ || ObservabilityPipelineSplunkTcpSource.class.equals(Float.class)
+ || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class)
+ || ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class)
+ || ObservabilityPipelineSplunkTcpSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineHttpClientSource.class.equals(Integer.class)
- || ObservabilityPipelineHttpClientSource.class.equals(Long.class))
+ ((ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class)
+ || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineHttpClientSource.class.equals(Float.class)
- || ObservabilityPipelineHttpClientSource.class.equals(Double.class))
+ ((ObservabilityPipelineSplunkTcpSource.class.equals(Float.class)
+ || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineHttpClientSource.class.equals(Boolean.class)
+ (ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineHttpClientSource.class.equals(String.class)
+ (ObservabilityPipelineSplunkTcpSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
tmp =
- tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineHttpClientSource.class);
+ tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkTcpSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineHttpClientSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineSplunkTcpSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineHttpClientSource'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkTcpSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineHttpClientSource'",
+ "Input data does not match schema 'ObservabilityPipelineSplunkTcpSource'",
e);
}
- // deserialize ObservabilityPipelineLogstashSource
+ // deserialize ObservabilityPipelineSumoLogicSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineLogstashSource.class.equals(Integer.class)
- || ObservabilityPipelineLogstashSource.class.equals(Long.class)
- || ObservabilityPipelineLogstashSource.class.equals(Float.class)
- || ObservabilityPipelineLogstashSource.class.equals(Double.class)
- || ObservabilityPipelineLogstashSource.class.equals(Boolean.class)
- || ObservabilityPipelineLogstashSource.class.equals(String.class)) {
+ if (ObservabilityPipelineSumoLogicSource.class.equals(Integer.class)
+ || ObservabilityPipelineSumoLogicSource.class.equals(Long.class)
+ || ObservabilityPipelineSumoLogicSource.class.equals(Float.class)
+ || ObservabilityPipelineSumoLogicSource.class.equals(Double.class)
+ || ObservabilityPipelineSumoLogicSource.class.equals(Boolean.class)
+ || ObservabilityPipelineSumoLogicSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineLogstashSource.class.equals(Integer.class)
- || ObservabilityPipelineLogstashSource.class.equals(Long.class))
+ ((ObservabilityPipelineSumoLogicSource.class.equals(Integer.class)
+ || ObservabilityPipelineSumoLogicSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineLogstashSource.class.equals(Float.class)
- || ObservabilityPipelineLogstashSource.class.equals(Double.class))
+ ((ObservabilityPipelineSumoLogicSource.class.equals(Float.class)
+ || ObservabilityPipelineSumoLogicSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineLogstashSource.class.equals(Boolean.class)
+ (ObservabilityPipelineSumoLogicSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineLogstashSource.class.equals(String.class)
+ (ObservabilityPipelineSumoLogicSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
- tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineLogstashSource.class);
+ tmp =
+ tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSumoLogicSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineLogstashSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineSumoLogicSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineLogstashSource'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSumoLogicSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
Level.FINER,
- "Input data does not match schema 'ObservabilityPipelineLogstashSource'",
+ "Input data does not match schema 'ObservabilityPipelineSumoLogicSource'",
e);
}
- // deserialize ObservabilityPipelineSocketSource
+ // deserialize ObservabilityPipelineSyslogNgSource
try {
boolean attemptParsing = true;
// ensure that we respect type coercion as set on the client ObjectMapper
- if (ObservabilityPipelineSocketSource.class.equals(Integer.class)
- || ObservabilityPipelineSocketSource.class.equals(Long.class)
- || ObservabilityPipelineSocketSource.class.equals(Float.class)
- || ObservabilityPipelineSocketSource.class.equals(Double.class)
- || ObservabilityPipelineSocketSource.class.equals(Boolean.class)
- || ObservabilityPipelineSocketSource.class.equals(String.class)) {
+ if (ObservabilityPipelineSyslogNgSource.class.equals(Integer.class)
+ || ObservabilityPipelineSyslogNgSource.class.equals(Long.class)
+ || ObservabilityPipelineSyslogNgSource.class.equals(Float.class)
+ || ObservabilityPipelineSyslogNgSource.class.equals(Double.class)
+ || ObservabilityPipelineSyslogNgSource.class.equals(Boolean.class)
+ || ObservabilityPipelineSyslogNgSource.class.equals(String.class)) {
attemptParsing = typeCoercion;
if (!attemptParsing) {
attemptParsing |=
- ((ObservabilityPipelineSocketSource.class.equals(Integer.class)
- || ObservabilityPipelineSocketSource.class.equals(Long.class))
+ ((ObservabilityPipelineSyslogNgSource.class.equals(Integer.class)
+ || ObservabilityPipelineSyslogNgSource.class.equals(Long.class))
&& token == JsonToken.VALUE_NUMBER_INT);
attemptParsing |=
- ((ObservabilityPipelineSocketSource.class.equals(Float.class)
- || ObservabilityPipelineSocketSource.class.equals(Double.class))
+ ((ObservabilityPipelineSyslogNgSource.class.equals(Float.class)
+ || ObservabilityPipelineSyslogNgSource.class.equals(Double.class))
&& (token == JsonToken.VALUE_NUMBER_FLOAT
|| token == JsonToken.VALUE_NUMBER_INT));
attemptParsing |=
- (ObservabilityPipelineSocketSource.class.equals(Boolean.class)
+ (ObservabilityPipelineSyslogNgSource.class.equals(Boolean.class)
&& (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
attemptParsing |=
- (ObservabilityPipelineSocketSource.class.equals(String.class)
+ (ObservabilityPipelineSyslogNgSource.class.equals(String.class)
&& token == JsonToken.VALUE_STRING);
}
}
if (attemptParsing) {
- tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSocketSource.class);
+ tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSyslogNgSource.class);
// TODO: there is no validation against JSON schema constraints
// (min, max, enum, pattern...), this does not perform a strict JSON
// validation, which means the 'match' count may be higher than it should be.
- if (!((ObservabilityPipelineSocketSource) tmp).unparsed) {
+ if (!((ObservabilityPipelineSyslogNgSource) tmp).unparsed) {
deserialized = tmp;
match++;
}
- log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSocketSource'");
+ log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSyslogNgSource'");
}
} catch (Exception e) {
// deserialization failed, continue
log.log(
- Level.FINER, "Input data does not match schema 'ObservabilityPipelineSocketSource'", e);
+ Level.FINER,
+ "Input data does not match schema 'ObservabilityPipelineSyslogNgSource'",
+ e);
}
ObservabilityPipelineConfigSourceItem ret = new ObservabilityPipelineConfigSourceItem();
@@ -899,37 +899,37 @@ public ObservabilityPipelineConfigSourceItem() {
super("oneOf", Boolean.FALSE);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineKafkaSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineDatadogAgentSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineDatadogAgentSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonDataFirehoseSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkTcpSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonS3Source o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkHecSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineFluentBitSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonS3Source o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineFluentdSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineFluentdSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineGooglePubSubSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineFluentBitSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineHttpClientSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
@@ -939,94 +939,94 @@ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineHttpServerSour
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSumoLogicSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineKafkaSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineRsyslogSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineLogstashSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSyslogNgSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineRsyslogSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonDataFirehoseSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSocketSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineGooglePubSubSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkHecSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineHttpClientSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkTcpSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineLogstashSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSumoLogicSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
- public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSocketSource o) {
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSyslogNgSource o) {
super("oneOf", Boolean.FALSE);
setActualInstance(o);
}
static {
- schemas.put(
- "ObservabilityPipelineKafkaSource", new GenericType<ObservabilityPipelineKafkaSource>() {});
schemas.put(
"ObservabilityPipelineDatadogAgentSource",
new GenericType<ObservabilityPipelineDatadogAgentSource>() {});
schemas.put(
- "ObservabilityPipelineSplunkTcpSource",
- new GenericType<ObservabilityPipelineSplunkTcpSource>() {});
- schemas.put(
- "ObservabilityPipelineSplunkHecSource",
- new GenericType<ObservabilityPipelineSplunkHecSource>() {});
+ "ObservabilityPipelineAmazonDataFirehoseSource",
+ new GenericType<ObservabilityPipelineAmazonDataFirehoseSource>() {});
schemas.put(
"ObservabilityPipelineAmazonS3Source",
new GenericType<ObservabilityPipelineAmazonS3Source>() {});
- schemas.put(
- "ObservabilityPipelineFluentdSource",
- new GenericType<ObservabilityPipelineFluentdSource>() {});
schemas.put(
"ObservabilityPipelineFluentBitSource",
new GenericType<ObservabilityPipelineFluentBitSource>() {});
schemas.put(
- "ObservabilityPipelineHttpServerSource",
- new GenericType<ObservabilityPipelineHttpServerSource>() {});
- schemas.put(
- "ObservabilityPipelineSumoLogicSource",
- new GenericType<ObservabilityPipelineSumoLogicSource>() {});
- schemas.put(
- "ObservabilityPipelineRsyslogSource",
- new GenericType<ObservabilityPipelineRsyslogSource>() {});
- schemas.put(
- "ObservabilityPipelineSyslogNgSource",
- new GenericType<ObservabilityPipelineSyslogNgSource>() {});
- schemas.put(
- "ObservabilityPipelineAmazonDataFirehoseSource",
- new GenericType<ObservabilityPipelineAmazonDataFirehoseSource>() {});
+ "ObservabilityPipelineFluentdSource",
+ new GenericType<ObservabilityPipelineFluentdSource>() {});
schemas.put(
"ObservabilityPipelineGooglePubSubSource",
new GenericType<ObservabilityPipelineGooglePubSubSource>() {});
schemas.put(
"ObservabilityPipelineHttpClientSource",
new GenericType<ObservabilityPipelineHttpClientSource>() {});
+ schemas.put(
+ "ObservabilityPipelineHttpServerSource",
+ new GenericType<ObservabilityPipelineHttpServerSource>() {});
+ schemas.put(
+ "ObservabilityPipelineKafkaSource", new GenericType<ObservabilityPipelineKafkaSource>() {});
schemas.put(
"ObservabilityPipelineLogstashSource",
new GenericType<ObservabilityPipelineLogstashSource>() {});
+ schemas.put(
+ "ObservabilityPipelineRsyslogSource",
+ new GenericType<ObservabilityPipelineRsyslogSource>() {});
schemas.put(
"ObservabilityPipelineSocketSource",
new GenericType<ObservabilityPipelineSocketSource>() {});
+ schemas.put(
+ "ObservabilityPipelineSplunkHecSource",
+ new GenericType<ObservabilityPipelineSplunkHecSource>() {});
+ schemas.put(
+ "ObservabilityPipelineSplunkTcpSource",
+ new GenericType<ObservabilityPipelineSplunkTcpSource>() {});
+ schemas.put(
+ "ObservabilityPipelineSumoLogicSource",
+ new GenericType<ObservabilityPipelineSumoLogicSource>() {});
+ schemas.put(
+ "ObservabilityPipelineSyslogNgSource",
+ new GenericType<ObservabilityPipelineSyslogNgSource>() {});
JSON.registerDescendants(
ObservabilityPipelineConfigSourceItem.class, Collections.unmodifiableMap(schemas));
}
@@ -1038,15 +1038,15 @@ public Map<String, GenericType> getSchemas() {
/**
* Set the instance that matches the oneOf child schema, check the instance parameter is valid
- * against the oneOf child schemas: ObservabilityPipelineKafkaSource,
- * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource,
- * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source,
- * ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource,
- * ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource,
- * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource,
- * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource,
- * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource,
- * ObservabilityPipelineSocketSource
+ * against the oneOf child schemas: ObservabilityPipelineDatadogAgentSource,
+ * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source,
+ * ObservabilityPipelineFluentBitSource, ObservabilityPipelineFluentdSource,
+ * ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource,
+ * ObservabilityPipelineHttpServerSource, ObservabilityPipelineKafkaSource,
+ * ObservabilityPipelineLogstashSource, ObservabilityPipelineRsyslogSource,
+ * ObservabilityPipelineSocketSource, ObservabilityPipelineSplunkHecSource,
+ * ObservabilityPipelineSplunkTcpSource, ObservabilityPipelineSumoLogicSource,
+ * ObservabilityPipelineSyslogNgSource
*
* It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a
* composed schema (allOf, anyOf, oneOf).
@@ -1054,37 +1054,37 @@ public Map getSchemas() {
@Override
public void setActualInstance(Object instance) {
if (JSON.isInstanceOf(
- ObservabilityPipelineKafkaSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineDatadogAgentSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineDatadogAgentSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineAmazonDataFirehoseSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineSplunkTcpSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineAmazonS3Source.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineSplunkHecSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineFluentBitSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineAmazonS3Source.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineFluentdSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineFluentdSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineGooglePubSubSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineFluentBitSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineHttpClientSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
@@ -1094,42 +1094,42 @@ public void setActualInstance(Object instance) {
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineSumoLogicSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineKafkaSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineRsyslogSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineLogstashSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineSyslogNgSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineRsyslogSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineAmazonDataFirehoseSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineSocketSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineGooglePubSubSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineSplunkHecSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineHttpClientSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineSplunkTcpSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineLogstashSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineSumoLogicSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
if (JSON.isInstanceOf(
- ObservabilityPipelineSocketSource.class, instance, new HashSet<Class<?>>())) {
+ ObservabilityPipelineSyslogNgSource.class, instance, new HashSet<Class<?>>())) {
super.setActualInstance(instance);
return;
}
@@ -1139,55 +1139,43 @@ public void setActualInstance(Object instance) {
return;
}
throw new RuntimeException(
- "Invalid instance type. Must be ObservabilityPipelineKafkaSource,"
- + " ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource,"
- + " ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source,"
- + " ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource,"
- + " ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource,"
- + " ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource,"
- + " ObservabilityPipelineAmazonDataFirehoseSource,"
+ "Invalid instance type. Must be ObservabilityPipelineDatadogAgentSource,"
+ + " ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source,"
+ + " ObservabilityPipelineFluentBitSource, ObservabilityPipelineFluentdSource,"
+ " ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource,"
- + " ObservabilityPipelineLogstashSource, ObservabilityPipelineSocketSource");
+ + " ObservabilityPipelineHttpServerSource, ObservabilityPipelineKafkaSource,"
+ + " ObservabilityPipelineLogstashSource, ObservabilityPipelineRsyslogSource,"
+ + " ObservabilityPipelineSocketSource, ObservabilityPipelineSplunkHecSource,"
+ + " ObservabilityPipelineSplunkTcpSource, ObservabilityPipelineSumoLogicSource,"
+ + " ObservabilityPipelineSyslogNgSource");
}
/**
- * Get the actual instance, which can be the following: ObservabilityPipelineKafkaSource,
- * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource,
- * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source,
- * ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource,
- * ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource,
- * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource,
- * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource,
- * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource,
- * ObservabilityPipelineSocketSource
+ * Get the actual instance, which can be the following: ObservabilityPipelineDatadogAgentSource,
+ * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source,
+ * ObservabilityPipelineFluentBitSource, ObservabilityPipelineFluentdSource,
+ * ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource,
+ * ObservabilityPipelineHttpServerSource, ObservabilityPipelineKafkaSource,
+ * ObservabilityPipelineLogstashSource, ObservabilityPipelineRsyslogSource,
+ * ObservabilityPipelineSocketSource, ObservabilityPipelineSplunkHecSource,
+ * ObservabilityPipelineSplunkTcpSource, ObservabilityPipelineSumoLogicSource,
+ * ObservabilityPipelineSyslogNgSource
*
- * @return The actual instance (ObservabilityPipelineKafkaSource,
- * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource,
- * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source,
- * ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource,
- * ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource,
- * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource,
- * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource,
- * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource,
- * ObservabilityPipelineSocketSource)
+ * @return The actual instance (ObservabilityPipelineDatadogAgentSource,
+ * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source,
+ * ObservabilityPipelineFluentBitSource, ObservabilityPipelineFluentdSource,
+ * ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource,
+ * ObservabilityPipelineHttpServerSource, ObservabilityPipelineKafkaSource,
+ * ObservabilityPipelineLogstashSource, ObservabilityPipelineRsyslogSource,
+ * ObservabilityPipelineSocketSource, ObservabilityPipelineSplunkHecSource,
+ * ObservabilityPipelineSplunkTcpSource, ObservabilityPipelineSumoLogicSource,
+ * ObservabilityPipelineSyslogNgSource)
*/
@Override
public Object getActualInstance() {
return super.getActualInstance();
}
- /**
- * Get the actual instance of `ObservabilityPipelineKafkaSource`. If the actual instance is not
- * `ObservabilityPipelineKafkaSource`, the ClassCastException will be thrown.
- *
- * @return The actual instance of `ObservabilityPipelineKafkaSource`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineKafkaSource`
- */
- public ObservabilityPipelineKafkaSource getObservabilityPipelineKafkaSource()
- throws ClassCastException {
- return (ObservabilityPipelineKafkaSource) super.getActualInstance();
- }
-
/**
* Get the actual instance of `ObservabilityPipelineDatadogAgentSource`. If the actual instance is
* not `ObservabilityPipelineDatadogAgentSource`, the ClassCastException will be thrown.
@@ -1201,27 +1189,17 @@ public ObservabilityPipelineDatadogAgentSource getObservabilityPipelineDatadogAg
}
/**
- * Get the actual instance of `ObservabilityPipelineSplunkTcpSource`. If the actual instance is
- * not `ObservabilityPipelineSplunkTcpSource`, the ClassCastException will be thrown.
- *
- * @return The actual instance of `ObservabilityPipelineSplunkTcpSource`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkTcpSource`
- */
- public ObservabilityPipelineSplunkTcpSource getObservabilityPipelineSplunkTcpSource()
- throws ClassCastException {
- return (ObservabilityPipelineSplunkTcpSource) super.getActualInstance();
- }
-
- /**
- * Get the actual instance of `ObservabilityPipelineSplunkHecSource`. If the actual instance is
- * not `ObservabilityPipelineSplunkHecSource`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineAmazonDataFirehoseSource`. If the actual
+ * instance is not `ObservabilityPipelineAmazonDataFirehoseSource`, the ClassCastException will be
+ * thrown.
*
- * @return The actual instance of `ObservabilityPipelineSplunkHecSource`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecSource`
+ * @return The actual instance of `ObservabilityPipelineAmazonDataFirehoseSource`
+ * @throws ClassCastException if the instance is not
+ * `ObservabilityPipelineAmazonDataFirehoseSource`
*/
- public ObservabilityPipelineSplunkHecSource getObservabilityPipelineSplunkHecSource()
- throws ClassCastException {
- return (ObservabilityPipelineSplunkHecSource) super.getActualInstance();
+ public ObservabilityPipelineAmazonDataFirehoseSource
+ getObservabilityPipelineAmazonDataFirehoseSource() throws ClassCastException {
+ return (ObservabilityPipelineAmazonDataFirehoseSource) super.getActualInstance();
}
/**
@@ -1236,6 +1214,18 @@ public ObservabilityPipelineAmazonS3Source getObservabilityPipelineAmazonS3Sourc
return (ObservabilityPipelineAmazonS3Source) super.getActualInstance();
}
+ /**
+ * Get the actual instance of `ObservabilityPipelineFluentBitSource`. If the actual instance is
+ * not `ObservabilityPipelineFluentBitSource`, the ClassCastException will be thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineFluentBitSource`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineFluentBitSource`
+ */
+ public ObservabilityPipelineFluentBitSource getObservabilityPipelineFluentBitSource()
+ throws ClassCastException {
+ return (ObservabilityPipelineFluentBitSource) super.getActualInstance();
+ }
+
/**
* Get the actual instance of `ObservabilityPipelineFluentdSource`. If the actual instance is not
* `ObservabilityPipelineFluentdSource`, the ClassCastException will be thrown.
@@ -1249,15 +1239,27 @@ public ObservabilityPipelineFluentdSource getObservabilityPipelineFluentdSource(
}
/**
- * Get the actual instance of `ObservabilityPipelineFluentBitSource`. If the actual instance is
- * not `ObservabilityPipelineFluentBitSource`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineGooglePubSubSource`. If the actual instance is
+ * not `ObservabilityPipelineGooglePubSubSource`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineFluentBitSource`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineFluentBitSource`
+ * @return The actual instance of `ObservabilityPipelineGooglePubSubSource`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineGooglePubSubSource`
*/
- public ObservabilityPipelineFluentBitSource getObservabilityPipelineFluentBitSource()
+ public ObservabilityPipelineGooglePubSubSource getObservabilityPipelineGooglePubSubSource()
throws ClassCastException {
- return (ObservabilityPipelineFluentBitSource) super.getActualInstance();
+ return (ObservabilityPipelineGooglePubSubSource) super.getActualInstance();
+ }
+
+ /**
+ * Get the actual instance of `ObservabilityPipelineHttpClientSource`. If the actual instance is
+ * not `ObservabilityPipelineHttpClientSource`, the ClassCastException will be thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineHttpClientSource`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineHttpClientSource`
+ */
+ public ObservabilityPipelineHttpClientSource getObservabilityPipelineHttpClientSource()
+ throws ClassCastException {
+ return (ObservabilityPipelineHttpClientSource) super.getActualInstance();
}
/**
@@ -1273,15 +1275,27 @@ public ObservabilityPipelineHttpServerSource getObservabilityPipelineHttpServerS
}
/**
- * Get the actual instance of `ObservabilityPipelineSumoLogicSource`. If the actual instance is
- * not `ObservabilityPipelineSumoLogicSource`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineKafkaSource`. If the actual instance is not
+ * `ObservabilityPipelineKafkaSource`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineSumoLogicSource`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineSumoLogicSource`
+ * @return The actual instance of `ObservabilityPipelineKafkaSource`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineKafkaSource`
*/
- public ObservabilityPipelineSumoLogicSource getObservabilityPipelineSumoLogicSource()
+ public ObservabilityPipelineKafkaSource getObservabilityPipelineKafkaSource()
throws ClassCastException {
- return (ObservabilityPipelineSumoLogicSource) super.getActualInstance();
+ return (ObservabilityPipelineKafkaSource) super.getActualInstance();
+ }
+
+ /**
+ * Get the actual instance of `ObservabilityPipelineLogstashSource`. If the actual instance is not
+ * `ObservabilityPipelineLogstashSource`, the ClassCastException will be thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineLogstashSource`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineLogstashSource`
+ */
+ public ObservabilityPipelineLogstashSource getObservabilityPipelineLogstashSource()
+ throws ClassCastException {
+ return (ObservabilityPipelineLogstashSource) super.getActualInstance();
}
/**
@@ -1297,76 +1311,62 @@ public ObservabilityPipelineRsyslogSource getObservabilityPipelineRsyslogSource(
}
/**
- * Get the actual instance of `ObservabilityPipelineSyslogNgSource`. If the actual instance is not
- * `ObservabilityPipelineSyslogNgSource`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineSocketSource`. If the actual instance is not
+ * `ObservabilityPipelineSocketSource`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineSyslogNgSource`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineSyslogNgSource`
+ * @return The actual instance of `ObservabilityPipelineSocketSource`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineSocketSource`
*/
- public ObservabilityPipelineSyslogNgSource getObservabilityPipelineSyslogNgSource()
+ public ObservabilityPipelineSocketSource getObservabilityPipelineSocketSource()
throws ClassCastException {
- return (ObservabilityPipelineSyslogNgSource) super.getActualInstance();
- }
-
- /**
- * Get the actual instance of `ObservabilityPipelineAmazonDataFirehoseSource`. If the actual
- * instance is not `ObservabilityPipelineAmazonDataFirehoseSource`, the ClassCastException will be
- * thrown.
- *
- * @return The actual instance of `ObservabilityPipelineAmazonDataFirehoseSource`
- * @throws ClassCastException if the instance is not
- * `ObservabilityPipelineAmazonDataFirehoseSource`
- */
- public ObservabilityPipelineAmazonDataFirehoseSource
- getObservabilityPipelineAmazonDataFirehoseSource() throws ClassCastException {
- return (ObservabilityPipelineAmazonDataFirehoseSource) super.getActualInstance();
+ return (ObservabilityPipelineSocketSource) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineGooglePubSubSource`. If the actual instance is
- * not `ObservabilityPipelineGooglePubSubSource`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineSplunkHecSource`. If the actual instance is
+ * not `ObservabilityPipelineSplunkHecSource`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineGooglePubSubSource`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineGooglePubSubSource`
+ * @return The actual instance of `ObservabilityPipelineSplunkHecSource`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecSource`
*/
- public ObservabilityPipelineGooglePubSubSource getObservabilityPipelineGooglePubSubSource()
+ public ObservabilityPipelineSplunkHecSource getObservabilityPipelineSplunkHecSource()
throws ClassCastException {
- return (ObservabilityPipelineGooglePubSubSource) super.getActualInstance();
+ return (ObservabilityPipelineSplunkHecSource) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineHttpClientSource`. If the actual instance is
- * not `ObservabilityPipelineHttpClientSource`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineSplunkTcpSource`. If the actual instance is
+ * not `ObservabilityPipelineSplunkTcpSource`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineHttpClientSource`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineHttpClientSource`
+ * @return The actual instance of `ObservabilityPipelineSplunkTcpSource`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkTcpSource`
*/
- public ObservabilityPipelineHttpClientSource getObservabilityPipelineHttpClientSource()
+ public ObservabilityPipelineSplunkTcpSource getObservabilityPipelineSplunkTcpSource()
throws ClassCastException {
- return (ObservabilityPipelineHttpClientSource) super.getActualInstance();
+ return (ObservabilityPipelineSplunkTcpSource) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineLogstashSource`. If the actual instance is not
- * `ObservabilityPipelineLogstashSource`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineSumoLogicSource`. If the actual instance is
+ * not `ObservabilityPipelineSumoLogicSource`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineLogstashSource`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineLogstashSource`
+ * @return The actual instance of `ObservabilityPipelineSumoLogicSource`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineSumoLogicSource`
*/
- public ObservabilityPipelineLogstashSource getObservabilityPipelineLogstashSource()
+ public ObservabilityPipelineSumoLogicSource getObservabilityPipelineSumoLogicSource()
throws ClassCastException {
- return (ObservabilityPipelineLogstashSource) super.getActualInstance();
+ return (ObservabilityPipelineSumoLogicSource) super.getActualInstance();
}
/**
- * Get the actual instance of `ObservabilityPipelineSocketSource`. If the actual instance is not
- * `ObservabilityPipelineSocketSource`, the ClassCastException will be thrown.
+ * Get the actual instance of `ObservabilityPipelineSyslogNgSource`. If the actual instance is not
+ * `ObservabilityPipelineSyslogNgSource`, the ClassCastException will be thrown.
*
- * @return The actual instance of `ObservabilityPipelineSocketSource`
- * @throws ClassCastException if the instance is not `ObservabilityPipelineSocketSource`
+ * @return The actual instance of `ObservabilityPipelineSyslogNgSource`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineSyslogNgSource`
*/
- public ObservabilityPipelineSocketSource getObservabilityPipelineSocketSource()
+ public ObservabilityPipelineSyslogNgSource getObservabilityPipelineSyslogNgSource()
throws ClassCastException {
- return (ObservabilityPipelineSocketSource) super.getActualInstance();
+ return (ObservabilityPipelineSyslogNgSource) super.getActualInstance();
}
}
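
The reordering of the oneOf child schemas above is alphabetical and behavior-preserving: callers still wrap a concrete source through the matching constructor (or setActualInstance) and unwrap it with the corresponding typed getter. A minimal usage sketch, assuming the generator's usual no-arg constructor and fluent id(...) setter on the source model (the id value is illustrative):

    // Wrap a concrete source in the oneOf container, then recover it with the
    // matching typed getter. The getter casts internally and throws
    // ClassCastException if a different source type is wrapped.
    ObservabilityPipelineDatadogAgentSource source =
        new ObservabilityPipelineDatadogAgentSource().id("datadog-agent-source");
    ObservabilityPipelineConfigSourceItem item =
        new ObservabilityPipelineConfigSourceItem(source);
    ObservabilityPipelineDatadogAgentSource unwrapped =
        item.getObservabilityPipelineDatadogAgentSource();
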
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCrowdStrikeNextGenSiemDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCrowdStrikeNextGenSiemDestination.java
index 5709d5ab340..20fb23651d9 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCrowdStrikeNextGenSiemDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCrowdStrikeNextGenSiemDestination.java
@@ -22,6 +22,8 @@
/**
* The crowdstrike_next_gen_siem destination forwards logs to CrowdStrike Next Gen
* SIEM.
+ *
+ * Supported pipeline types: logs
*/
@JsonPropertyOrder({
ObservabilityPipelineCrowdStrikeNextGenSiemDestination.JSON_PROPERTY_COMPRESSION,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCustomProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCustomProcessor.java
index 1855d3fa5ef..9e6e905d6f8 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCustomProcessor.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCustomProcessor.java
@@ -23,6 +23,8 @@
* The custom_processor processor transforms events using Vector Remap Language (VRL) scripts with
* advanced filtering capabilities.
+ *
+ * Supported pipeline types: logs
*/
@JsonPropertyOrder({
ObservabilityPipelineCustomProcessor.JSON_PROPERTY_DISPLAY_NAME,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java
index f7a44ea620b..18c20a617f4 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java
@@ -17,7 +17,11 @@
import java.util.Map;
import java.util.Objects;
-/** The datadog_agent source collects logs from the Datadog Agent. */
+/**
+ * The datadog_agent source collects logs/metrics from the Datadog Agent.
+ *
+ * Supported pipeline types: logs, metrics
+ */
@JsonPropertyOrder({
ObservabilityPipelineDatadogAgentSource.JSON_PROPERTY_ID,
ObservabilityPipelineDatadogAgentSource.JSON_PROPERTY_TLS,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java
index c408c2da16f..c8f06e398af 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The datadog_logs destination forwards logs to Datadog Log Management. */
+/**
+ * The datadog_logs destination forwards logs to Datadog Log Management.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineDatadogLogsDestination.JSON_PROPERTY_ID,
ObservabilityPipelineDatadogLogsDestination.JSON_PROPERTY_INPUTS,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestination.java
new file mode 100644
index 00000000000..17a7a529012
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestination.java
@@ -0,0 +1,224 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * The datadog_metrics destination forwards metrics to Datadog.
+ *
+ * Supported pipeline types: metrics
+ */
+@JsonPropertyOrder({
+ ObservabilityPipelineDatadogMetricsDestination.JSON_PROPERTY_ID,
+ ObservabilityPipelineDatadogMetricsDestination.JSON_PROPERTY_INPUTS,
+ ObservabilityPipelineDatadogMetricsDestination.JSON_PROPERTY_TYPE
+})
+@jakarta.annotation.Generated(
+ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+public class ObservabilityPipelineDatadogMetricsDestination {
+ @JsonIgnore public boolean unparsed = false;
+ public static final String JSON_PROPERTY_ID = "id";
+ private String id;
+
+ public static final String JSON_PROPERTY_INPUTS = "inputs";
+ private List<String> inputs = new ArrayList<>();
+
+ public static final String JSON_PROPERTY_TYPE = "type";
+ private ObservabilityPipelineDatadogMetricsDestinationType type =
+ ObservabilityPipelineDatadogMetricsDestinationType.DATADOG_METRICS;
+
+ public ObservabilityPipelineDatadogMetricsDestination() {}
+
+ @JsonCreator
+ public ObservabilityPipelineDatadogMetricsDestination(
+ @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id,
+ @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List<String> inputs,
+ @JsonProperty(required = true, value = JSON_PROPERTY_TYPE)
+ ObservabilityPipelineDatadogMetricsDestinationType type) {
+ this.id = id;
+ this.inputs = inputs;
+ this.type = type;
+ this.unparsed |= !type.isValid();
+ }
+
+ public ObservabilityPipelineDatadogMetricsDestination id(String id) {
+ this.id = id;
+ return this;
+ }
+
+ /**
+ * The unique identifier for this component.
+ *
+ * @return id
+ */
+ @JsonProperty(JSON_PROPERTY_ID)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public ObservabilityPipelineDatadogMetricsDestination inputs(List<String> inputs) {
+ this.inputs = inputs;
+ return this;
+ }
+
+ public ObservabilityPipelineDatadogMetricsDestination addInputsItem(String inputsItem) {
+ this.inputs.add(inputsItem);
+ return this;
+ }
+
+ /**
+ * A list of component IDs whose output is used as the input for this component.
+ *
+ * @return inputs
+ */
+ @JsonProperty(JSON_PROPERTY_INPUTS)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public List<String> getInputs() {
+ return inputs;
+ }
+
+ public void setInputs(List<String> inputs) {
+ this.inputs = inputs;
+ }
+
+ public ObservabilityPipelineDatadogMetricsDestination type(
+ ObservabilityPipelineDatadogMetricsDestinationType type) {
+ this.type = type;
+ this.unparsed |= !type.isValid();
+ return this;
+ }
+
+ /**
+ * The destination type. The value should always be datadog_metrics.
+ *
+ * @return type
+ */
+ @JsonProperty(JSON_PROPERTY_TYPE)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public ObservabilityPipelineDatadogMetricsDestinationType getType() {
+ return type;
+ }
+
+ public void setType(ObservabilityPipelineDatadogMetricsDestinationType type) {
+ if (!type.isValid()) {
+ this.unparsed = true;
+ }
+ this.type = type;
+ }
+
+ /**
+ * A container for additional, undeclared properties. This is a holder for any undeclared
+ * properties as specified with the 'additionalProperties' keyword in the OAS document.
+ */
+ private Map<String, Object> additionalProperties;
+
+ /**
+ * Set the additional (undeclared) property with the specified name and value. If the property
+ * does not already exist, create it otherwise replace it.
+ *
+ * @param key The arbitrary key to set
+ * @param value The associated value
+ * @return ObservabilityPipelineDatadogMetricsDestination
+ */
+ @JsonAnySetter
+ public ObservabilityPipelineDatadogMetricsDestination putAdditionalProperty(
+ String key, Object value) {
+ if (this.additionalProperties == null) {
+ this.additionalProperties = new HashMap<String, Object>();
+ }
+ this.additionalProperties.put(key, value);
+ return this;
+ }
+
+ /**
+ * Return the additional (undeclared) property.
+ *
+ * @return The additional properties
+ */
+ @JsonAnyGetter
+ public Map<String, Object> getAdditionalProperties() {
+ return additionalProperties;
+ }
+
+ /**
+ * Return the additional (undeclared) property with the specified name.
+ *
+ * @param key The arbitrary key to get
+ * @return The specific additional property for the given key
+ */
+ public Object getAdditionalProperty(String key) {
+ if (this.additionalProperties == null) {
+ return null;
+ }
+ return this.additionalProperties.get(key);
+ }
+
+ /** Return true if this ObservabilityPipelineDatadogMetricsDestination object is equal to o. */
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ ObservabilityPipelineDatadogMetricsDestination observabilityPipelineDatadogMetricsDestination =
+ (ObservabilityPipelineDatadogMetricsDestination) o;
+ return Objects.equals(this.id, observabilityPipelineDatadogMetricsDestination.id)
+ && Objects.equals(this.inputs, observabilityPipelineDatadogMetricsDestination.inputs)
+ && Objects.equals(this.type, observabilityPipelineDatadogMetricsDestination.type)
+ && Objects.equals(
+ this.additionalProperties,
+ observabilityPipelineDatadogMetricsDestination.additionalProperties);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, inputs, type, additionalProperties);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("class ObservabilityPipelineDatadogMetricsDestination {\n");
+ sb.append(" id: ").append(toIndentedString(id)).append("\n");
+ sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n");
+ sb.append(" type: ").append(toIndentedString(type)).append("\n");
+ sb.append(" additionalProperties: ")
+ .append(toIndentedString(additionalProperties))
+ .append("\n");
+ sb.append('}');
+ return sb.toString();
+ }
+
+ /**
+ * Convert the given object to string with each line indented by 4 spaces (except the first line).
+ */
+ private String toIndentedString(Object o) {
+ if (o == null) {
+ return "null";
+ }
+ return o.toString().replace("\n", "\n ");
+ }
+}
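
For reference, a short construction sketch for the new model: the three required properties map onto the @JsonCreator constructor shown above, and "my-metrics-filter" is a hypothetical upstream component ID, not a name from this change:

    import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogMetricsDestination;
    import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogMetricsDestinationType;
    import java.util.Collections;

    public class DatadogMetricsDestinationExample {
      public static void main(String[] args) {
        // id, inputs, and type are all required; type should always be datadog_metrics.
        ObservabilityPipelineDatadogMetricsDestination destination =
            new ObservabilityPipelineDatadogMetricsDestination(
                "datadog-metrics-destination",
                Collections.singletonList("my-metrics-filter"), // hypothetical upstream component ID
                ObservabilityPipelineDatadogMetricsDestinationType.DATADOG_METRICS);
        System.out.println(destination);
      }
    }
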
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestinationType.java
new file mode 100644
index 00000000000..e4ff1e92bca
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestinationType.java
@@ -0,0 +1,63 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The destination type. The value should always be datadog_metrics. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineDatadogMetricsDestinationType
+ .ObservabilityPipelineDatadogMetricsDestinationTypeSerializer.class)
+public class ObservabilityPipelineDatadogMetricsDestinationType extends ModelEnum<String> {
+
+ private static final Set<String> allowedValues =
+ new HashSet<String>(Arrays.asList("datadog_metrics"));
+
+ public static final ObservabilityPipelineDatadogMetricsDestinationType DATADOG_METRICS =
+ new ObservabilityPipelineDatadogMetricsDestinationType("datadog_metrics");
+
+ ObservabilityPipelineDatadogMetricsDestinationType(String value) {
+ super(value, allowedValues);
+ }
+
+ public static class ObservabilityPipelineDatadogMetricsDestinationTypeSerializer
+ extends StdSerializer<ObservabilityPipelineDatadogMetricsDestinationType> {
+ public ObservabilityPipelineDatadogMetricsDestinationTypeSerializer(
+ Class<ObservabilityPipelineDatadogMetricsDestinationType> t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineDatadogMetricsDestinationTypeSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineDatadogMetricsDestinationType value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineDatadogMetricsDestinationType fromValue(String value) {
+ return new ObservabilityPipelineDatadogMetricsDestinationType(value);
+ }
+}
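
As with the other generated ModelEnum wrappers, fromValue accepts arbitrary strings rather than throwing; validity is only observable through isValid(), which the enclosing destination uses to flip its unparsed flag (see setType in the model above). A sketch:

    // A known value: isValid() returns true.
    ObservabilityPipelineDatadogMetricsDestinationType ok =
        ObservabilityPipelineDatadogMetricsDestinationType.fromValue("datadog_metrics");
    // An unknown value: no exception is raised here; a model consuming it
    // marks itself unparsed instead. "not_a_real_type" is an illustrative bad value.
    ObservabilityPipelineDatadogMetricsDestinationType unknown =
        ObservabilityPipelineDatadogMetricsDestinationType.fromValue("not_a_real_type");
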
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogTagsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogTagsProcessor.java
index 4bd1a931dc6..8337c9980ea 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogTagsProcessor.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogTagsProcessor.java
@@ -21,6 +21,8 @@
/**
* The datadog_tags processor includes or excludes specific Datadog tags in your logs.
+ *
+ * Supported pipeline types: logs
*/
@JsonPropertyOrder({
ObservabilityPipelineDatadogTagsProcessor.JSON_PROPERTY_ACTION,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java
index 593b6c0ddfb..84a8c6e58e7 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The dedupe processor removes duplicate fields in log events. */
+/**
+ * The dedupe processor removes duplicate fields in log events.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineDedupeProcessor.JSON_PROPERTY_DISPLAY_NAME,
ObservabilityPipelineDedupeProcessor.JSON_PROPERTY_ENABLED,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java
index c414b55e50e..6158f6ea303 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The elasticsearch destination writes logs to an Elasticsearch cluster. */
+/**
+ * The elasticsearch destination writes logs to an Elasticsearch cluster.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_API_VERSION,
ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_BULK_INDEX,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java
index e19f2eb045c..3ca39e54acf 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java
@@ -20,6 +20,8 @@
/**
* The enrichment_table processor enriches logs using a static CSV file or GeoIP
* database.
+ *
+ * Supported pipeline types: logs
*/
@JsonPropertyOrder({
ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_DISPLAY_NAME,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java
index cfbd5a5b3fa..f433d07dc86 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java
@@ -18,8 +18,11 @@
import java.util.Objects;
/**
- * The filter processor allows conditional processing of logs based on a Datadog search
- * query. Logs that match the include query are passed through; others are discarded.
+ * The filter processor allows conditional processing of logs/metrics based on a
+ * Datadog search query. Logs/metrics that match the include query are passed through;
+ * others are discarded.
+ *
+ * Supported pipeline types: logs, metrics
*/
@JsonPropertyOrder({
ObservabilityPipelineFilterProcessor.JSON_PROPERTY_DISPLAY_NAME,
@@ -132,8 +135,8 @@ public ObservabilityPipelineFilterProcessor include(String include) {
}
/**
- * A Datadog search query used to determine which logs should pass through the filter. Logs that
- * match this query continue to downstream components; others are dropped.
+ * A Datadog search query used to determine which logs/metrics should pass through the filter.
+ * Logs/metrics that match this query continue to downstream components; others are dropped.
*
* @return include
*/
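
Since the filter processor now gates both logs and metrics on the same include query, a minimal configuration sketch; include(...) appears in the hunk above, while the no-arg constructor is assumed from the generator's usual pattern and the query string is an arbitrary example:

    // Pass through only events matching the Datadog search query;
    // non-matching events are dropped before reaching downstream components.
    ObservabilityPipelineFilterProcessor filter =
        new ObservabilityPipelineFilterProcessor().include("status:error");
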
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java
index 5ec1382b4be..3135fdde97a 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java
@@ -17,7 +17,11 @@
import java.util.Map;
import java.util.Objects;
-/** The fluent_bit source ingests logs from Fluent Bit. */
+/**
+ * The fluent_bit source ingests logs from Fluent Bit.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineFluentBitSource.JSON_PROPERTY_ID,
ObservabilityPipelineFluentBitSource.JSON_PROPERTY_TLS,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java
index 16111fa18ac..7ef3df02faa 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java
@@ -17,7 +17,11 @@
import java.util.Map;
import java.util.Objects;
-/** The fluentd source ingests logs from a Fluentd-compatible service. */
+/**
+ * The fluentd source ingests logs from a Fluentd-compatible service.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineFluentdSource.JSON_PROPERTY_ID,
ObservabilityPipelineFluentdSource.JSON_PROPERTY_TLS,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java
index aa333b96abb..38659631ce9 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java
@@ -23,6 +23,8 @@
* The generate_datadog_metrics processor creates custom metrics from logs and sends
* them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log
* fields.
+ *
+ * Supported pipeline types: logs
*/
@JsonPropertyOrder({
ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_DISPLAY_NAME,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java
index 7f5b75c361c..42e408347d2 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The google_chronicle destination sends logs to Google Chronicle. */
+/**
+ * The google_chronicle destination sends logs to Google Chronicle.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_AUTH,
ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_CUSTOMER_ID,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java
index 032593f4086..b66adc5727a 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java
@@ -22,6 +22,8 @@
/**
* The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS)
* bucket. It requires a bucket name, GCP authentication, and metadata fields.
+ *
+ * Supported pipeline types: logs
*/
@JsonPropertyOrder({
ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_ACL,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubDestination.java
index e72df907d63..877c5b6c6d4 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubDestination.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubDestination.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The google_pubsub destination publishes logs to a Google Cloud Pub/Sub topic. */
+/**
+ * The google_pubsub destination publishes logs to a Google Cloud Pub/Sub topic.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineGooglePubSubDestination.JSON_PROPERTY_AUTH,
ObservabilityPipelineGooglePubSubDestination.JSON_PROPERTY_ENCODING,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java
index 0fb06ee38a7..aa7c288698e 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java
@@ -17,7 +17,11 @@
import java.util.Map;
import java.util.Objects;
-/** The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription. */
+/**
+ * The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_AUTH,
ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_DECODING,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestination.java
new file mode 100644
index 00000000000..0a94ae3bc26
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestination.java
@@ -0,0 +1,350 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * The http_client destination sends data to an HTTP endpoint.
+ *
+ * Supported pipeline types: logs, metrics
+ */
+@JsonPropertyOrder({
+ ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_AUTH_STRATEGY,
+ ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_COMPRESSION,
+ ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_ENCODING,
+ ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_ID,
+ ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_INPUTS,
+ ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_TLS,
+ ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_TYPE
+})
+@jakarta.annotation.Generated(
+ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+public class ObservabilityPipelineHttpClientDestination {
+ @JsonIgnore public boolean unparsed = false;
+ public static final String JSON_PROPERTY_AUTH_STRATEGY = "auth_strategy";
+ private ObservabilityPipelineHttpClientDestinationAuthStrategy authStrategy;
+
+ public static final String JSON_PROPERTY_COMPRESSION = "compression";
+ private ObservabilityPipelineHttpClientDestinationCompression compression;
+
+ public static final String JSON_PROPERTY_ENCODING = "encoding";
+ private ObservabilityPipelineHttpClientDestinationEncoding encoding;
+
+ public static final String JSON_PROPERTY_ID = "id";
+ private String id;
+
+ public static final String JSON_PROPERTY_INPUTS = "inputs";
+  private List<String> inputs = new ArrayList<>();
+
+ public static final String JSON_PROPERTY_TLS = "tls";
+ private ObservabilityPipelineTls tls;
+
+ public static final String JSON_PROPERTY_TYPE = "type";
+ private ObservabilityPipelineHttpClientDestinationType type =
+ ObservabilityPipelineHttpClientDestinationType.HTTP_CLIENT;
+
+ public ObservabilityPipelineHttpClientDestination() {}
+
+ @JsonCreator
+ public ObservabilityPipelineHttpClientDestination(
+ @JsonProperty(required = true, value = JSON_PROPERTY_ENCODING)
+ ObservabilityPipelineHttpClientDestinationEncoding encoding,
+ @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id,
+      @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List<String> inputs,
+ @JsonProperty(required = true, value = JSON_PROPERTY_TYPE)
+ ObservabilityPipelineHttpClientDestinationType type) {
+ this.encoding = encoding;
+ this.unparsed |= !encoding.isValid();
+ this.id = id;
+ this.inputs = inputs;
+ this.type = type;
+ this.unparsed |= !type.isValid();
+ }
+
+ public ObservabilityPipelineHttpClientDestination authStrategy(
+ ObservabilityPipelineHttpClientDestinationAuthStrategy authStrategy) {
+ this.authStrategy = authStrategy;
+ this.unparsed |= !authStrategy.isValid();
+ return this;
+ }
+
+ /**
+ * HTTP authentication strategy.
+ *
+ * @return authStrategy
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_AUTH_STRATEGY)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineHttpClientDestinationAuthStrategy getAuthStrategy() {
+ return authStrategy;
+ }
+
+ public void setAuthStrategy(ObservabilityPipelineHttpClientDestinationAuthStrategy authStrategy) {
+ if (!authStrategy.isValid()) {
+ this.unparsed = true;
+ }
+ this.authStrategy = authStrategy;
+ }
+
+ public ObservabilityPipelineHttpClientDestination compression(
+ ObservabilityPipelineHttpClientDestinationCompression compression) {
+ this.compression = compression;
+ this.unparsed |= compression.unparsed;
+ return this;
+ }
+
+ /**
+ * Compression configuration for HTTP requests.
+ *
+ * @return compression
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_COMPRESSION)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineHttpClientDestinationCompression getCompression() {
+ return compression;
+ }
+
+ public void setCompression(ObservabilityPipelineHttpClientDestinationCompression compression) {
+ this.compression = compression;
+ }
+
+ public ObservabilityPipelineHttpClientDestination encoding(
+ ObservabilityPipelineHttpClientDestinationEncoding encoding) {
+ this.encoding = encoding;
+ this.unparsed |= !encoding.isValid();
+ return this;
+ }
+
+ /**
+ * Encoding format for log events.
+ *
+ * @return encoding
+ */
+ @JsonProperty(JSON_PROPERTY_ENCODING)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public ObservabilityPipelineHttpClientDestinationEncoding getEncoding() {
+ return encoding;
+ }
+
+ public void setEncoding(ObservabilityPipelineHttpClientDestinationEncoding encoding) {
+ if (!encoding.isValid()) {
+ this.unparsed = true;
+ }
+ this.encoding = encoding;
+ }
+
+ public ObservabilityPipelineHttpClientDestination id(String id) {
+ this.id = id;
+ return this;
+ }
+
+ /**
+ * The unique identifier for this component.
+ *
+ * @return id
+ */
+ @JsonProperty(JSON_PROPERTY_ID)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+  public ObservabilityPipelineHttpClientDestination inputs(List<String> inputs) {
+ this.inputs = inputs;
+ return this;
+ }
+
+ public ObservabilityPipelineHttpClientDestination addInputsItem(String inputsItem) {
+ this.inputs.add(inputsItem);
+ return this;
+ }
+
+ /**
+ * A list of component IDs whose output is used as the input for this component.
+ *
+ * @return inputs
+ */
+ @JsonProperty(JSON_PROPERTY_INPUTS)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+  public List<String> getInputs() {
+ return inputs;
+ }
+
+  public void setInputs(List<String> inputs) {
+ this.inputs = inputs;
+ }
+
+ public ObservabilityPipelineHttpClientDestination tls(ObservabilityPipelineTls tls) {
+ this.tls = tls;
+ this.unparsed |= tls.unparsed;
+ return this;
+ }
+
+ /**
+ * Configuration for enabling TLS encryption between the pipeline component and external services.
+ *
+ * @return tls
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_TLS)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineTls getTls() {
+ return tls;
+ }
+
+ public void setTls(ObservabilityPipelineTls tls) {
+ this.tls = tls;
+ }
+
+ public ObservabilityPipelineHttpClientDestination type(
+ ObservabilityPipelineHttpClientDestinationType type) {
+ this.type = type;
+ this.unparsed |= !type.isValid();
+ return this;
+ }
+
+ /**
+ * The destination type. The value should always be http_client.
+ *
+ * @return type
+ */
+ @JsonProperty(JSON_PROPERTY_TYPE)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public ObservabilityPipelineHttpClientDestinationType getType() {
+ return type;
+ }
+
+ public void setType(ObservabilityPipelineHttpClientDestinationType type) {
+ if (!type.isValid()) {
+ this.unparsed = true;
+ }
+ this.type = type;
+ }
+
+ /**
+ * A container for additional, undeclared properties. This is a holder for any undeclared
+ * properties as specified with the 'additionalProperties' keyword in the OAS document.
+ */
+  private Map<String, Object> additionalProperties;
+
+ /**
+ * Set the additional (undeclared) property with the specified name and value. If the property
+ * does not already exist, create it otherwise replace it.
+ *
+ * @param key The arbitrary key to set
+ * @param value The associated value
+ * @return ObservabilityPipelineHttpClientDestination
+ */
+ @JsonAnySetter
+ public ObservabilityPipelineHttpClientDestination putAdditionalProperty(
+ String key, Object value) {
+ if (this.additionalProperties == null) {
+      this.additionalProperties = new HashMap<String, Object>();
+ }
+ this.additionalProperties.put(key, value);
+ return this;
+ }
+
+ /**
+ * Return the additional (undeclared) property.
+ *
+ * @return The additional properties
+ */
+ @JsonAnyGetter
+  public Map<String, Object> getAdditionalProperties() {
+ return additionalProperties;
+ }
+
+ /**
+ * Return the additional (undeclared) property with the specified name.
+ *
+ * @param key The arbitrary key to get
+ * @return The specific additional property for the given key
+ */
+ public Object getAdditionalProperty(String key) {
+ if (this.additionalProperties == null) {
+ return null;
+ }
+ return this.additionalProperties.get(key);
+ }
+
+ /** Return true if this ObservabilityPipelineHttpClientDestination object is equal to o. */
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ ObservabilityPipelineHttpClientDestination observabilityPipelineHttpClientDestination =
+ (ObservabilityPipelineHttpClientDestination) o;
+ return Objects.equals(
+ this.authStrategy, observabilityPipelineHttpClientDestination.authStrategy)
+ && Objects.equals(this.compression, observabilityPipelineHttpClientDestination.compression)
+ && Objects.equals(this.encoding, observabilityPipelineHttpClientDestination.encoding)
+ && Objects.equals(this.id, observabilityPipelineHttpClientDestination.id)
+ && Objects.equals(this.inputs, observabilityPipelineHttpClientDestination.inputs)
+ && Objects.equals(this.tls, observabilityPipelineHttpClientDestination.tls)
+ && Objects.equals(this.type, observabilityPipelineHttpClientDestination.type)
+ && Objects.equals(
+ this.additionalProperties,
+ observabilityPipelineHttpClientDestination.additionalProperties);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ authStrategy, compression, encoding, id, inputs, tls, type, additionalProperties);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("class ObservabilityPipelineHttpClientDestination {\n");
+ sb.append(" authStrategy: ").append(toIndentedString(authStrategy)).append("\n");
+ sb.append(" compression: ").append(toIndentedString(compression)).append("\n");
+ sb.append(" encoding: ").append(toIndentedString(encoding)).append("\n");
+ sb.append(" id: ").append(toIndentedString(id)).append("\n");
+ sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n");
+ sb.append(" tls: ").append(toIndentedString(tls)).append("\n");
+ sb.append(" type: ").append(toIndentedString(type)).append("\n");
+ sb.append(" additionalProperties: ")
+ .append(toIndentedString(additionalProperties))
+ .append("\n");
+ sb.append('}');
+ return sb.toString();
+ }
+
+ /**
+ * Convert the given object to string with each line indented by 4 spaces (except the first line).
+ */
+ private String toIndentedString(Object o) {
+ if (o == null) {
+ return "null";
+ }
+ return o.toString().replace("\n", "\n ");
+ }
+}
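
Reviewer note: a minimal construction sketch for the new model above, assuming the generated client is on the classpath. Only the fluent setters visible in this file and the companion enums added later in this diff are used; the component IDs are hypothetical.

```java
import com.datadog.api.client.v2.model.*;
import java.util.Collections;

public class HttpClientDestinationExample {
  public static void main(String[] args) {
    // Build the destination with its required fields (id, type, encoding, inputs)
    // plus the optional auth strategy and gzip compression.
    ObservabilityPipelineHttpClientDestination destination =
        new ObservabilityPipelineHttpClientDestination()
            .id("http-client-dest-1") // hypothetical component ID
            .type(ObservabilityPipelineHttpClientDestinationType.HTTP_CLIENT)
            .encoding(ObservabilityPipelineHttpClientDestinationEncoding.JSON)
            .inputs(Collections.singletonList("upstream-processor")) // hypothetical input ID
            .authStrategy(ObservabilityPipelineHttpClientDestinationAuthStrategy.BEARER)
            .compression(
                new ObservabilityPipelineHttpClientDestinationCompression()
                    .algorithm(
                        ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.GZIP));
    System.out.println(destination);
  }
}
```
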
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationAuthStrategy.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationAuthStrategy.java
new file mode 100644
index 00000000000..2a858d6a60c
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationAuthStrategy.java
@@ -0,0 +1,65 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** HTTP authentication strategy. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineHttpClientDestinationAuthStrategy
+ .ObservabilityPipelineHttpClientDestinationAuthStrategySerializer.class)
+public class ObservabilityPipelineHttpClientDestinationAuthStrategy extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues =
+      new HashSet<String>(Arrays.asList("basic", "bearer"));
+
+ public static final ObservabilityPipelineHttpClientDestinationAuthStrategy BASIC =
+ new ObservabilityPipelineHttpClientDestinationAuthStrategy("basic");
+ public static final ObservabilityPipelineHttpClientDestinationAuthStrategy BEARER =
+ new ObservabilityPipelineHttpClientDestinationAuthStrategy("bearer");
+
+ ObservabilityPipelineHttpClientDestinationAuthStrategy(String value) {
+ super(value, allowedValues);
+ }
+
+ public static class ObservabilityPipelineHttpClientDestinationAuthStrategySerializer
+      extends StdSerializer<ObservabilityPipelineHttpClientDestinationAuthStrategy> {
+ public ObservabilityPipelineHttpClientDestinationAuthStrategySerializer(
+        Class<ObservabilityPipelineHttpClientDestinationAuthStrategy> t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineHttpClientDestinationAuthStrategySerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineHttpClientDestinationAuthStrategy value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineHttpClientDestinationAuthStrategy fromValue(String value) {
+ return new ObservabilityPipelineHttpClientDestinationAuthStrategy(value);
+ }
+}
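
A short sketch of the lenient parsing that the `fromValue`/`isValid` pair above provides; `oauth2` is an invented value used only to exercise the unknown-value path.

```java
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestinationAuthStrategy;

public class AuthStrategyLeniencyExample {
  public static void main(String[] args) {
    // fromValue never throws on unrecognized strings, so values added to the
    // API later do not break older clients.
    ObservabilityPipelineHttpClientDestinationAuthStrategy known =
        ObservabilityPipelineHttpClientDestinationAuthStrategy.fromValue("bearer");
    ObservabilityPipelineHttpClientDestinationAuthStrategy unknown =
        ObservabilityPipelineHttpClientDestinationAuthStrategy.fromValue("oauth2");
    System.out.println(known.isValid()); // true
    System.out.println(unknown.isValid()); // false; containing models then set `unparsed`
  }
}
```
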
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompression.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompression.java
new file mode 100644
index 00000000000..839d55c5f8f
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompression.java
@@ -0,0 +1,159 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+/** Compression configuration for HTTP requests. */
+@JsonPropertyOrder({ObservabilityPipelineHttpClientDestinationCompression.JSON_PROPERTY_ALGORITHM})
+@jakarta.annotation.Generated(
+ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+public class ObservabilityPipelineHttpClientDestinationCompression {
+ @JsonIgnore public boolean unparsed = false;
+ public static final String JSON_PROPERTY_ALGORITHM = "algorithm";
+ private ObservabilityPipelineHttpClientDestinationCompressionAlgorithm algorithm;
+
+ public ObservabilityPipelineHttpClientDestinationCompression() {}
+
+ @JsonCreator
+ public ObservabilityPipelineHttpClientDestinationCompression(
+ @JsonProperty(required = true, value = JSON_PROPERTY_ALGORITHM)
+ ObservabilityPipelineHttpClientDestinationCompressionAlgorithm algorithm) {
+ this.algorithm = algorithm;
+ this.unparsed |= !algorithm.isValid();
+ }
+
+ public ObservabilityPipelineHttpClientDestinationCompression algorithm(
+ ObservabilityPipelineHttpClientDestinationCompressionAlgorithm algorithm) {
+ this.algorithm = algorithm;
+ this.unparsed |= !algorithm.isValid();
+ return this;
+ }
+
+ /**
+ * Compression algorithm.
+ *
+ * @return algorithm
+ */
+ @JsonProperty(JSON_PROPERTY_ALGORITHM)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public ObservabilityPipelineHttpClientDestinationCompressionAlgorithm getAlgorithm() {
+ return algorithm;
+ }
+
+ public void setAlgorithm(
+ ObservabilityPipelineHttpClientDestinationCompressionAlgorithm algorithm) {
+ if (!algorithm.isValid()) {
+ this.unparsed = true;
+ }
+ this.algorithm = algorithm;
+ }
+
+ /**
+ * A container for additional, undeclared properties. This is a holder for any undeclared
+ * properties as specified with the 'additionalProperties' keyword in the OAS document.
+ */
+  private Map<String, Object> additionalProperties;
+
+ /**
+ * Set the additional (undeclared) property with the specified name and value. If the property
+ * does not already exist, create it otherwise replace it.
+ *
+ * @param key The arbitrary key to set
+ * @param value The associated value
+ * @return ObservabilityPipelineHttpClientDestinationCompression
+ */
+ @JsonAnySetter
+ public ObservabilityPipelineHttpClientDestinationCompression putAdditionalProperty(
+ String key, Object value) {
+ if (this.additionalProperties == null) {
+      this.additionalProperties = new HashMap<String, Object>();
+ }
+ this.additionalProperties.put(key, value);
+ return this;
+ }
+
+ /**
+ * Return the additional (undeclared) property.
+ *
+ * @return The additional properties
+ */
+ @JsonAnyGetter
+  public Map<String, Object> getAdditionalProperties() {
+ return additionalProperties;
+ }
+
+ /**
+ * Return the additional (undeclared) property with the specified name.
+ *
+ * @param key The arbitrary key to get
+ * @return The specific additional property for the given key
+ */
+ public Object getAdditionalProperty(String key) {
+ if (this.additionalProperties == null) {
+ return null;
+ }
+ return this.additionalProperties.get(key);
+ }
+
+ /**
+ * Return true if this ObservabilityPipelineHttpClientDestinationCompression object is equal to o.
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ ObservabilityPipelineHttpClientDestinationCompression
+ observabilityPipelineHttpClientDestinationCompression =
+ (ObservabilityPipelineHttpClientDestinationCompression) o;
+ return Objects.equals(
+ this.algorithm, observabilityPipelineHttpClientDestinationCompression.algorithm)
+ && Objects.equals(
+ this.additionalProperties,
+ observabilityPipelineHttpClientDestinationCompression.additionalProperties);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(algorithm, additionalProperties);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("class ObservabilityPipelineHttpClientDestinationCompression {\n");
+ sb.append(" algorithm: ").append(toIndentedString(algorithm)).append("\n");
+ sb.append(" additionalProperties: ")
+ .append(toIndentedString(additionalProperties))
+ .append("\n");
+ sb.append('}');
+ return sb.toString();
+ }
+
+ /**
+ * Convert the given object to string with each line indented by 4 spaces (except the first line).
+ */
+ private String toIndentedString(Object o) {
+ if (o == null) {
+ return "null";
+ }
+ return o.toString().replace("\n", "\n ");
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.java
new file mode 100644
index 00000000000..f8cd66fb4f6
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.java
@@ -0,0 +1,64 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** Compression algorithm. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineHttpClientDestinationCompressionAlgorithm
+ .ObservabilityPipelineHttpClientDestinationCompressionAlgorithmSerializer.class)
+public class ObservabilityPipelineHttpClientDestinationCompressionAlgorithm
+    extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues = new HashSet<String>(Arrays.asList("gzip"));
+
+ public static final ObservabilityPipelineHttpClientDestinationCompressionAlgorithm GZIP =
+ new ObservabilityPipelineHttpClientDestinationCompressionAlgorithm("gzip");
+
+ ObservabilityPipelineHttpClientDestinationCompressionAlgorithm(String value) {
+ super(value, allowedValues);
+ }
+
+ public static class ObservabilityPipelineHttpClientDestinationCompressionAlgorithmSerializer
+      extends StdSerializer<ObservabilityPipelineHttpClientDestinationCompressionAlgorithm> {
+ public ObservabilityPipelineHttpClientDestinationCompressionAlgorithmSerializer(
+        Class<ObservabilityPipelineHttpClientDestinationCompressionAlgorithm> t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineHttpClientDestinationCompressionAlgorithmSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineHttpClientDestinationCompressionAlgorithm value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineHttpClientDestinationCompressionAlgorithm fromValue(
+ String value) {
+ return new ObservabilityPipelineHttpClientDestinationCompressionAlgorithm(value);
+ }
+}
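
A hedged serialization sketch, assuming a plain Jackson `ObjectMapper` (the generated client configures its own mapper): the custom serializer above writes the enum as its raw string, so the compression object from the previous file serializes to `{"algorithm":"gzip"}`.

```java
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestinationCompression;
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestinationCompressionAlgorithm;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CompressionSerializationExample {
  public static void main(String[] args) throws Exception {
    ObservabilityPipelineHttpClientDestinationCompression compression =
        new ObservabilityPipelineHttpClientDestinationCompression(
            ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.GZIP);
    // Expected output: {"algorithm":"gzip"}
    System.out.println(new ObjectMapper().writeValueAsString(compression));
  }
}
```
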
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationEncoding.java
new file mode 100644
index 00000000000..21ae289f9dd
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationEncoding.java
@@ -0,0 +1,62 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** Encoding format for log events. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineHttpClientDestinationEncoding
+ .ObservabilityPipelineHttpClientDestinationEncodingSerializer.class)
+public class ObservabilityPipelineHttpClientDestinationEncoding extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues = new HashSet<String>(Arrays.asList("json"));
+
+ public static final ObservabilityPipelineHttpClientDestinationEncoding JSON =
+ new ObservabilityPipelineHttpClientDestinationEncoding("json");
+
+ ObservabilityPipelineHttpClientDestinationEncoding(String value) {
+ super(value, allowedValues);
+ }
+
+ public static class ObservabilityPipelineHttpClientDestinationEncodingSerializer
+      extends StdSerializer<ObservabilityPipelineHttpClientDestinationEncoding> {
+ public ObservabilityPipelineHttpClientDestinationEncodingSerializer(
+        Class<ObservabilityPipelineHttpClientDestinationEncoding> t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineHttpClientDestinationEncodingSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineHttpClientDestinationEncoding value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineHttpClientDestinationEncoding fromValue(String value) {
+ return new ObservabilityPipelineHttpClientDestinationEncoding(value);
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationType.java
new file mode 100644
index 00000000000..992b5925d6b
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationType.java
@@ -0,0 +1,63 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The destination type. The value should always be http_client. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineHttpClientDestinationType
+ .ObservabilityPipelineHttpClientDestinationTypeSerializer.class)
+public class ObservabilityPipelineHttpClientDestinationType extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues =
+      new HashSet<String>(Arrays.asList("http_client"));
+
+ public static final ObservabilityPipelineHttpClientDestinationType HTTP_CLIENT =
+ new ObservabilityPipelineHttpClientDestinationType("http_client");
+
+ ObservabilityPipelineHttpClientDestinationType(String value) {
+ super(value, allowedValues);
+ }
+
+ public static class ObservabilityPipelineHttpClientDestinationTypeSerializer
+      extends StdSerializer<ObservabilityPipelineHttpClientDestinationType> {
+ public ObservabilityPipelineHttpClientDestinationTypeSerializer(
+        Class<ObservabilityPipelineHttpClientDestinationType> t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineHttpClientDestinationTypeSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineHttpClientDestinationType value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineHttpClientDestinationType fromValue(String value) {
+ return new ObservabilityPipelineHttpClientDestinationType(value);
+ }
+}
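
Finally, a deserialization sketch tying the new destination and its enums together, again with a plain Jackson mapper rather than the client's own; the payload and component IDs are illustrative only.

```java
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestination;
import com.fasterxml.jackson.databind.ObjectMapper;

public class HttpClientDestinationParseExample {
  public static void main(String[] args) throws Exception {
    // Hypothetical payload covering the four required properties.
    String payload =
        "{\"id\":\"dest-1\",\"type\":\"http_client\",\"encoding\":\"json\","
            + "\"inputs\":[\"source-1\"]}";
    ObservabilityPipelineHttpClientDestination dest =
        new ObjectMapper().readValue(payload, ObservabilityPipelineHttpClientDestination.class);
    System.out.println(dest.getType()); // expected: http_client
    System.out.println(dest.unparsed); // expected: false, since every enum value is known
  }
}
```
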
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java
index 9cabf897e0a..3b1c3a9a1b4 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java
@@ -17,7 +17,11 @@
import java.util.Map;
import java.util.Objects;
-/** The http_client source scrapes logs from HTTP endpoints at regular intervals. */
+/**
+ * The http_client source scrapes logs from HTTP endpoints at regular intervals.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineHttpClientSource.JSON_PROPERTY_AUTH_STRATEGY,
ObservabilityPipelineHttpClientSource.JSON_PROPERTY_DECODING,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java
index a3b47a71c7e..bcd3e06767e 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java
@@ -17,7 +17,11 @@
import java.util.Map;
import java.util.Objects;
-/** The http_server source collects logs over HTTP POST from external services. */
+/**
+ * The http_server source collects logs over HTTP POST from external services.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineHttpServerSource.JSON_PROPERTY_AUTH_STRATEGY,
ObservabilityPipelineHttpServerSource.JSON_PROPERTY_DECODING,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java
index d8c3ea6254d..2718a0192d0 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java
@@ -19,7 +19,11 @@
import java.util.Map;
import java.util.Objects;
-/** The kafka source ingests data from Apache Kafka topics. */
+/**
+ * The kafka source ingests data from Apache Kafka topics.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineKafkaSource.JSON_PROPERTY_GROUP_ID,
ObservabilityPipelineKafkaSource.JSON_PROPERTY_ID,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java
index 18a531220ef..4dfe9cdf425 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java
@@ -17,7 +17,11 @@
import java.util.Map;
import java.util.Objects;
-/** The logstash source ingests logs from a Logstash forwarder. */
+/**
+ * The logstash source ingests logs from a Logstash forwarder.
+ *
+ * Supported pipeline types: logs
+ */
@JsonPropertyOrder({
ObservabilityPipelineLogstashSource.JSON_PROPERTY_ID,
ObservabilityPipelineLogstashSource.JSON_PROPERTY_TLS,
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessor.java
new file mode 100644
index 00000000000..630d4f6fcf0
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessor.java
@@ -0,0 +1,288 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * The metric_tags processor filters metrics based on their tags using Datadog tag key
+ * patterns.
+ *
+ * Supported pipeline types: metrics
+ */
+@JsonPropertyOrder({
+ ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_ENABLED,
+ ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_ID,
+ ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_INCLUDE,
+ ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_RULES,
+ ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_TYPE
+})
+@jakarta.annotation.Generated(
+ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+public class ObservabilityPipelineMetricTagsProcessor {
+ @JsonIgnore public boolean unparsed = false;
+ public static final String JSON_PROPERTY_ENABLED = "enabled";
+ private Boolean enabled;
+
+ public static final String JSON_PROPERTY_ID = "id";
+ private String id;
+
+ public static final String JSON_PROPERTY_INCLUDE = "include";
+ private String include;
+
+ public static final String JSON_PROPERTY_RULES = "rules";
+  private List<ObservabilityPipelineMetricTagsProcessorRule> rules = new ArrayList<>();
+
+ public static final String JSON_PROPERTY_TYPE = "type";
+ private ObservabilityPipelineMetricTagsProcessorType type =
+ ObservabilityPipelineMetricTagsProcessorType.METRIC_TAGS;
+
+ public ObservabilityPipelineMetricTagsProcessor() {}
+
+ @JsonCreator
+ public ObservabilityPipelineMetricTagsProcessor(
+ @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled,
+ @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id,
+ @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include,
+ @JsonProperty(required = true, value = JSON_PROPERTY_RULES)
+          List<ObservabilityPipelineMetricTagsProcessorRule> rules,
+ @JsonProperty(required = true, value = JSON_PROPERTY_TYPE)
+ ObservabilityPipelineMetricTagsProcessorType type) {
+ this.enabled = enabled;
+ this.id = id;
+ this.include = include;
+ this.rules = rules;
+ this.type = type;
+ this.unparsed |= !type.isValid();
+ }
+
+ public ObservabilityPipelineMetricTagsProcessor enabled(Boolean enabled) {
+ this.enabled = enabled;
+ return this;
+ }
+
+ /**
+ * Whether this processor is enabled.
+ *
+ * @return enabled
+ */
+ @JsonProperty(JSON_PROPERTY_ENABLED)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public Boolean getEnabled() {
+ return enabled;
+ }
+
+ public void setEnabled(Boolean enabled) {
+ this.enabled = enabled;
+ }
+
+ public ObservabilityPipelineMetricTagsProcessor id(String id) {
+ this.id = id;
+ return this;
+ }
+
+ /**
+ * The unique identifier for this component. Used to reference this component in other parts of
+ * the pipeline (for example, as the input to downstream components).
+ *
+ * @return id
+ */
+ @JsonProperty(JSON_PROPERTY_ID)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public ObservabilityPipelineMetricTagsProcessor include(String include) {
+ this.include = include;
+ return this;
+ }
+
+ /**
+ * A Datadog search query used to determine which metrics this processor targets.
+ *
+ * @return include
+ */
+ @JsonProperty(JSON_PROPERTY_INCLUDE)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public String getInclude() {
+ return include;
+ }
+
+ public void setInclude(String include) {
+ this.include = include;
+ }
+
+ public ObservabilityPipelineMetricTagsProcessor rules(
+      List<ObservabilityPipelineMetricTagsProcessorRule> rules) {
+ this.rules = rules;
+ for (ObservabilityPipelineMetricTagsProcessorRule item : rules) {
+ this.unparsed |= item.unparsed;
+ }
+ return this;
+ }
+
+ public ObservabilityPipelineMetricTagsProcessor addRulesItem(
+ ObservabilityPipelineMetricTagsProcessorRule rulesItem) {
+ this.rules.add(rulesItem);
+ this.unparsed |= rulesItem.unparsed;
+ return this;
+ }
+
+ /**
+ * A list of rules for filtering metric tags.
+ *
+ * @return rules
+ */
+ @JsonProperty(JSON_PROPERTY_RULES)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+  public List<ObservabilityPipelineMetricTagsProcessorRule>