From 4b3b1440cac1e2ef5b03fa521c7c1ebca3de14c4 Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Tue, 30 Dec 2025 14:17:30 +0000 Subject: [PATCH] Regenerate client from commit 7dc20ad of spec repo --- .generator/schemas/v2/openapi.yaml | 217 +++++-- ...rvabilityPipelineCloudPremDestination.java | 219 +++++++ ...ilityPipelineCloudPremDestinationType.java | 62 ++ ...vabilityPipelineConfigDestinationItem.java | 197 +++++- ...ObservabilityPipelineKafkaDestination.java | 595 ++++++++++++++++++ ...tyPipelineKafkaDestinationCompression.java | 71 +++ ...ilityPipelineKafkaDestinationEncoding.java | 65 ++ ...rvabilityPipelineKafkaDestinationType.java | 62 ++ ...abilityPipelineKafkaLibrdkafkaOption.java} | 35 +- ...va => ObservabilityPipelineKafkaSasl.java} | 30 +- ...servabilityPipelineKafkaSaslMechanism.java | 67 ++ .../ObservabilityPipelineKafkaSource.java | 20 +- ...elinePipelineKafkaSourceSaslMechanism.java | 67 -- 13 files changed, 1538 insertions(+), 169 deletions(-) create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java rename src/main/java/com/datadog/api/client/v2/model/{ObservabilityPipelineKafkaSourceLibrdkafkaOption.java => ObservabilityPipelineKafkaLibrdkafkaOption.java} (78%) rename src/main/java/com/datadog/api/client/v2/model/{ObservabilityPipelineKafkaSourceSasl.java => ObservabilityPipelineKafkaSasl.java} (78%) create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java delete mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index d2192b74841..f81d82c0f21 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -35455,6 +35455,37 @@ components: role session. type: string type: object + ObservabilityPipelineCloudPremDestination: + description: The `cloud_prem` destination sends logs to Datadog CloudPrem. + properties: + id: + description: The unique identifier for this component. + example: cloud-prem-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineCloudPremDestinationType: + default: cloud_prem + description: The destination type. The value should always be `cloud_prem`. + enum: + - cloud_prem + example: cloud_prem + type: string + x-enum-varnames: + - CLOUD_PREM ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35510,6 +35541,7 @@ components: description: A destination for the pipeline. 
oneOf: - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestination' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' @@ -35528,6 +35560,7 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaDestination' ObservabilityPipelineConfigProcessorGroup: description: A group of processors. example: @@ -36826,6 +36859,151 @@ components: type: string x-enum-varnames: - HTTP_SERVER + ObservabilityPipelineKafkaDestination: + description: The `kafka` destination sends logs to Apache Kafka topics. + properties: + compression: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationEncoding' + headers_key: + description: The field name to use for Kafka message headers. + example: headers + type: string + id: + description: The unique identifier for this component. + example: kafka-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + key_field: + description: The field name to use as the Kafka message key. + example: message_id + type: string + librdkafka_options: + description: Optional list of advanced Kafka producer configuration options, + defined as key-value pairs. + items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' + type: array + message_timeout_ms: + description: Maximum time in milliseconds to wait for message delivery confirmation. + example: 300000 + format: int64 + minimum: 1 + type: integer + rate_limit_duration_secs: + description: Duration in seconds for the rate limit window. + example: 1 + format: int64 + minimum: 1 + type: integer + rate_limit_num: + description: Maximum number of messages allowed per rate limit duration. + example: 1000 + format: int64 + minimum: 1 + type: integer + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + socket_timeout_ms: + description: Socket timeout in milliseconds for network requests. + example: 60000 + format: int64 + maximum: 300000 + minimum: 10 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topic: + description: The Kafka topic name to publish logs to. + example: logs-topic + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationType' + required: + - id + - type + - inputs + - topic + - encoding + type: object + ObservabilityPipelineKafkaDestinationCompression: + description: Compression codec for Kafka messages. + enum: + - none + - gzip + - snappy + - lz4 + - zstd + example: gzip + type: string + x-enum-varnames: + - NONE + - GZIP + - SNAPPY + - LZ4 + - ZSTD + ObservabilityPipelineKafkaDestinationEncoding: + description: Encoding format for log events. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineKafkaDestinationType: + default: kafka + description: The destination type. 
The value should always be `kafka`. + enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineKafkaLibrdkafkaOption: + description: Represents a key-value pair used to configure low-level `librdkafka` + client options for Kafka source and destination, such as timeouts, buffer + sizes, and security settings. + properties: + name: + description: The name of the `librdkafka` configuration option to set. + example: fetch.message.max.bytes + type: string + value: + description: The value assigned to the specified `librdkafka` configuration + option. + example: '1048576' + type: string + required: + - name + - value + type: object + ObservabilityPipelineKafkaSasl: + description: Specifies the SASL mechanism for authenticating with a Kafka cluster. + properties: + mechanism: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSaslMechanism' + type: object + ObservabilityPipelineKafkaSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 ObservabilityPipelineKafkaSource: description: The `kafka` source ingests data from Apache Kafka topics. properties: @@ -36843,10 +37021,10 @@ components: description: Optional list of advanced Kafka client configuration options, defined as key-value pairs. items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' type: array sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' tls: $ref: '#/components/schemas/ObservabilityPipelineTls' topics: @@ -36866,30 +37044,6 @@ components: - group_id - topics type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: - description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. - properties: - name: - description: The name of the `librdkafka` configuration option to set. - example: fetch.message.max.bytes - type: string - value: - description: The value assigned to the specified `librdkafka` configuration - option. - example: '1048576' - type: string - required: - - name - - value - type: object - ObservabilityPipelineKafkaSourceSasl: - description: Specifies the SASL mechanism for authenticating with a Kafka cluster. - properties: - mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' - type: object ObservabilityPipelineKafkaSourceType: default: kafka description: The source type. The value should always be `kafka`. @@ -37279,17 +37433,6 @@ components: type: string x-enum-varnames: - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. - enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 - type: string - x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 ObservabilityPipelineQuotaProcessor: description: The Quota Processor measures logging traffic for logs that match a specified filter. 
When the configured daily quota is met, the processor diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java new file mode 100644 index 00000000000..fb9d1626d06 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java @@ -0,0 +1,219 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The cloud_prem destination sends logs to Datadog CloudPrem. */ +@JsonPropertyOrder({ + ObservabilityPipelineCloudPremDestination.JSON_PROPERTY_ID, + ObservabilityPipelineCloudPremDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineCloudPremDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineCloudPremDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineCloudPremDestinationType type = + ObservabilityPipelineCloudPremDestinationType.CLOUD_PREM; + + public ObservabilityPipelineCloudPremDestination() {} + + @JsonCreator + public ObservabilityPipelineCloudPremDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineCloudPremDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineCloudPremDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineCloudPremDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineCloudPremDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineCloudPremDestination type( + ObservabilityPipelineCloudPremDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be cloud_prem. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineCloudPremDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineCloudPremDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineCloudPremDestination + */ + @JsonAnySetter + public ObservabilityPipelineCloudPremDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineCloudPremDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineCloudPremDestination observabilityPipelineCloudPremDestination = + (ObservabilityPipelineCloudPremDestination) o; + return Objects.equals(this.id, observabilityPipelineCloudPremDestination.id) + && Objects.equals(this.inputs, observabilityPipelineCloudPremDestination.inputs) + && Objects.equals(this.type, observabilityPipelineCloudPremDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineCloudPremDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineCloudPremDestination {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java new file mode 100644 index 00000000000..e656648144e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be cloud_prem. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineCloudPremDestinationType + .ObservabilityPipelineCloudPremDestinationTypeSerializer.class) +public class ObservabilityPipelineCloudPremDestinationType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("cloud_prem")); + + public static final ObservabilityPipelineCloudPremDestinationType CLOUD_PREM = + new ObservabilityPipelineCloudPremDestinationType("cloud_prem"); + + ObservabilityPipelineCloudPremDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineCloudPremDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineCloudPremDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineCloudPremDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineCloudPremDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineCloudPremDestinationType fromValue(String value) { + return new ObservabilityPipelineCloudPremDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java index 2f4e68bdef4..8e45b1020e8 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java @@ -142,6 +142,57 @@ public ObservabilityPipelineConfigDestinationItem deserialize( e); } + // deserialize ObservabilityPipelineCloudPremDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineCloudPremDestination.class.equals(Integer.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Long.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Float.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Double.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Boolean.class) + || ObservabilityPipelineCloudPremDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineCloudPremDestination.class.equals(Integer.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineCloudPremDestination.class.equals(Float.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineCloudPremDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineCloudPremDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineCloudPremDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' 
count may be higher than it should be. + if (!((ObservabilityPipelineCloudPremDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineCloudPremDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineCloudPremDestination'", + e); + } + // deserialize ObservabilityPipelineAmazonS3Destination try { boolean attemptParsing = true; @@ -1067,6 +1118,55 @@ public ObservabilityPipelineConfigDestinationItem deserialize( e); } + // deserialize ObservabilityPipelineKafkaDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineKafkaDestination.class.equals(Integer.class) + || ObservabilityPipelineKafkaDestination.class.equals(Long.class) + || ObservabilityPipelineKafkaDestination.class.equals(Float.class) + || ObservabilityPipelineKafkaDestination.class.equals(Double.class) + || ObservabilityPipelineKafkaDestination.class.equals(Boolean.class) + || ObservabilityPipelineKafkaDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineKafkaDestination.class.equals(Integer.class) + || ObservabilityPipelineKafkaDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineKafkaDestination.class.equals(Float.class) + || ObservabilityPipelineKafkaDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineKafkaDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineKafkaDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineKafkaDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineKafkaDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineKafkaDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineKafkaDestination'", + e); + } + ObservabilityPipelineConfigDestinationItem ret = new ObservabilityPipelineConfigDestinationItem(); if (match == 1) { @@ -1103,6 +1203,11 @@ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineDatadogLo setActualInstance(o); } + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineCloudPremDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineAmazonS3Destination o) { super("oneOf", Boolean.FALSE); setActualInstance(o); @@ -1200,10 +1305,18 @@ public ObservabilityPipelineConfigDestinationItem( setActualInstance(o); } + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineKafkaDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + static { schemas.put( "ObservabilityPipelineDatadogLogsDestination", new GenericType() {}); + schemas.put( + "ObservabilityPipelineCloudPremDestination", + new GenericType() {}); schemas.put( "ObservabilityPipelineAmazonS3Destination", new GenericType() {}); @@ -1254,6 +1367,9 @@ public ObservabilityPipelineConfigDestinationItem( schemas.put( "ObservabilityPipelineGooglePubSubDestination", new GenericType() {}); + schemas.put( + "ObservabilityPipelineKafkaDestination", + new GenericType() {}); JSON.registerDescendants( ObservabilityPipelineConfigDestinationItem.class, Collections.unmodifiableMap(schemas)); } @@ -1266,16 +1382,17 @@ public Map getSchemas() { /** * Set the instance that matches the oneOf child schema, check the instance parameter is valid * against the oneOf child schemas: ObservabilityPipelineDatadogLogsDestination, - * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination, - * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination, - * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination, - * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination, - * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination, - * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination, - * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineAmazonOpenSearchDestination, - * ObservabilityPipelineSocketDestination, ObservabilityPipelineAmazonSecurityLakeDestination, + * ObservabilityPipelineCloudPremDestination, ObservabilityPipelineAmazonS3Destination, + * ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination, + * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineElasticsearchDestination, + * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination, + * AzureStorageDestination, MicrosoftSentinelDestination, + * ObservabilityPipelineGoogleChronicleDestination, ObservabilityPipelineNewRelicDestination, + * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination, + * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination, + * ObservabilityPipelineAmazonSecurityLakeDestination, * 
ObservabilityPipelineCrowdStrikeNextGenSiemDestination, - * ObservabilityPipelineGooglePubSubDestination + * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination * *
<p>
It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -1287,6 +1404,11 @@ public void setActualInstance(Object instance) { super.setActualInstance(instance); return; } + if (JSON.isInstanceOf( + ObservabilityPipelineCloudPremDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } if (JSON.isInstanceOf( ObservabilityPipelineAmazonS3Destination.class, instance, new HashSet>())) { super.setActualInstance(instance); @@ -1383,6 +1505,11 @@ public void setActualInstance(Object instance) { super.setActualInstance(instance); return; } + if (JSON.isInstanceOf( + ObservabilityPipelineKafkaDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { super.setActualInstance(instance); @@ -1390,6 +1517,7 @@ public void setActualInstance(Object instance) { } throw new RuntimeException( "Invalid instance type. Must be ObservabilityPipelineDatadogLogsDestination," + + " ObservabilityPipelineCloudPremDestination," + " ObservabilityPipelineAmazonS3Destination," + " ObservabilityPipelineGoogleCloudStorageDestination," + " ObservabilityPipelineSplunkHecDestination," @@ -1405,25 +1533,26 @@ public void setActualInstance(Object instance) { + " ObservabilityPipelineSocketDestination," + " ObservabilityPipelineAmazonSecurityLakeDestination," + " ObservabilityPipelineCrowdStrikeNextGenSiemDestination," - + " ObservabilityPipelineGooglePubSubDestination"); + + " ObservabilityPipelineGooglePubSubDestination," + + " ObservabilityPipelineKafkaDestination"); } /** * Get the actual instance, which can be the following: - * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineAmazonS3Destination, - * ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination, - * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineElasticsearchDestination, - * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination, - * AzureStorageDestination, MicrosoftSentinelDestination, - * ObservabilityPipelineGoogleChronicleDestination, ObservabilityPipelineNewRelicDestination, - * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination, - * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination, - * ObservabilityPipelineAmazonSecurityLakeDestination, + * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineCloudPremDestination, + * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination, + * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination, + * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination, + * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination, + * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination, + * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination, + * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineAmazonOpenSearchDestination, + * ObservabilityPipelineSocketDestination, ObservabilityPipelineAmazonSecurityLakeDestination, * ObservabilityPipelineCrowdStrikeNextGenSiemDestination, - * ObservabilityPipelineGooglePubSubDestination + * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination * * @return The actual 
instance (ObservabilityPipelineDatadogLogsDestination, - * ObservabilityPipelineAmazonS3Destination, + * ObservabilityPipelineCloudPremDestination, ObservabilityPipelineAmazonS3Destination, * ObservabilityPipelineGoogleCloudStorageDestination, * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination, * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination, @@ -1434,7 +1563,7 @@ public void setActualInstance(Object instance) { * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination, * ObservabilityPipelineAmazonSecurityLakeDestination, * ObservabilityPipelineCrowdStrikeNextGenSiemDestination, - * ObservabilityPipelineGooglePubSubDestination) + * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination) */ @Override public Object getActualInstance() { @@ -1454,6 +1583,18 @@ public Object getActualInstance() { return (ObservabilityPipelineDatadogLogsDestination) super.getActualInstance(); } + /** + * Get the actual instance of `ObservabilityPipelineCloudPremDestination`. If the actual instance + * is not `ObservabilityPipelineCloudPremDestination`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineCloudPremDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineCloudPremDestination` + */ + public ObservabilityPipelineCloudPremDestination getObservabilityPipelineCloudPremDestination() + throws ClassCastException { + return (ObservabilityPipelineCloudPremDestination) super.getActualInstance(); + } + /** * Get the actual instance of `ObservabilityPipelineAmazonS3Destination`. If the actual instance * is not `ObservabilityPipelineAmazonS3Destination`, the ClassCastException will be thrown. @@ -1682,4 +1823,16 @@ public ObservabilityPipelineSocketDestination getObservabilityPipelineSocketDest getObservabilityPipelineGooglePubSubDestination() throws ClassCastException { return (ObservabilityPipelineGooglePubSubDestination) super.getActualInstance(); } + + /** + * Get the actual instance of `ObservabilityPipelineKafkaDestination`. If the actual instance is + * not `ObservabilityPipelineKafkaDestination`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineKafkaDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineKafkaDestination` + */ + public ObservabilityPipelineKafkaDestination getObservabilityPipelineKafkaDestination() + throws ClassCastException { + return (ObservabilityPipelineKafkaDestination) super.getActualInstance(); + } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java new file mode 100644 index 00000000000..2f731674a97 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java @@ -0,0 +1,595 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The kafka destination sends logs to Apache Kafka topics. */ +@JsonPropertyOrder({ + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_COMPRESSION, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_ENCODING, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_HEADERS_KEY, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_ID, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_KEY_FIELD, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_LIBRDKAFKA_OPTIONS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_MESSAGE_TIMEOUT_MS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_RATE_LIMIT_DURATION_SECS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_RATE_LIMIT_NUM, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_SASL, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_SOCKET_TIMEOUT_MS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TLS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TOPIC, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineKafkaDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_COMPRESSION = "compression"; + private ObservabilityPipelineKafkaDestinationCompression compression; + + public static final String JSON_PROPERTY_ENCODING = "encoding"; + private ObservabilityPipelineKafkaDestinationEncoding encoding; + + public static final String JSON_PROPERTY_HEADERS_KEY = "headers_key"; + private String headersKey; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_KEY_FIELD = "key_field"; + private String keyField; + + public static final String JSON_PROPERTY_LIBRDKAFKA_OPTIONS = "librdkafka_options"; + private List librdkafkaOptions = null; + + public static final String JSON_PROPERTY_MESSAGE_TIMEOUT_MS = "message_timeout_ms"; + private Long messageTimeoutMs; + + public static final String JSON_PROPERTY_RATE_LIMIT_DURATION_SECS = "rate_limit_duration_secs"; + private Long rateLimitDurationSecs; + + public static final String JSON_PROPERTY_RATE_LIMIT_NUM = "rate_limit_num"; + private Long rateLimitNum; + + public static final String JSON_PROPERTY_SASL = "sasl"; + private ObservabilityPipelineKafkaSasl sasl; + + public static final String JSON_PROPERTY_SOCKET_TIMEOUT_MS = "socket_timeout_ms"; + private Long socketTimeoutMs; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TOPIC = "topic"; + private String topic; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineKafkaDestinationType 
type = + ObservabilityPipelineKafkaDestinationType.KAFKA; + + public ObservabilityPipelineKafkaDestination() {} + + @JsonCreator + public ObservabilityPipelineKafkaDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ENCODING) + ObservabilityPipelineKafkaDestinationEncoding encoding, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TOPIC) String topic, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineKafkaDestinationType type) { + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + this.id = id; + this.inputs = inputs; + this.topic = topic; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineKafkaDestination compression( + ObservabilityPipelineKafkaDestinationCompression compression) { + this.compression = compression; + this.unparsed |= !compression.isValid(); + return this; + } + + /** + * Compression codec for Kafka messages. + * + * @return compression + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_COMPRESSION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineKafkaDestinationCompression getCompression() { + return compression; + } + + public void setCompression(ObservabilityPipelineKafkaDestinationCompression compression) { + if (!compression.isValid()) { + this.unparsed = true; + } + this.compression = compression; + } + + public ObservabilityPipelineKafkaDestination encoding( + ObservabilityPipelineKafkaDestinationEncoding encoding) { + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + return this; + } + + /** + * Encoding format for log events. + * + * @return encoding + */ + @JsonProperty(JSON_PROPERTY_ENCODING) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineKafkaDestinationEncoding getEncoding() { + return encoding; + } + + public void setEncoding(ObservabilityPipelineKafkaDestinationEncoding encoding) { + if (!encoding.isValid()) { + this.unparsed = true; + } + this.encoding = encoding; + } + + public ObservabilityPipelineKafkaDestination headersKey(String headersKey) { + this.headersKey = headersKey; + return this; + } + + /** + * The field name to use for Kafka message headers. + * + * @return headersKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_HEADERS_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getHeadersKey() { + return headersKey; + } + + public void setHeadersKey(String headersKey) { + this.headersKey = headersKey; + } + + public ObservabilityPipelineKafkaDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineKafkaDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineKafkaDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineKafkaDestination keyField(String keyField) { + this.keyField = keyField; + return this; + } + + /** + * The field name to use as the Kafka message key. + * + * @return keyField + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_KEY_FIELD) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getKeyField() { + return keyField; + } + + public void setKeyField(String keyField) { + this.keyField = keyField; + } + + public ObservabilityPipelineKafkaDestination librdkafkaOptions( + List librdkafkaOptions) { + this.librdkafkaOptions = librdkafkaOptions; + for (ObservabilityPipelineKafkaLibrdkafkaOption item : librdkafkaOptions) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineKafkaDestination addLibrdkafkaOptionsItem( + ObservabilityPipelineKafkaLibrdkafkaOption librdkafkaOptionsItem) { + if (this.librdkafkaOptions == null) { + this.librdkafkaOptions = new ArrayList<>(); + } + this.librdkafkaOptions.add(librdkafkaOptionsItem); + this.unparsed |= librdkafkaOptionsItem.unparsed; + return this; + } + + /** + * Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + * + * @return librdkafkaOptions + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_LIBRDKAFKA_OPTIONS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getLibrdkafkaOptions() { + return librdkafkaOptions; + } + + public void setLibrdkafkaOptions( + List librdkafkaOptions) { + this.librdkafkaOptions = librdkafkaOptions; + } + + public ObservabilityPipelineKafkaDestination messageTimeoutMs(Long messageTimeoutMs) { + this.messageTimeoutMs = messageTimeoutMs; + return this; + } + + /** + * Maximum time in milliseconds to wait for message delivery confirmation. minimum: 1 + * + * @return messageTimeoutMs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_MESSAGE_TIMEOUT_MS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getMessageTimeoutMs() { + return messageTimeoutMs; + } + + public void setMessageTimeoutMs(Long messageTimeoutMs) { + this.messageTimeoutMs = messageTimeoutMs; + } + + public ObservabilityPipelineKafkaDestination rateLimitDurationSecs(Long rateLimitDurationSecs) { + this.rateLimitDurationSecs = rateLimitDurationSecs; + return this; + } + + /** + * Duration in seconds for the rate limit window. minimum: 1 + * + * @return rateLimitDurationSecs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_RATE_LIMIT_DURATION_SECS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getRateLimitDurationSecs() { + return rateLimitDurationSecs; + } + + public void setRateLimitDurationSecs(Long rateLimitDurationSecs) { + this.rateLimitDurationSecs = rateLimitDurationSecs; + } + + public ObservabilityPipelineKafkaDestination rateLimitNum(Long rateLimitNum) { + this.rateLimitNum = rateLimitNum; + return this; + } + + /** + * Maximum number of messages allowed per rate limit duration. 
minimum: 1 + * + * @return rateLimitNum + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_RATE_LIMIT_NUM) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getRateLimitNum() { + return rateLimitNum; + } + + public void setRateLimitNum(Long rateLimitNum) { + this.rateLimitNum = rateLimitNum; + } + + public ObservabilityPipelineKafkaDestination sasl(ObservabilityPipelineKafkaSasl sasl) { + this.sasl = sasl; + this.unparsed |= sasl.unparsed; + return this; + } + + /** + * Specifies the SASL mechanism for authenticating with a Kafka cluster. + * + * @return sasl + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SASL) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineKafkaSasl getSasl() { + return sasl; + } + + public void setSasl(ObservabilityPipelineKafkaSasl sasl) { + this.sasl = sasl; + } + + public ObservabilityPipelineKafkaDestination socketTimeoutMs(Long socketTimeoutMs) { + this.socketTimeoutMs = socketTimeoutMs; + return this; + } + + /** + * Socket timeout in milliseconds for network requests. minimum: 10 maximum: 300000 + * + * @return socketTimeoutMs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SOCKET_TIMEOUT_MS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getSocketTimeoutMs() { + return socketTimeoutMs; + } + + public void setSocketTimeoutMs(Long socketTimeoutMs) { + this.socketTimeoutMs = socketTimeoutMs; + } + + public ObservabilityPipelineKafkaDestination tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineKafkaDestination topic(String topic) { + this.topic = topic; + return this; + } + + /** + * The Kafka topic name to publish logs to. + * + * @return topic + */ + @JsonProperty(JSON_PROPERTY_TOPIC) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getTopic() { + return topic; + } + + public void setTopic(String topic) { + this.topic = topic; + } + + public ObservabilityPipelineKafkaDestination type( + ObservabilityPipelineKafkaDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be kafka. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineKafkaDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineKafkaDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineKafkaDestination + */ + @JsonAnySetter + public ObservabilityPipelineKafkaDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineKafkaDestination object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineKafkaDestination observabilityPipelineKafkaDestination = + (ObservabilityPipelineKafkaDestination) o; + return Objects.equals(this.compression, observabilityPipelineKafkaDestination.compression) + && Objects.equals(this.encoding, observabilityPipelineKafkaDestination.encoding) + && Objects.equals(this.headersKey, observabilityPipelineKafkaDestination.headersKey) + && Objects.equals(this.id, observabilityPipelineKafkaDestination.id) + && Objects.equals(this.inputs, observabilityPipelineKafkaDestination.inputs) + && Objects.equals(this.keyField, observabilityPipelineKafkaDestination.keyField) + && Objects.equals( + this.librdkafkaOptions, observabilityPipelineKafkaDestination.librdkafkaOptions) + && Objects.equals( + this.messageTimeoutMs, observabilityPipelineKafkaDestination.messageTimeoutMs) + && Objects.equals( + this.rateLimitDurationSecs, observabilityPipelineKafkaDestination.rateLimitDurationSecs) + && Objects.equals(this.rateLimitNum, observabilityPipelineKafkaDestination.rateLimitNum) + && Objects.equals(this.sasl, observabilityPipelineKafkaDestination.sasl) + && Objects.equals( + this.socketTimeoutMs, observabilityPipelineKafkaDestination.socketTimeoutMs) + && Objects.equals(this.tls, observabilityPipelineKafkaDestination.tls) + && Objects.equals(this.topic, observabilityPipelineKafkaDestination.topic) + && Objects.equals(this.type, observabilityPipelineKafkaDestination.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineKafkaDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + compression, + encoding, + headersKey, + id, + inputs, + keyField, + librdkafkaOptions, + messageTimeoutMs, + rateLimitDurationSecs, + rateLimitNum, + sasl, + socketTimeoutMs, + tls, + topic, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineKafkaDestination {\n"); + sb.append(" compression: ").append(toIndentedString(compression)).append("\n"); + sb.append(" encoding: ").append(toIndentedString(encoding)).append("\n"); + sb.append(" headersKey: ").append(toIndentedString(headersKey)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: 
").append(toIndentedString(inputs)).append("\n"); + sb.append(" keyField: ").append(toIndentedString(keyField)).append("\n"); + sb.append(" librdkafkaOptions: ").append(toIndentedString(librdkafkaOptions)).append("\n"); + sb.append(" messageTimeoutMs: ").append(toIndentedString(messageTimeoutMs)).append("\n"); + sb.append(" rateLimitDurationSecs: ") + .append(toIndentedString(rateLimitDurationSecs)) + .append("\n"); + sb.append(" rateLimitNum: ").append(toIndentedString(rateLimitNum)).append("\n"); + sb.append(" sasl: ").append(toIndentedString(sasl)).append("\n"); + sb.append(" socketTimeoutMs: ").append(toIndentedString(socketTimeoutMs)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" topic: ").append(toIndentedString(topic)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java new file mode 100644 index 00000000000..0a66e9a713f --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java @@ -0,0 +1,71 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Compression codec for Kafka messages. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineKafkaDestinationCompression + .ObservabilityPipelineKafkaDestinationCompressionSerializer.class) +public class ObservabilityPipelineKafkaDestinationCompression extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("none", "gzip", "snappy", "lz4", "zstd")); + + public static final ObservabilityPipelineKafkaDestinationCompression NONE = + new ObservabilityPipelineKafkaDestinationCompression("none"); + public static final ObservabilityPipelineKafkaDestinationCompression GZIP = + new ObservabilityPipelineKafkaDestinationCompression("gzip"); + public static final ObservabilityPipelineKafkaDestinationCompression SNAPPY = + new ObservabilityPipelineKafkaDestinationCompression("snappy"); + public static final ObservabilityPipelineKafkaDestinationCompression LZ4 = + new ObservabilityPipelineKafkaDestinationCompression("lz4"); + public static final ObservabilityPipelineKafkaDestinationCompression ZSTD = + new ObservabilityPipelineKafkaDestinationCompression("zstd"); + + ObservabilityPipelineKafkaDestinationCompression(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineKafkaDestinationCompressionSerializer + extends StdSerializer { + public ObservabilityPipelineKafkaDestinationCompressionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineKafkaDestinationCompressionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineKafkaDestinationCompression value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineKafkaDestinationCompression fromValue(String value) { + return new ObservabilityPipelineKafkaDestinationCompression(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java new file mode 100644 index 00000000000..57abf5f828e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Encoding format for log events. 
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java
new file mode 100644
index 00000000000..57abf5f828e
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java
@@ -0,0 +1,65 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** Encoding format for log events. */
+@JsonSerialize(
+    using =
+        ObservabilityPipelineKafkaDestinationEncoding
+            .ObservabilityPipelineKafkaDestinationEncodingSerializer.class)
+public class ObservabilityPipelineKafkaDestinationEncoding extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues =
+      new HashSet<String>(Arrays.asList("json", "raw_message"));
+
+  public static final ObservabilityPipelineKafkaDestinationEncoding JSON =
+      new ObservabilityPipelineKafkaDestinationEncoding("json");
+  public static final ObservabilityPipelineKafkaDestinationEncoding RAW_MESSAGE =
+      new ObservabilityPipelineKafkaDestinationEncoding("raw_message");
+
+  ObservabilityPipelineKafkaDestinationEncoding(String value) {
+    super(value, allowedValues);
+  }
+
+  public static class ObservabilityPipelineKafkaDestinationEncodingSerializer
+      extends StdSerializer<ObservabilityPipelineKafkaDestinationEncoding> {
+    public ObservabilityPipelineKafkaDestinationEncodingSerializer(
+        Class<ObservabilityPipelineKafkaDestinationEncoding> t) {
+      super(t);
+    }
+
+    public ObservabilityPipelineKafkaDestinationEncodingSerializer() {
+      this(null);
+    }
+
+    @Override
+    public void serialize(
+        ObservabilityPipelineKafkaDestinationEncoding value,
+        JsonGenerator jgen,
+        SerializerProvider provider)
+        throws IOException, JsonProcessingException {
+      jgen.writeObject(value.value);
+    }
+  }
+
+  @JsonCreator
+  public static ObservabilityPipelineKafkaDestinationEncoding fromValue(String value) {
+    return new ObservabilityPipelineKafkaDestinationEncoding(value);
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java
new file mode 100644
index 00000000000..0967bbb11c5
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java
@@ -0,0 +1,62 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The destination type. The value should always be <code>kafka</code>. */
+@JsonSerialize(
+    using =
+        ObservabilityPipelineKafkaDestinationType
+            .ObservabilityPipelineKafkaDestinationTypeSerializer.class)
+public class ObservabilityPipelineKafkaDestinationType extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues = new HashSet<String>(Arrays.asList("kafka"));
+
+  public static final ObservabilityPipelineKafkaDestinationType KAFKA =
+      new ObservabilityPipelineKafkaDestinationType("kafka");
+
+  ObservabilityPipelineKafkaDestinationType(String value) {
+    super(value, allowedValues);
+  }
+
+  public static class ObservabilityPipelineKafkaDestinationTypeSerializer
+      extends StdSerializer<ObservabilityPipelineKafkaDestinationType> {
+    public ObservabilityPipelineKafkaDestinationTypeSerializer(
+        Class<ObservabilityPipelineKafkaDestinationType> t) {
+      super(t);
+    }
+
+    public ObservabilityPipelineKafkaDestinationTypeSerializer() {
+      this(null);
+    }
+
+    @Override
+    public void serialize(
+        ObservabilityPipelineKafkaDestinationType value,
+        JsonGenerator jgen,
+        SerializerProvider provider)
+        throws IOException, JsonProcessingException {
+      jgen.writeObject(value.value);
+    }
+  }
+
+  @JsonCreator
+  public static ObservabilityPipelineKafkaDestinationType fromValue(String value) {
+    return new ObservabilityPipelineKafkaDestinationType(value);
+  }
+}
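
One behavior worth calling out for all three enums above: fromValue() never throws on an unrecognized string. The value is kept and only flagged through ModelEnum's isValid(), which is what lets setters such as setMechanism() further down mark a model as unparsed instead of failing. A short sketch:

    ObservabilityPipelineKafkaDestinationCompression gzip =
        ObservabilityPipelineKafkaDestinationCompression.fromValue("gzip");
    // An out-of-spec codec round-trips instead of throwing; isValid() reports it.
    ObservabilityPipelineKafkaDestinationCompression unknown =
        ObservabilityPipelineKafkaDestinationCompression.fromValue("brotli");
    // gzip.isValid() == true, unknown.isValid() == false
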
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java
similarity index 78%
rename from src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java
rename to src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java
index e7211139e49..8d4ed2a1d60 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java
@@ -19,15 +19,15 @@
 
 /**
  * Represents a key-value pair used to configure low-level librdkafka client options
- * for Kafka sources, such as timeouts, buffer sizes, and security settings.
+ * for Kafka source and destination, such as timeouts, buffer sizes, and security settings.
  */
 @JsonPropertyOrder({
-  ObservabilityPipelineKafkaSourceLibrdkafkaOption.JSON_PROPERTY_NAME,
-  ObservabilityPipelineKafkaSourceLibrdkafkaOption.JSON_PROPERTY_VALUE
+  ObservabilityPipelineKafkaLibrdkafkaOption.JSON_PROPERTY_NAME,
+  ObservabilityPipelineKafkaLibrdkafkaOption.JSON_PROPERTY_VALUE
 })
 @jakarta.annotation.Generated(
     value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
-public class ObservabilityPipelineKafkaSourceLibrdkafkaOption {
+public class ObservabilityPipelineKafkaLibrdkafkaOption {
   @JsonIgnore public boolean unparsed = false;
   public static final String JSON_PROPERTY_NAME = "name";
   private String name;
@@ -35,17 +35,17 @@ public class ObservabilityPipelineKafkaSourceLibrdkafkaOption {
   public static final String JSON_PROPERTY_VALUE = "value";
   private String value;
 
-  public ObservabilityPipelineKafkaSourceLibrdkafkaOption() {}
+  public ObservabilityPipelineKafkaLibrdkafkaOption() {}
 
   @JsonCreator
-  public ObservabilityPipelineKafkaSourceLibrdkafkaOption(
+  public ObservabilityPipelineKafkaLibrdkafkaOption(
       @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name,
       @JsonProperty(required = true, value = JSON_PROPERTY_VALUE) String value) {
     this.name = name;
     this.value = value;
   }
 
-  public ObservabilityPipelineKafkaSourceLibrdkafkaOption name(String name) {
+  public ObservabilityPipelineKafkaLibrdkafkaOption name(String name) {
     this.name = name;
     return this;
   }
@@ -65,7 +65,7 @@ public void setName(String name) {
     this.name = name;
   }
 
-  public ObservabilityPipelineKafkaSourceLibrdkafkaOption value(String value) {
+  public ObservabilityPipelineKafkaLibrdkafkaOption value(String value) {
     this.value = value;
     return this;
   }
@@ -97,10 +97,10 @@ public void setValue(String value) {
    *
    * @param key The arbitrary key to set
    * @param value The associated value
-   * @return ObservabilityPipelineKafkaSourceLibrdkafkaOption
+   * @return ObservabilityPipelineKafkaLibrdkafkaOption
    */
   @JsonAnySetter
-  public ObservabilityPipelineKafkaSourceLibrdkafkaOption putAdditionalProperty(
+  public ObservabilityPipelineKafkaLibrdkafkaOption putAdditionalProperty(
       String key, Object value) {
     if (this.additionalProperties == null) {
       this.additionalProperties = new HashMap<String, Object>();
    }
@@ -132,7 +132,7 @@ public Object getAdditionalProperty(String key) {
     return this.additionalProperties.get(key);
   }
 
-  /** Return true if this ObservabilityPipelineKafkaSourceLibrdkafkaOption object is equal to o. */
+  /** Return true if this ObservabilityPipelineKafkaLibrdkafkaOption object is equal to o. */
   @Override
   public boolean equals(Object o) {
     if (this == o) {
@@ -141,14 +141,13 @@ public boolean equals(Object o) {
       return true;
     }
     if (o == null || getClass() != o.getClass()) {
       return false;
     }
-    ObservabilityPipelineKafkaSourceLibrdkafkaOption
-        observabilityPipelineKafkaSourceLibrdkafkaOption =
-            (ObservabilityPipelineKafkaSourceLibrdkafkaOption) o;
-    return Objects.equals(this.name, observabilityPipelineKafkaSourceLibrdkafkaOption.name)
-        && Objects.equals(this.value, observabilityPipelineKafkaSourceLibrdkafkaOption.value)
+    ObservabilityPipelineKafkaLibrdkafkaOption observabilityPipelineKafkaLibrdkafkaOption =
+        (ObservabilityPipelineKafkaLibrdkafkaOption) o;
+    return Objects.equals(this.name, observabilityPipelineKafkaLibrdkafkaOption.name)
+        && Objects.equals(this.value, observabilityPipelineKafkaLibrdkafkaOption.value)
         && Objects.equals(
             this.additionalProperties,
-            observabilityPipelineKafkaSourceLibrdkafkaOption.additionalProperties);
+            observabilityPipelineKafkaLibrdkafkaOption.additionalProperties);
   }
 
   @Override
@@ -159,7 +158,7 @@ public int hashCode() {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class ObservabilityPipelineKafkaSourceLibrdkafkaOption {\n");
+    sb.append("class ObservabilityPipelineKafkaLibrdkafkaOption {\n");
     sb.append("    name: ").append(toIndentedString(name)).append("\n");
     sb.append("    value: ").append(toIndentedString(value)).append("\n");
     sb.append("    additionalProperties: ")
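
The renamed option type keeps the fluent name/value setters shown above, so producer tuning reads as follows (the librdkafka key here is purely illustrative):

    ObservabilityPipelineKafkaLibrdkafkaOption option =
        new ObservabilityPipelineKafkaLibrdkafkaOption()
            .name("socket.timeout.ms") // any librdkafka configuration key
            .value("60000");
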
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java
similarity index 78%
rename from src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java
rename to src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java
index 0475f35416b..89c8f57a3ac 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java
@@ -17,16 +17,16 @@
 import java.util.Objects;
 
 /** Specifies the SASL mechanism for authenticating with a Kafka cluster. */
-@JsonPropertyOrder({ObservabilityPipelineKafkaSourceSasl.JSON_PROPERTY_MECHANISM})
+@JsonPropertyOrder({ObservabilityPipelineKafkaSasl.JSON_PROPERTY_MECHANISM})
 @jakarta.annotation.Generated(
     value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
-public class ObservabilityPipelineKafkaSourceSasl {
+public class ObservabilityPipelineKafkaSasl {
   @JsonIgnore public boolean unparsed = false;
   public static final String JSON_PROPERTY_MECHANISM = "mechanism";
-  private ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism;
+  private ObservabilityPipelineKafkaSaslMechanism mechanism;
 
-  public ObservabilityPipelineKafkaSourceSasl mechanism(
-      ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism) {
+  public ObservabilityPipelineKafkaSasl mechanism(
+      ObservabilityPipelineKafkaSaslMechanism mechanism) {
     this.mechanism = mechanism;
     this.unparsed |= !mechanism.isValid();
     return this;
@@ -40,11 +40,11 @@ public ObservabilityPipelineKafkaSourceSasl mechanism(
   @jakarta.annotation.Nullable
   @JsonProperty(JSON_PROPERTY_MECHANISM)
   @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
-  public ObservabilityPipelinePipelineKafkaSourceSaslMechanism getMechanism() {
+  public ObservabilityPipelineKafkaSaslMechanism getMechanism() {
     return mechanism;
   }
 
-  public void setMechanism(ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism) {
+  public void setMechanism(ObservabilityPipelineKafkaSaslMechanism mechanism) {
     if (!mechanism.isValid()) {
       this.unparsed = true;
     }
@@ -63,10 +63,10 @@ public void setMechanism(ObservabilityPipelinePipelineKafkaSourceSaslMechanism m
    *
    * @param key The arbitrary key to set
    * @param value The associated value
-   * @return ObservabilityPipelineKafkaSourceSasl
+   * @return ObservabilityPipelineKafkaSasl
   */
  @JsonAnySetter
-  public ObservabilityPipelineKafkaSourceSasl putAdditionalProperty(String key, Object value) {
+  public ObservabilityPipelineKafkaSasl putAdditionalProperty(String key, Object value) {
     if (this.additionalProperties == null) {
       this.additionalProperties = new HashMap<String, Object>();
     }
@@ -97,7 +97,7 @@ public Object getAdditionalProperty(String key) {
     return this.additionalProperties.get(key);
   }
 
-  /** Return true if this ObservabilityPipelineKafkaSourceSasl object is equal to o. */
+  /** Return true if this ObservabilityPipelineKafkaSasl object is equal to o. */
   @Override
   public boolean equals(Object o) {
     if (this == o) {
@@ -106,11 +106,11 @@ public boolean equals(Object o) {
       return true;
     }
     if (o == null || getClass() != o.getClass()) {
       return false;
     }
-    ObservabilityPipelineKafkaSourceSasl observabilityPipelineKafkaSourceSasl =
-        (ObservabilityPipelineKafkaSourceSasl) o;
-    return Objects.equals(this.mechanism, observabilityPipelineKafkaSourceSasl.mechanism)
+    ObservabilityPipelineKafkaSasl observabilityPipelineKafkaSasl =
+        (ObservabilityPipelineKafkaSasl) o;
+    return Objects.equals(this.mechanism, observabilityPipelineKafkaSasl.mechanism)
         && Objects.equals(
-            this.additionalProperties, observabilityPipelineKafkaSourceSasl.additionalProperties);
+            this.additionalProperties, observabilityPipelineKafkaSasl.additionalProperties);
   }
 
   @Override
@@ -121,7 +121,7 @@ public int hashCode() {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("class ObservabilityPipelineKafkaSourceSasl {\n");
+    sb.append("class ObservabilityPipelineKafkaSasl {\n");
     sb.append("    mechanism: ").append(toIndentedString(mechanism)).append("\n");
     sb.append("    additionalProperties: ")
         .append(toIndentedString(additionalProperties))
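
Correspondingly, the renamed SASL wrapper is built the same way as before, just without the Source-specific type names; the constants come from the new enum file below:

    ObservabilityPipelineKafkaSasl sasl =
        new ObservabilityPipelineKafkaSasl()
            .mechanism(ObservabilityPipelineKafkaSaslMechanism.SCRAMNOT_SHANOT_256);
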
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java
new file mode 100644
index 00000000000..8022d54681f
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java
@@ -0,0 +1,67 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** SASL mechanism used for Kafka authentication. */
+@JsonSerialize(
+    using =
+        ObservabilityPipelineKafkaSaslMechanism.ObservabilityPipelineKafkaSaslMechanismSerializer
+            .class)
+public class ObservabilityPipelineKafkaSaslMechanism extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues =
+      new HashSet<String>(Arrays.asList("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"));
+
+  public static final ObservabilityPipelineKafkaSaslMechanism PLAIN =
+      new ObservabilityPipelineKafkaSaslMechanism("PLAIN");
+  public static final ObservabilityPipelineKafkaSaslMechanism SCRAMNOT_SHANOT_256 =
+      new ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-256");
+  public static final ObservabilityPipelineKafkaSaslMechanism SCRAMNOT_SHANOT_512 =
+      new ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-512");
+
+  ObservabilityPipelineKafkaSaslMechanism(String value) {
+    super(value, allowedValues);
+  }
+
+  public static class ObservabilityPipelineKafkaSaslMechanismSerializer
+      extends StdSerializer<ObservabilityPipelineKafkaSaslMechanism> {
+    public ObservabilityPipelineKafkaSaslMechanismSerializer(
+        Class<ObservabilityPipelineKafkaSaslMechanism> t) {
+      super(t);
+    }
+
+    public ObservabilityPipelineKafkaSaslMechanismSerializer() {
+      this(null);
+    }
+
+    @Override
+    public void serialize(
+        ObservabilityPipelineKafkaSaslMechanism value,
+        JsonGenerator jgen,
+        SerializerProvider provider)
+        throws IOException, JsonProcessingException {
+      jgen.writeObject(value.value);
+    }
+  }
+
+  @JsonCreator
+  public static ObservabilityPipelineKafkaSaslMechanism fromValue(String value) {
+    return new ObservabilityPipelineKafkaSaslMechanism(value);
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java
index d8c3ea6254d..c74dbd02f69 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java
@@ -40,10 +40,10 @@ public class ObservabilityPipelineKafkaSource {
   private String id;
 
   public static final String JSON_PROPERTY_LIBRDKAFKA_OPTIONS = "librdkafka_options";
-  private List<ObservabilityPipelineKafkaSourceLibrdkafkaOption> librdkafkaOptions = null;
+  private List<ObservabilityPipelineKafkaLibrdkafkaOption> librdkafkaOptions = null;
 
   public static final String JSON_PROPERTY_SASL = "sasl";
-  private ObservabilityPipelineKafkaSourceSasl sasl;
+  private ObservabilityPipelineKafkaSasl sasl;
 
   public static final String JSON_PROPERTY_TLS = "tls";
   private ObservabilityPipelineTls tls;
@@ -112,16 +112,16 @@ public void setId(String id) {
   }
 
   public ObservabilityPipelineKafkaSource librdkafkaOptions(
-      List<ObservabilityPipelineKafkaSourceLibrdkafkaOption> librdkafkaOptions) {
+      List<ObservabilityPipelineKafkaLibrdkafkaOption> librdkafkaOptions) {
     this.librdkafkaOptions = librdkafkaOptions;
-    for (ObservabilityPipelineKafkaSourceLibrdkafkaOption item : librdkafkaOptions) {
+    for (ObservabilityPipelineKafkaLibrdkafkaOption item : librdkafkaOptions) {
       this.unparsed |= item.unparsed;
     }
     return this;
   }
 
   public ObservabilityPipelineKafkaSource addLibrdkafkaOptionsItem(
-      ObservabilityPipelineKafkaSourceLibrdkafkaOption librdkafkaOptionsItem) {
+      ObservabilityPipelineKafkaLibrdkafkaOption librdkafkaOptionsItem) {
     if (this.librdkafkaOptions == null) {
       this.librdkafkaOptions = new ArrayList<>();
     }
@@ -138,16 +138,16 @@ public ObservabilityPipelineKafkaSource addLibrdkafkaOptionsItem(
   @jakarta.annotation.Nullable
   @JsonProperty(JSON_PROPERTY_LIBRDKAFKA_OPTIONS)
   @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
-  public List<ObservabilityPipelineKafkaSourceLibrdkafkaOption> getLibrdkafkaOptions() {
+  public List<ObservabilityPipelineKafkaLibrdkafkaOption> getLibrdkafkaOptions() {
     return librdkafkaOptions;
   }
 
   public void setLibrdkafkaOptions(
-      List<ObservabilityPipelineKafkaSourceLibrdkafkaOption> librdkafkaOptions) {
+      List<ObservabilityPipelineKafkaLibrdkafkaOption> librdkafkaOptions) {
     this.librdkafkaOptions = librdkafkaOptions;
   }
 
-  public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSourceSasl sasl) {
+  public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSasl sasl) {
     this.sasl = sasl;
     this.unparsed |= sasl.unparsed;
     return this;
@@ -161,11 +161,11 @@ public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSourceSas
   @jakarta.annotation.Nullable
   @JsonProperty(JSON_PROPERTY_SASL)
   @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
-  public ObservabilityPipelineKafkaSourceSasl getSasl() {
+  public ObservabilityPipelineKafkaSasl getSasl() {
     return sasl;
   }
 
-  public void setSasl(ObservabilityPipelineKafkaSourceSasl sasl) {
+  public void setSasl(ObservabilityPipelineKafkaSasl sasl) {
     this.sasl = sasl;
   }
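
Putting the renames together on the source side, existing call sites only need the new type names; the fluent API surface itself is unchanged. A sketch using the setters visible in the hunks above:

    ObservabilityPipelineKafkaSource source =
        new ObservabilityPipelineKafkaSource()
            .sasl(new ObservabilityPipelineKafkaSasl()
                .mechanism(ObservabilityPipelineKafkaSaslMechanism.PLAIN))
            .addLibrdkafkaOptionsItem(
                new ObservabilityPipelineKafkaLibrdkafkaOption()
                    .name("fetch.message.max.bytes") // illustrative librdkafka key
                    .value("1048576"));
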
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java
deleted file mode 100644
index 6ec67b62962..00000000000
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
- * This product includes software developed at Datadog (https://www.datadoghq.com/).
- * Copyright 2019-Present Datadog, Inc.
- */
-
-package com.datadog.api.client.v2.model;
-
-import com.datadog.api.client.ModelEnum;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.core.JsonGenerator;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.SerializerProvider;
-import com.fasterxml.jackson.databind.annotation.JsonSerialize;
-import com.fasterxml.jackson.databind.ser.std.StdSerializer;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-
-/** SASL mechanism used for Kafka authentication. */
-@JsonSerialize(
-    using =
-        ObservabilityPipelinePipelineKafkaSourceSaslMechanism
-            .ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer.class)
-public class ObservabilityPipelinePipelineKafkaSourceSaslMechanism extends ModelEnum<String> {
-
-  private static final Set<String> allowedValues =
-      new HashSet<String>(Arrays.asList("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"));
-
-  public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism PLAIN =
-      new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("PLAIN");
-  public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism SCRAMNOT_SHANOT_256 =
-      new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-256");
-  public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism SCRAMNOT_SHANOT_512 =
-      new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-512");
-
-  ObservabilityPipelinePipelineKafkaSourceSaslMechanism(String value) {
-    super(value, allowedValues);
-  }
-
-  public static class ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer
-      extends StdSerializer<ObservabilityPipelinePipelineKafkaSourceSaslMechanism> {
-    public ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer(
-        Class<ObservabilityPipelinePipelineKafkaSourceSaslMechanism> t) {
-      super(t);
-    }
-
-    public ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer() {
-      this(null);
-    }
-
-    @Override
-    public void serialize(
-        ObservabilityPipelinePipelineKafkaSourceSaslMechanism value,
-        JsonGenerator jgen,
-        SerializerProvider provider)
-        throws IOException, JsonProcessingException {
-      jgen.writeObject(value.value);
-    }
-  }
-
-  @JsonCreator
-  public static ObservabilityPipelinePipelineKafkaSourceSaslMechanism fromValue(String value) {
-    return new ObservabilityPipelinePipelineKafkaSourceSaslMechanism(value);
-  }
-}
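
For anyone upgrading across this change, the deleted enum maps one-for-one onto the new ObservabilityPipelineKafkaSaslMechanism, so the migration is a type rename only:

    // before
    ObservabilityPipelinePipelineKafkaSourceSaslMechanism.PLAIN
    // after
    ObservabilityPipelineKafkaSaslMechanism.PLAIN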