getSchemas() {
/**
* Set the instance that matches the oneOf child schema, check the instance parameter is valid
* against the oneOf child schemas: ObservabilityPipelineDatadogLogsDestination,
- * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination,
- * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
- * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination,
- * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination,
- * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination,
- * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination,
- * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineAmazonOpenSearchDestination,
- * ObservabilityPipelineSocketDestination, ObservabilityPipelineAmazonSecurityLakeDestination,
+ * ObservabilityPipelineCloudPremDestination, ObservabilityPipelineAmazonS3Destination,
+ * ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination,
+ * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineElasticsearchDestination,
+ * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination,
+ * AzureStorageDestination, MicrosoftSentinelDestination,
+ * ObservabilityPipelineGoogleChronicleDestination, ObservabilityPipelineNewRelicDestination,
+ * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination,
+ * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination,
+ * ObservabilityPipelineAmazonSecurityLakeDestination,
* ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
- * ObservabilityPipelineGooglePubSubDestination
+ * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination
*
* It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a
* composed schema (allOf, anyOf, oneOf).
@@ -1287,6 +1404,11 @@ public void setActualInstance(Object instance) {
super.setActualInstance(instance);
return;
}
+ if (JSON.isInstanceOf(
+ ObservabilityPipelineCloudPremDestination.class, instance, new HashSet>())) {
+ super.setActualInstance(instance);
+ return;
+ }
if (JSON.isInstanceOf(
ObservabilityPipelineAmazonS3Destination.class, instance, new HashSet>())) {
super.setActualInstance(instance);
@@ -1383,6 +1505,11 @@ public void setActualInstance(Object instance) {
super.setActualInstance(instance);
return;
}
+ if (JSON.isInstanceOf(
+ ObservabilityPipelineKafkaDestination.class, instance, new HashSet>())) {
+ super.setActualInstance(instance);
+ return;
+ }
if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) {
super.setActualInstance(instance);
@@ -1390,6 +1517,7 @@ public void setActualInstance(Object instance) {
}
throw new RuntimeException(
"Invalid instance type. Must be ObservabilityPipelineDatadogLogsDestination,"
+ + " ObservabilityPipelineCloudPremDestination,"
+ " ObservabilityPipelineAmazonS3Destination,"
+ " ObservabilityPipelineGoogleCloudStorageDestination,"
+ " ObservabilityPipelineSplunkHecDestination,"
@@ -1405,25 +1533,26 @@ public void setActualInstance(Object instance) {
+ " ObservabilityPipelineSocketDestination,"
+ " ObservabilityPipelineAmazonSecurityLakeDestination,"
+ " ObservabilityPipelineCrowdStrikeNextGenSiemDestination,"
- + " ObservabilityPipelineGooglePubSubDestination");
+ + " ObservabilityPipelineGooglePubSubDestination,"
+ + " ObservabilityPipelineKafkaDestination");
}
/**
* Get the actual instance, which can be the following:
- * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineAmazonS3Destination,
- * ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination,
- * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineElasticsearchDestination,
- * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination,
- * AzureStorageDestination, MicrosoftSentinelDestination,
- * ObservabilityPipelineGoogleChronicleDestination, ObservabilityPipelineNewRelicDestination,
- * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination,
- * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination,
- * ObservabilityPipelineAmazonSecurityLakeDestination,
+ * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineCloudPremDestination,
+ * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination,
+ * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
+ * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination,
+ * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination,
+ * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination,
+ * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination,
+ * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineAmazonOpenSearchDestination,
+ * ObservabilityPipelineSocketDestination, ObservabilityPipelineAmazonSecurityLakeDestination,
* ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
- * ObservabilityPipelineGooglePubSubDestination
+ * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination
*
* @return The actual instance (ObservabilityPipelineDatadogLogsDestination,
- * ObservabilityPipelineAmazonS3Destination,
+ * ObservabilityPipelineCloudPremDestination, ObservabilityPipelineAmazonS3Destination,
* ObservabilityPipelineGoogleCloudStorageDestination,
* ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
* ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination,
@@ -1434,7 +1563,7 @@ public void setActualInstance(Object instance) {
* ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination,
* ObservabilityPipelineAmazonSecurityLakeDestination,
* ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
- * ObservabilityPipelineGooglePubSubDestination)
+ * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination)
*/
@Override
public Object getActualInstance() {
@@ -1454,6 +1583,18 @@ public Object getActualInstance() {
return (ObservabilityPipelineDatadogLogsDestination) super.getActualInstance();
}
+ /**
+ * Get the actual instance of `ObservabilityPipelineCloudPremDestination`. If the actual instance
+ * is not `ObservabilityPipelineCloudPremDestination`, the ClassCastException will be thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineCloudPremDestination`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineCloudPremDestination`
+ */
+ public ObservabilityPipelineCloudPremDestination getObservabilityPipelineCloudPremDestination()
+ throws ClassCastException {
+ return (ObservabilityPipelineCloudPremDestination) super.getActualInstance();
+ }
+
/**
* Get the actual instance of `ObservabilityPipelineAmazonS3Destination`. If the actual instance
* is not `ObservabilityPipelineAmazonS3Destination`, the ClassCastException will be thrown.
@@ -1682,4 +1823,16 @@ public ObservabilityPipelineSocketDestination getObservabilityPipelineSocketDest
getObservabilityPipelineGooglePubSubDestination() throws ClassCastException {
return (ObservabilityPipelineGooglePubSubDestination) super.getActualInstance();
}
+
+ /**
+ * Get the actual instance of `ObservabilityPipelineKafkaDestination`. If the actual instance is
+ * not `ObservabilityPipelineKafkaDestination`, the ClassCastException will be thrown.
+ *
+ * @return The actual instance of `ObservabilityPipelineKafkaDestination`
+ * @throws ClassCastException if the instance is not `ObservabilityPipelineKafkaDestination`
+ */
+ public ObservabilityPipelineKafkaDestination getObservabilityPipelineKafkaDestination()
+ throws ClassCastException {
+ return (ObservabilityPipelineKafkaDestination) super.getActualInstance();
+ }
}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java
new file mode 100644
index 00000000000..2f731674a97
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java
@@ -0,0 +1,595 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/** The kafka destination sends logs to Apache Kafka topics. */
+@JsonPropertyOrder({
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_COMPRESSION,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_ENCODING,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_HEADERS_KEY,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_ID,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_INPUTS,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_KEY_FIELD,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_LIBRDKAFKA_OPTIONS,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_MESSAGE_TIMEOUT_MS,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_RATE_LIMIT_DURATION_SECS,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_RATE_LIMIT_NUM,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_SASL,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_SOCKET_TIMEOUT_MS,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TLS,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TOPIC,
+ ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TYPE
+})
+@jakarta.annotation.Generated(
+ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+public class ObservabilityPipelineKafkaDestination {
+ @JsonIgnore public boolean unparsed = false;
+ public static final String JSON_PROPERTY_COMPRESSION = "compression";
+ private ObservabilityPipelineKafkaDestinationCompression compression;
+
+ public static final String JSON_PROPERTY_ENCODING = "encoding";
+ private ObservabilityPipelineKafkaDestinationEncoding encoding;
+
+ public static final String JSON_PROPERTY_HEADERS_KEY = "headers_key";
+ private String headersKey;
+
+ public static final String JSON_PROPERTY_ID = "id";
+ private String id;
+
+ public static final String JSON_PROPERTY_INPUTS = "inputs";
+ private List inputs = new ArrayList<>();
+
+ public static final String JSON_PROPERTY_KEY_FIELD = "key_field";
+ private String keyField;
+
+ public static final String JSON_PROPERTY_LIBRDKAFKA_OPTIONS = "librdkafka_options";
+ private List librdkafkaOptions = null;
+
+ public static final String JSON_PROPERTY_MESSAGE_TIMEOUT_MS = "message_timeout_ms";
+ private Long messageTimeoutMs;
+
+ public static final String JSON_PROPERTY_RATE_LIMIT_DURATION_SECS = "rate_limit_duration_secs";
+ private Long rateLimitDurationSecs;
+
+ public static final String JSON_PROPERTY_RATE_LIMIT_NUM = "rate_limit_num";
+ private Long rateLimitNum;
+
+ public static final String JSON_PROPERTY_SASL = "sasl";
+ private ObservabilityPipelineKafkaSasl sasl;
+
+ public static final String JSON_PROPERTY_SOCKET_TIMEOUT_MS = "socket_timeout_ms";
+ private Long socketTimeoutMs;
+
+ public static final String JSON_PROPERTY_TLS = "tls";
+ private ObservabilityPipelineTls tls;
+
+ public static final String JSON_PROPERTY_TOPIC = "topic";
+ private String topic;
+
+ public static final String JSON_PROPERTY_TYPE = "type";
+ private ObservabilityPipelineKafkaDestinationType type =
+ ObservabilityPipelineKafkaDestinationType.KAFKA;
+
+ public ObservabilityPipelineKafkaDestination() {}
+
+ @JsonCreator
+ public ObservabilityPipelineKafkaDestination(
+ @JsonProperty(required = true, value = JSON_PROPERTY_ENCODING)
+ ObservabilityPipelineKafkaDestinationEncoding encoding,
+ @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id,
+ @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs,
+ @JsonProperty(required = true, value = JSON_PROPERTY_TOPIC) String topic,
+ @JsonProperty(required = true, value = JSON_PROPERTY_TYPE)
+ ObservabilityPipelineKafkaDestinationType type) {
+ this.encoding = encoding;
+ this.unparsed |= !encoding.isValid();
+ this.id = id;
+ this.inputs = inputs;
+ this.topic = topic;
+ this.type = type;
+ this.unparsed |= !type.isValid();
+ }
+
+ public ObservabilityPipelineKafkaDestination compression(
+ ObservabilityPipelineKafkaDestinationCompression compression) {
+ this.compression = compression;
+ this.unparsed |= !compression.isValid();
+ return this;
+ }
+
+ /**
+ * Compression codec for Kafka messages.
+ *
+ * @return compression
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_COMPRESSION)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineKafkaDestinationCompression getCompression() {
+ return compression;
+ }
+
+ public void setCompression(ObservabilityPipelineKafkaDestinationCompression compression) {
+ if (!compression.isValid()) {
+ this.unparsed = true;
+ }
+ this.compression = compression;
+ }
+
+ public ObservabilityPipelineKafkaDestination encoding(
+ ObservabilityPipelineKafkaDestinationEncoding encoding) {
+ this.encoding = encoding;
+ this.unparsed |= !encoding.isValid();
+ return this;
+ }
+
+ /**
+ * Encoding format for log events.
+ *
+ * @return encoding
+ */
+ @JsonProperty(JSON_PROPERTY_ENCODING)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public ObservabilityPipelineKafkaDestinationEncoding getEncoding() {
+ return encoding;
+ }
+
+ public void setEncoding(ObservabilityPipelineKafkaDestinationEncoding encoding) {
+ if (!encoding.isValid()) {
+ this.unparsed = true;
+ }
+ this.encoding = encoding;
+ }
+
+ public ObservabilityPipelineKafkaDestination headersKey(String headersKey) {
+ this.headersKey = headersKey;
+ return this;
+ }
+
+ /**
+ * The field name to use for Kafka message headers.
+ *
+ * @return headersKey
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_HEADERS_KEY)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public String getHeadersKey() {
+ return headersKey;
+ }
+
+ public void setHeadersKey(String headersKey) {
+ this.headersKey = headersKey;
+ }
+
+ public ObservabilityPipelineKafkaDestination id(String id) {
+ this.id = id;
+ return this;
+ }
+
+ /**
+ * The unique identifier for this component.
+ *
+ * @return id
+ */
+ @JsonProperty(JSON_PROPERTY_ID)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public ObservabilityPipelineKafkaDestination inputs(List inputs) {
+ this.inputs = inputs;
+ return this;
+ }
+
+ public ObservabilityPipelineKafkaDestination addInputsItem(String inputsItem) {
+ this.inputs.add(inputsItem);
+ return this;
+ }
+
+ /**
+ * A list of component IDs whose output is used as the input for this component.
+ *
+ * @return inputs
+ */
+ @JsonProperty(JSON_PROPERTY_INPUTS)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public List getInputs() {
+ return inputs;
+ }
+
+ public void setInputs(List inputs) {
+ this.inputs = inputs;
+ }
+
+ public ObservabilityPipelineKafkaDestination keyField(String keyField) {
+ this.keyField = keyField;
+ return this;
+ }
+
+ /**
+ * The field name to use as the Kafka message key.
+ *
+ * @return keyField
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_KEY_FIELD)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public String getKeyField() {
+ return keyField;
+ }
+
+ public void setKeyField(String keyField) {
+ this.keyField = keyField;
+ }
+
+ public ObservabilityPipelineKafkaDestination librdkafkaOptions(
+ List librdkafkaOptions) {
+ this.librdkafkaOptions = librdkafkaOptions;
+ for (ObservabilityPipelineKafkaLibrdkafkaOption item : librdkafkaOptions) {
+ this.unparsed |= item.unparsed;
+ }
+ return this;
+ }
+
+ public ObservabilityPipelineKafkaDestination addLibrdkafkaOptionsItem(
+ ObservabilityPipelineKafkaLibrdkafkaOption librdkafkaOptionsItem) {
+ if (this.librdkafkaOptions == null) {
+ this.librdkafkaOptions = new ArrayList<>();
+ }
+ this.librdkafkaOptions.add(librdkafkaOptionsItem);
+ this.unparsed |= librdkafkaOptionsItem.unparsed;
+ return this;
+ }
+
+ /**
+ * Optional list of advanced Kafka producer configuration options, defined as key-value pairs.
+ *
+ * @return librdkafkaOptions
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_LIBRDKAFKA_OPTIONS)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public List getLibrdkafkaOptions() {
+ return librdkafkaOptions;
+ }
+
+ public void setLibrdkafkaOptions(
+ List librdkafkaOptions) {
+ this.librdkafkaOptions = librdkafkaOptions;
+ }
+
+ public ObservabilityPipelineKafkaDestination messageTimeoutMs(Long messageTimeoutMs) {
+ this.messageTimeoutMs = messageTimeoutMs;
+ return this;
+ }
+
+ /**
+ * Maximum time in milliseconds to wait for message delivery confirmation. minimum: 1
+ *
+ * @return messageTimeoutMs
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_MESSAGE_TIMEOUT_MS)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public Long getMessageTimeoutMs() {
+ return messageTimeoutMs;
+ }
+
+ public void setMessageTimeoutMs(Long messageTimeoutMs) {
+ this.messageTimeoutMs = messageTimeoutMs;
+ }
+
+ public ObservabilityPipelineKafkaDestination rateLimitDurationSecs(Long rateLimitDurationSecs) {
+ this.rateLimitDurationSecs = rateLimitDurationSecs;
+ return this;
+ }
+
+ /**
+ * Duration in seconds for the rate limit window. minimum: 1
+ *
+ * @return rateLimitDurationSecs
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_RATE_LIMIT_DURATION_SECS)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public Long getRateLimitDurationSecs() {
+ return rateLimitDurationSecs;
+ }
+
+ public void setRateLimitDurationSecs(Long rateLimitDurationSecs) {
+ this.rateLimitDurationSecs = rateLimitDurationSecs;
+ }
+
+ public ObservabilityPipelineKafkaDestination rateLimitNum(Long rateLimitNum) {
+ this.rateLimitNum = rateLimitNum;
+ return this;
+ }
+
+ /**
+ * Maximum number of messages allowed per rate limit duration. minimum: 1
+ *
+ * @return rateLimitNum
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_RATE_LIMIT_NUM)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public Long getRateLimitNum() {
+ return rateLimitNum;
+ }
+
+ public void setRateLimitNum(Long rateLimitNum) {
+ this.rateLimitNum = rateLimitNum;
+ }
+
+ public ObservabilityPipelineKafkaDestination sasl(ObservabilityPipelineKafkaSasl sasl) {
+ this.sasl = sasl;
+ this.unparsed |= sasl.unparsed;
+ return this;
+ }
+
+ /**
+ * Specifies the SASL mechanism for authenticating with a Kafka cluster.
+ *
+ * @return sasl
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_SASL)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineKafkaSasl getSasl() {
+ return sasl;
+ }
+
+ public void setSasl(ObservabilityPipelineKafkaSasl sasl) {
+ this.sasl = sasl;
+ }
+
+ public ObservabilityPipelineKafkaDestination socketTimeoutMs(Long socketTimeoutMs) {
+ this.socketTimeoutMs = socketTimeoutMs;
+ return this;
+ }
+
+ /**
+ * Socket timeout in milliseconds for network requests. minimum: 10 maximum: 300000
+ *
+ * @return socketTimeoutMs
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_SOCKET_TIMEOUT_MS)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public Long getSocketTimeoutMs() {
+ return socketTimeoutMs;
+ }
+
+ public void setSocketTimeoutMs(Long socketTimeoutMs) {
+ this.socketTimeoutMs = socketTimeoutMs;
+ }
+
+ public ObservabilityPipelineKafkaDestination tls(ObservabilityPipelineTls tls) {
+ this.tls = tls;
+ this.unparsed |= tls.unparsed;
+ return this;
+ }
+
+ /**
+ * Configuration for enabling TLS encryption between the pipeline component and external services.
+ *
+ * @return tls
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_TLS)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineTls getTls() {
+ return tls;
+ }
+
+ public void setTls(ObservabilityPipelineTls tls) {
+ this.tls = tls;
+ }
+
+ public ObservabilityPipelineKafkaDestination topic(String topic) {
+ this.topic = topic;
+ return this;
+ }
+
+ /**
+ * The Kafka topic name to publish logs to.
+ *
+ * @return topic
+ */
+ @JsonProperty(JSON_PROPERTY_TOPIC)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public String getTopic() {
+ return topic;
+ }
+
+ public void setTopic(String topic) {
+ this.topic = topic;
+ }
+
+ public ObservabilityPipelineKafkaDestination type(
+ ObservabilityPipelineKafkaDestinationType type) {
+ this.type = type;
+ this.unparsed |= !type.isValid();
+ return this;
+ }
+
+ /**
+ * The destination type. The value should always be kafka.
+ *
+ * @return type
+ */
+ @JsonProperty(JSON_PROPERTY_TYPE)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public ObservabilityPipelineKafkaDestinationType getType() {
+ return type;
+ }
+
+ public void setType(ObservabilityPipelineKafkaDestinationType type) {
+ if (!type.isValid()) {
+ this.unparsed = true;
+ }
+ this.type = type;
+ }
+
+ /**
+ * A container for additional, undeclared properties. This is a holder for any undeclared
+ * properties as specified with the 'additionalProperties' keyword in the OAS document.
+ */
+ private Map additionalProperties;
+
+ /**
+ * Set the additional (undeclared) property with the specified name and value. If the property
+ * does not already exist, create it otherwise replace it.
+ *
+ * @param key The arbitrary key to set
+ * @param value The associated value
+ * @return ObservabilityPipelineKafkaDestination
+ */
+ @JsonAnySetter
+ public ObservabilityPipelineKafkaDestination putAdditionalProperty(String key, Object value) {
+ if (this.additionalProperties == null) {
+ this.additionalProperties = new HashMap();
+ }
+ this.additionalProperties.put(key, value);
+ return this;
+ }
+
+ /**
+ * Return the additional (undeclared) property.
+ *
+ * @return The additional properties
+ */
+ @JsonAnyGetter
+ public Map getAdditionalProperties() {
+ return additionalProperties;
+ }
+
+ /**
+ * Return the additional (undeclared) property with the specified name.
+ *
+ * @param key The arbitrary key to get
+ * @return The specific additional property for the given key
+ */
+ public Object getAdditionalProperty(String key) {
+ if (this.additionalProperties == null) {
+ return null;
+ }
+ return this.additionalProperties.get(key);
+ }
+
+ /** Return true if this ObservabilityPipelineKafkaDestination object is equal to o. */
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ ObservabilityPipelineKafkaDestination observabilityPipelineKafkaDestination =
+ (ObservabilityPipelineKafkaDestination) o;
+ return Objects.equals(this.compression, observabilityPipelineKafkaDestination.compression)
+ && Objects.equals(this.encoding, observabilityPipelineKafkaDestination.encoding)
+ && Objects.equals(this.headersKey, observabilityPipelineKafkaDestination.headersKey)
+ && Objects.equals(this.id, observabilityPipelineKafkaDestination.id)
+ && Objects.equals(this.inputs, observabilityPipelineKafkaDestination.inputs)
+ && Objects.equals(this.keyField, observabilityPipelineKafkaDestination.keyField)
+ && Objects.equals(
+ this.librdkafkaOptions, observabilityPipelineKafkaDestination.librdkafkaOptions)
+ && Objects.equals(
+ this.messageTimeoutMs, observabilityPipelineKafkaDestination.messageTimeoutMs)
+ && Objects.equals(
+ this.rateLimitDurationSecs, observabilityPipelineKafkaDestination.rateLimitDurationSecs)
+ && Objects.equals(this.rateLimitNum, observabilityPipelineKafkaDestination.rateLimitNum)
+ && Objects.equals(this.sasl, observabilityPipelineKafkaDestination.sasl)
+ && Objects.equals(
+ this.socketTimeoutMs, observabilityPipelineKafkaDestination.socketTimeoutMs)
+ && Objects.equals(this.tls, observabilityPipelineKafkaDestination.tls)
+ && Objects.equals(this.topic, observabilityPipelineKafkaDestination.topic)
+ && Objects.equals(this.type, observabilityPipelineKafkaDestination.type)
+ && Objects.equals(
+ this.additionalProperties, observabilityPipelineKafkaDestination.additionalProperties);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ compression,
+ encoding,
+ headersKey,
+ id,
+ inputs,
+ keyField,
+ librdkafkaOptions,
+ messageTimeoutMs,
+ rateLimitDurationSecs,
+ rateLimitNum,
+ sasl,
+ socketTimeoutMs,
+ tls,
+ topic,
+ type,
+ additionalProperties);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("class ObservabilityPipelineKafkaDestination {\n");
+ sb.append(" compression: ").append(toIndentedString(compression)).append("\n");
+ sb.append(" encoding: ").append(toIndentedString(encoding)).append("\n");
+ sb.append(" headersKey: ").append(toIndentedString(headersKey)).append("\n");
+ sb.append(" id: ").append(toIndentedString(id)).append("\n");
+ sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n");
+ sb.append(" keyField: ").append(toIndentedString(keyField)).append("\n");
+ sb.append(" librdkafkaOptions: ").append(toIndentedString(librdkafkaOptions)).append("\n");
+ sb.append(" messageTimeoutMs: ").append(toIndentedString(messageTimeoutMs)).append("\n");
+ sb.append(" rateLimitDurationSecs: ")
+ .append(toIndentedString(rateLimitDurationSecs))
+ .append("\n");
+ sb.append(" rateLimitNum: ").append(toIndentedString(rateLimitNum)).append("\n");
+ sb.append(" sasl: ").append(toIndentedString(sasl)).append("\n");
+ sb.append(" socketTimeoutMs: ").append(toIndentedString(socketTimeoutMs)).append("\n");
+ sb.append(" tls: ").append(toIndentedString(tls)).append("\n");
+ sb.append(" topic: ").append(toIndentedString(topic)).append("\n");
+ sb.append(" type: ").append(toIndentedString(type)).append("\n");
+ sb.append(" additionalProperties: ")
+ .append(toIndentedString(additionalProperties))
+ .append("\n");
+ sb.append('}');
+ return sb.toString();
+ }
+
+ /**
+ * Convert the given object to string with each line indented by 4 spaces (except the first line).
+ */
+ private String toIndentedString(Object o) {
+ if (o == null) {
+ return "null";
+ }
+ return o.toString().replace("\n", "\n ");
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java
new file mode 100644
index 00000000000..0a66e9a713f
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java
@@ -0,0 +1,71 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** Compression codec for Kafka messages. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineKafkaDestinationCompression
+ .ObservabilityPipelineKafkaDestinationCompressionSerializer.class)
+public class ObservabilityPipelineKafkaDestinationCompression extends ModelEnum {
+
+ private static final Set allowedValues =
+ new HashSet(Arrays.asList("none", "gzip", "snappy", "lz4", "zstd"));
+
+ public static final ObservabilityPipelineKafkaDestinationCompression NONE =
+ new ObservabilityPipelineKafkaDestinationCompression("none");
+ public static final ObservabilityPipelineKafkaDestinationCompression GZIP =
+ new ObservabilityPipelineKafkaDestinationCompression("gzip");
+ public static final ObservabilityPipelineKafkaDestinationCompression SNAPPY =
+ new ObservabilityPipelineKafkaDestinationCompression("snappy");
+ public static final ObservabilityPipelineKafkaDestinationCompression LZ4 =
+ new ObservabilityPipelineKafkaDestinationCompression("lz4");
+ public static final ObservabilityPipelineKafkaDestinationCompression ZSTD =
+ new ObservabilityPipelineKafkaDestinationCompression("zstd");
+
+ ObservabilityPipelineKafkaDestinationCompression(String value) {
+ super(value, allowedValues);
+ }
+
+ public static class ObservabilityPipelineKafkaDestinationCompressionSerializer
+ extends StdSerializer {
+ public ObservabilityPipelineKafkaDestinationCompressionSerializer(
+ Class t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineKafkaDestinationCompressionSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineKafkaDestinationCompression value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineKafkaDestinationCompression fromValue(String value) {
+ return new ObservabilityPipelineKafkaDestinationCompression(value);
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java
new file mode 100644
index 00000000000..57abf5f828e
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java
@@ -0,0 +1,65 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** Encoding format for log events. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineKafkaDestinationEncoding
+ .ObservabilityPipelineKafkaDestinationEncodingSerializer.class)
+public class ObservabilityPipelineKafkaDestinationEncoding extends ModelEnum {
+
+ private static final Set allowedValues =
+ new HashSet(Arrays.asList("json", "raw_message"));
+
+ public static final ObservabilityPipelineKafkaDestinationEncoding JSON =
+ new ObservabilityPipelineKafkaDestinationEncoding("json");
+ public static final ObservabilityPipelineKafkaDestinationEncoding RAW_MESSAGE =
+ new ObservabilityPipelineKafkaDestinationEncoding("raw_message");
+
+ ObservabilityPipelineKafkaDestinationEncoding(String value) {
+ super(value, allowedValues);
+ }
+
+ public static class ObservabilityPipelineKafkaDestinationEncodingSerializer
+ extends StdSerializer {
+ public ObservabilityPipelineKafkaDestinationEncodingSerializer(
+ Class t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineKafkaDestinationEncodingSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineKafkaDestinationEncoding value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineKafkaDestinationEncoding fromValue(String value) {
+ return new ObservabilityPipelineKafkaDestinationEncoding(value);
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java
new file mode 100644
index 00000000000..0967bbb11c5
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java
@@ -0,0 +1,62 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The destination type. The value should always be kafka. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineKafkaDestinationType
+ .ObservabilityPipelineKafkaDestinationTypeSerializer.class)
+public class ObservabilityPipelineKafkaDestinationType extends ModelEnum {
+
+ private static final Set allowedValues = new HashSet(Arrays.asList("kafka"));
+
+ public static final ObservabilityPipelineKafkaDestinationType KAFKA =
+ new ObservabilityPipelineKafkaDestinationType("kafka");
+
+ ObservabilityPipelineKafkaDestinationType(String value) {
+ super(value, allowedValues);
+ }
+
+ public static class ObservabilityPipelineKafkaDestinationTypeSerializer
+ extends StdSerializer {
+ public ObservabilityPipelineKafkaDestinationTypeSerializer(
+ Class t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineKafkaDestinationTypeSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineKafkaDestinationType value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineKafkaDestinationType fromValue(String value) {
+ return new ObservabilityPipelineKafkaDestinationType(value);
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java
similarity index 78%
rename from src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java
rename to src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java
index e7211139e49..8d4ed2a1d60 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java
@@ -19,15 +19,15 @@
/**
* Represents a key-value pair used to configure low-level librdkafka client options
- * for Kafka sources, such as timeouts, buffer sizes, and security settings.
+ * for Kafka sources and destinations, such as timeouts, buffer sizes, and security settings.
*/
@JsonPropertyOrder({
- ObservabilityPipelineKafkaSourceLibrdkafkaOption.JSON_PROPERTY_NAME,
- ObservabilityPipelineKafkaSourceLibrdkafkaOption.JSON_PROPERTY_VALUE
+ ObservabilityPipelineKafkaLibrdkafkaOption.JSON_PROPERTY_NAME,
+ ObservabilityPipelineKafkaLibrdkafkaOption.JSON_PROPERTY_VALUE
})
@jakarta.annotation.Generated(
value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
-public class ObservabilityPipelineKafkaSourceLibrdkafkaOption {
+public class ObservabilityPipelineKafkaLibrdkafkaOption {
@JsonIgnore public boolean unparsed = false;
public static final String JSON_PROPERTY_NAME = "name";
private String name;
@@ -35,17 +35,17 @@ public class ObservabilityPipelineKafkaSourceLibrdkafkaOption {
public static final String JSON_PROPERTY_VALUE = "value";
private String value;
- public ObservabilityPipelineKafkaSourceLibrdkafkaOption() {}
+ public ObservabilityPipelineKafkaLibrdkafkaOption() {}
@JsonCreator
- public ObservabilityPipelineKafkaSourceLibrdkafkaOption(
+ public ObservabilityPipelineKafkaLibrdkafkaOption(
@JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name,
@JsonProperty(required = true, value = JSON_PROPERTY_VALUE) String value) {
this.name = name;
this.value = value;
}
- public ObservabilityPipelineKafkaSourceLibrdkafkaOption name(String name) {
+ public ObservabilityPipelineKafkaLibrdkafkaOption name(String name) {
this.name = name;
return this;
}
@@ -65,7 +65,7 @@ public void setName(String name) {
this.name = name;
}
- public ObservabilityPipelineKafkaSourceLibrdkafkaOption value(String value) {
+ public ObservabilityPipelineKafkaLibrdkafkaOption value(String value) {
this.value = value;
return this;
}
@@ -97,10 +97,10 @@ public void setValue(String value) {
*
* @param key The arbitrary key to set
* @param value The associated value
- * @return ObservabilityPipelineKafkaSourceLibrdkafkaOption
+ * @return ObservabilityPipelineKafkaLibrdkafkaOption
*/
@JsonAnySetter
- public ObservabilityPipelineKafkaSourceLibrdkafkaOption putAdditionalProperty(
+ public ObservabilityPipelineKafkaLibrdkafkaOption putAdditionalProperty(
String key, Object value) {
if (this.additionalProperties == null) {
this.additionalProperties = new HashMap();
@@ -132,7 +132,7 @@ public Object getAdditionalProperty(String key) {
return this.additionalProperties.get(key);
}
- /** Return true if this ObservabilityPipelineKafkaSourceLibrdkafkaOption object is equal to o. */
+ /** Return true if this ObservabilityPipelineKafkaLibrdkafkaOption object is equal to o. */
@Override
public boolean equals(Object o) {
if (this == o) {
@@ -141,14 +141,13 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) {
return false;
}
- ObservabilityPipelineKafkaSourceLibrdkafkaOption
- observabilityPipelineKafkaSourceLibrdkafkaOption =
- (ObservabilityPipelineKafkaSourceLibrdkafkaOption) o;
- return Objects.equals(this.name, observabilityPipelineKafkaSourceLibrdkafkaOption.name)
- && Objects.equals(this.value, observabilityPipelineKafkaSourceLibrdkafkaOption.value)
+ ObservabilityPipelineKafkaLibrdkafkaOption observabilityPipelineKafkaLibrdkafkaOption =
+ (ObservabilityPipelineKafkaLibrdkafkaOption) o;
+ return Objects.equals(this.name, observabilityPipelineKafkaLibrdkafkaOption.name)
+ && Objects.equals(this.value, observabilityPipelineKafkaLibrdkafkaOption.value)
&& Objects.equals(
this.additionalProperties,
- observabilityPipelineKafkaSourceLibrdkafkaOption.additionalProperties);
+ observabilityPipelineKafkaLibrdkafkaOption.additionalProperties);
}
@Override
@@ -159,7 +158,7 @@ public int hashCode() {
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
- sb.append("class ObservabilityPipelineKafkaSourceLibrdkafkaOption {\n");
+ sb.append("class ObservabilityPipelineKafkaLibrdkafkaOption {\n");
sb.append(" name: ").append(toIndentedString(name)).append("\n");
sb.append(" value: ").append(toIndentedString(value)).append("\n");
sb.append(" additionalProperties: ")
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java
similarity index 78%
rename from src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java
rename to src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java
index 0475f35416b..89c8f57a3ac 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java
@@ -17,16 +17,16 @@
import java.util.Objects;
/** Specifies the SASL mechanism for authenticating with a Kafka cluster. */
-@JsonPropertyOrder({ObservabilityPipelineKafkaSourceSasl.JSON_PROPERTY_MECHANISM})
+@JsonPropertyOrder({ObservabilityPipelineKafkaSasl.JSON_PROPERTY_MECHANISM})
@jakarta.annotation.Generated(
value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
-public class ObservabilityPipelineKafkaSourceSasl {
+public class ObservabilityPipelineKafkaSasl {
@JsonIgnore public boolean unparsed = false;
public static final String JSON_PROPERTY_MECHANISM = "mechanism";
- private ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism;
+ private ObservabilityPipelineKafkaSaslMechanism mechanism;
- public ObservabilityPipelineKafkaSourceSasl mechanism(
- ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism) {
+ public ObservabilityPipelineKafkaSasl mechanism(
+ ObservabilityPipelineKafkaSaslMechanism mechanism) {
this.mechanism = mechanism;
this.unparsed |= !mechanism.isValid();
return this;
@@ -40,11 +40,11 @@ public ObservabilityPipelineKafkaSourceSasl mechanism(
@jakarta.annotation.Nullable
@JsonProperty(JSON_PROPERTY_MECHANISM)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
- public ObservabilityPipelinePipelineKafkaSourceSaslMechanism getMechanism() {
+ public ObservabilityPipelineKafkaSaslMechanism getMechanism() {
return mechanism;
}
- public void setMechanism(ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism) {
+ public void setMechanism(ObservabilityPipelineKafkaSaslMechanism mechanism) {
if (!mechanism.isValid()) {
this.unparsed = true;
}
@@ -63,10 +63,10 @@ public void setMechanism(ObservabilityPipelinePipelineKafkaSourceSaslMechanism m
*
* @param key The arbitrary key to set
* @param value The associated value
- * @return ObservabilityPipelineKafkaSourceSasl
+ * @return ObservabilityPipelineKafkaSasl
*/
@JsonAnySetter
- public ObservabilityPipelineKafkaSourceSasl putAdditionalProperty(String key, Object value) {
+ public ObservabilityPipelineKafkaSasl putAdditionalProperty(String key, Object value) {
if (this.additionalProperties == null) {
this.additionalProperties = new HashMap();
}
@@ -97,7 +97,7 @@ public Object getAdditionalProperty(String key) {
return this.additionalProperties.get(key);
}
- /** Return true if this ObservabilityPipelineKafkaSourceSasl object is equal to o. */
+ /** Return true if this ObservabilityPipelineKafkaSasl object is equal to o. */
@Override
public boolean equals(Object o) {
if (this == o) {
@@ -106,11 +106,11 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) {
return false;
}
- ObservabilityPipelineKafkaSourceSasl observabilityPipelineKafkaSourceSasl =
- (ObservabilityPipelineKafkaSourceSasl) o;
- return Objects.equals(this.mechanism, observabilityPipelineKafkaSourceSasl.mechanism)
+ ObservabilityPipelineKafkaSasl observabilityPipelineKafkaSasl =
+ (ObservabilityPipelineKafkaSasl) o;
+ return Objects.equals(this.mechanism, observabilityPipelineKafkaSasl.mechanism)
&& Objects.equals(
- this.additionalProperties, observabilityPipelineKafkaSourceSasl.additionalProperties);
+ this.additionalProperties, observabilityPipelineKafkaSasl.additionalProperties);
}
@Override
@@ -121,7 +121,7 @@ public int hashCode() {
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
- sb.append("class ObservabilityPipelineKafkaSourceSasl {\n");
+ sb.append("class ObservabilityPipelineKafkaSasl {\n");
sb.append(" mechanism: ").append(toIndentedString(mechanism)).append("\n");
sb.append(" additionalProperties: ")
.append(toIndentedString(additionalProperties))
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java
new file mode 100644
index 00000000000..8022d54681f
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java
@@ -0,0 +1,67 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** SASL mechanism used for Kafka authentication. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineKafkaSaslMechanism.ObservabilityPipelineKafkaSaslMechanismSerializer
+ .class)
+public class ObservabilityPipelineKafkaSaslMechanism extends ModelEnum {
+
+ private static final Set allowedValues =
+ new HashSet(Arrays.asList("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"));
+
+ public static final ObservabilityPipelineKafkaSaslMechanism PLAIN =
+ new ObservabilityPipelineKafkaSaslMechanism("PLAIN");
+ public static final ObservabilityPipelineKafkaSaslMechanism SCRAMNOT_SHANOT_256 =
+ new ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-256");
+ public static final ObservabilityPipelineKafkaSaslMechanism SCRAMNOT_SHANOT_512 =
+ new ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-512");
+
+ ObservabilityPipelineKafkaSaslMechanism(String value) {
+ super(value, allowedValues);
+ }
+
+ public static class ObservabilityPipelineKafkaSaslMechanismSerializer
+ extends StdSerializer {
+ public ObservabilityPipelineKafkaSaslMechanismSerializer(
+ Class t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineKafkaSaslMechanismSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineKafkaSaslMechanism value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineKafkaSaslMechanism fromValue(String value) {
+ return new ObservabilityPipelineKafkaSaslMechanism(value);
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java
index d8c3ea6254d..c74dbd02f69 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java
@@ -40,10 +40,10 @@ public class ObservabilityPipelineKafkaSource {
private String id;
public static final String JSON_PROPERTY_LIBRDKAFKA_OPTIONS = "librdkafka_options";
- private List librdkafkaOptions = null;
+ private List librdkafkaOptions = null;
public static final String JSON_PROPERTY_SASL = "sasl";
- private ObservabilityPipelineKafkaSourceSasl sasl;
+ private ObservabilityPipelineKafkaSasl sasl;
public static final String JSON_PROPERTY_TLS = "tls";
private ObservabilityPipelineTls tls;
@@ -112,16 +112,16 @@ public void setId(String id) {
}
public ObservabilityPipelineKafkaSource librdkafkaOptions(
- List librdkafkaOptions) {
+ List librdkafkaOptions) {
this.librdkafkaOptions = librdkafkaOptions;
- for (ObservabilityPipelineKafkaSourceLibrdkafkaOption item : librdkafkaOptions) {
+ for (ObservabilityPipelineKafkaLibrdkafkaOption item : librdkafkaOptions) {
this.unparsed |= item.unparsed;
}
return this;
}
public ObservabilityPipelineKafkaSource addLibrdkafkaOptionsItem(
- ObservabilityPipelineKafkaSourceLibrdkafkaOption librdkafkaOptionsItem) {
+ ObservabilityPipelineKafkaLibrdkafkaOption librdkafkaOptionsItem) {
if (this.librdkafkaOptions == null) {
this.librdkafkaOptions = new ArrayList<>();
}
@@ -138,16 +138,16 @@ public ObservabilityPipelineKafkaSource addLibrdkafkaOptionsItem(
@jakarta.annotation.Nullable
@JsonProperty(JSON_PROPERTY_LIBRDKAFKA_OPTIONS)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
- public List getLibrdkafkaOptions() {
+ public List getLibrdkafkaOptions() {
return librdkafkaOptions;
}
public void setLibrdkafkaOptions(
- List librdkafkaOptions) {
+ List librdkafkaOptions) {
this.librdkafkaOptions = librdkafkaOptions;
}
- public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSourceSasl sasl) {
+ public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSasl sasl) {
this.sasl = sasl;
this.unparsed |= sasl.unparsed;
return this;
@@ -161,11 +161,11 @@ public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSourceSas
@jakarta.annotation.Nullable
@JsonProperty(JSON_PROPERTY_SASL)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
- public ObservabilityPipelineKafkaSourceSasl getSasl() {
+ public ObservabilityPipelineKafkaSasl getSasl() {
return sasl;
}
- public void setSasl(ObservabilityPipelineKafkaSourceSasl sasl) {
+ public void setSasl(ObservabilityPipelineKafkaSasl sasl) {
this.sasl = sasl;
}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java
deleted file mode 100644
index 6ec67b62962..00000000000
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
- * This product includes software developed at Datadog (https://www.datadoghq.com/).
- * Copyright 2019-Present Datadog, Inc.
- */
-
-package com.datadog.api.client.v2.model;
-
-import com.datadog.api.client.ModelEnum;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.core.JsonGenerator;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.SerializerProvider;
-import com.fasterxml.jackson.databind.annotation.JsonSerialize;
-import com.fasterxml.jackson.databind.ser.std.StdSerializer;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-
-/** SASL mechanism used for Kafka authentication. */
-@JsonSerialize(
- using =
- ObservabilityPipelinePipelineKafkaSourceSaslMechanism
- .ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer.class)
-public class ObservabilityPipelinePipelineKafkaSourceSaslMechanism extends ModelEnum {
-
- private static final Set allowedValues =
- new HashSet(Arrays.asList("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"));
-
- public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism PLAIN =
- new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("PLAIN");
- public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism SCRAMNOT_SHANOT_256 =
- new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-256");
- public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism SCRAMNOT_SHANOT_512 =
- new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-512");
-
- ObservabilityPipelinePipelineKafkaSourceSaslMechanism(String value) {
- super(value, allowedValues);
- }
-
- public static class ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer
- extends StdSerializer {
- public ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer(
- Class t) {
- super(t);
- }
-
- public ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer() {
- this(null);
- }
-
- @Override
- public void serialize(
- ObservabilityPipelinePipelineKafkaSourceSaslMechanism value,
- JsonGenerator jgen,
- SerializerProvider provider)
- throws IOException, JsonProcessingException {
- jgen.writeObject(value.value);
- }
- }
-
- @JsonCreator
- public static ObservabilityPipelinePipelineKafkaSourceSaslMechanism fromValue(String value) {
- return new ObservabilityPipelinePipelineKafkaSourceSaslMechanism(value);
- }
-}