diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index d2192b74841..184c50dcc6b 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -35178,6 +35178,43 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAddHostnameProcessor: + description: The `add_hostname` processor adds the hostname to log events. + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: add-hostname-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessorType' + required: + - id + - type + - include + - enabled + type: object + ObservabilityPipelineAddHostnameProcessorType: + default: add_hostname + description: The processor type. The value should always be `add_hostname`. + enum: + - add_hostname + example: add_hostname + type: string + x-enum-varnames: + - ADD_HOSTNAME ObservabilityPipelineAmazonDataFirehoseSource: description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. properties: @@ -35600,24 +35637,27 @@ components: ObservabilityPipelineConfigProcessorItem: description: A processor for the pipeline. oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: 
'#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. oneOf: @@ -37279,6 +37319,72 @@ components: type: string x-enum-varnames: - PARSE_JSON + ObservabilityPipelineParseXMLProcessor: + description: The `parse_xml` processor parses XML from a specified field and + extracts it into the event. + properties: + always_use_text_key: + description: Whether to always use a text key for element content. + type: boolean + attr_prefix: + description: The prefix to use for XML attributes in the parsed output. + type: string + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + field: + description: The name of the log field that contains an XML string. + example: message + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: parse-xml-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + include_attr: + description: Whether to include XML attributes in the parsed output. + type: boolean + parse_bool: + description: Whether to parse boolean values from strings. + type: boolean + parse_null: + description: Whether to parse null values. + type: boolean + parse_number: + description: Whether to parse numeric values from strings. + type: boolean + text_key: + description: The key name to use for text content within XML elements. Must + be at least 1 character if specified. + minLength: 1 + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessorType' + required: + - id + - type + - include + - field + - enabled + type: object + ObservabilityPipelineParseXMLProcessorType: + default: parse_xml + description: The processor type. The value should always be `parse_xml`. + enum: + - parse_xml + example: parse_xml + type: string + x-enum-varnames: + - PARSE_XML ObservabilityPipelinePipelineKafkaSourceSaslMechanism: description: SASL mechanism used for Kafka authentication. enum: @@ -38442,6 +38548,68 @@ components: - type - attributes type: object + ObservabilityPipelineSplitArrayProcessor: + description: The `split_array` processor splits array fields into separate events + based on configured rules. + properties: + arrays: + description: A list of array split configurations. + items: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorArrayConfig' + maxItems: 15 + minItems: 1 + type: array + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). 
+ example: split-array-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. For split_array, this should typically be `*`. + example: '*' + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorType' + required: + - id + - type + - include + - arrays + - enabled + type: object + ObservabilityPipelineSplitArrayProcessorArrayConfig: + description: Configuration for a single array split operation. + properties: + field: + description: The path to the array field to split. + example: tags + type: string + include: + description: A Datadog search query used to determine which logs this array + split operation targets. + example: '*' + type: string + required: + - include + - field + type: object + ObservabilityPipelineSplitArrayProcessorType: + default: split_array + description: The processor type. The value should always be `split_array`. + enum: + - split_array + example: split_array + type: string + x-enum-varnames: + - SPLIT_ARRAY ObservabilityPipelineSplunkHecDestination: description: The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java new file mode 100644 index 00000000000..f6538cf3781 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java @@ -0,0 +1,268 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The add_hostname processor adds the hostname to log events. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_DISPLAY_NAME, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAddHostnameProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DISPLAY_NAME = "display_name"; + private String displayName; + + public static final String JSON_PROPERTY_ENABLED = "enabled"; + private Boolean enabled; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineAddHostnameProcessorType type = + ObservabilityPipelineAddHostnameProcessorType.ADD_HOSTNAME; + + public ObservabilityPipelineAddHostnameProcessor() {} + + @JsonCreator + public ObservabilityPipelineAddHostnameProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineAddHostnameProcessorType type) { + this.enabled = enabled; + this.id = id; + this.include = include; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineAddHostnameProcessor displayName(String displayName) { + this.displayName = displayName; + return this; + } + + /** + * The display name for a component. + * + * @return displayName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public ObservabilityPipelineAddHostnameProcessor enabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + /** + * Whether this processor is enabled. + * + * @return enabled + */ + @JsonProperty(JSON_PROPERTY_ENABLED) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public ObservabilityPipelineAddHostnameProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineAddHostnameProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. 
+ * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineAddHostnameProcessor type( + ObservabilityPipelineAddHostnameProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be add_hostname. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAddHostnameProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineAddHostnameProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAddHostnameProcessor + */ + @JsonAnySetter + public ObservabilityPipelineAddHostnameProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAddHostnameProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAddHostnameProcessor observabilityPipelineAddHostnameProcessor = + (ObservabilityPipelineAddHostnameProcessor) o; + return Objects.equals(this.displayName, observabilityPipelineAddHostnameProcessor.displayName) + && Objects.equals(this.enabled, observabilityPipelineAddHostnameProcessor.enabled) + && Objects.equals(this.id, observabilityPipelineAddHostnameProcessor.id) + && Objects.equals(this.include, observabilityPipelineAddHostnameProcessor.include) + && Objects.equals(this.type, observabilityPipelineAddHostnameProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineAddHostnameProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, enabled, id, include, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAddHostnameProcessor {\n"); + sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); + sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java new file mode 100644 index 00000000000..c635c6c8a00 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be add_hostname. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineAddHostnameProcessorType + .ObservabilityPipelineAddHostnameProcessorTypeSerializer.class) +public class ObservabilityPipelineAddHostnameProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("add_hostname")); + + public static final ObservabilityPipelineAddHostnameProcessorType ADD_HOSTNAME = + new ObservabilityPipelineAddHostnameProcessorType("add_hostname"); + + ObservabilityPipelineAddHostnameProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAddHostnameProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineAddHostnameProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAddHostnameProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAddHostnameProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAddHostnameProcessorType fromValue(String value) { + return new ObservabilityPipelineAddHostnameProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java index 3ec77e02e20..0c4d9771729 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java @@ -90,715 +90,710 @@ public ObservabilityPipelineConfigProcessorItem deserialize( boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); int match = 0; JsonToken token = tree.traverse(jp.getCodec()).nextToken(); - // deserialize ObservabilityPipelineFilterProcessor + // deserialize ObservabilityPipelineAddEnvVarsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineFilterProcessor.class.equals(Integer.class) - || ObservabilityPipelineFilterProcessor.class.equals(Long.class) - || ObservabilityPipelineFilterProcessor.class.equals(Float.class) - || ObservabilityPipelineFilterProcessor.class.equals(Double.class) - || ObservabilityPipelineFilterProcessor.class.equals(Boolean.class) - || ObservabilityPipelineFilterProcessor.class.equals(String.class)) { + if (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineFilterProcessor.class.equals(Integer.class) - || ObservabilityPipelineFilterProcessor.class.equals(Long.class)) + ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineFilterProcessor.class.equals(Float.class) - || 
ObservabilityPipelineFilterProcessor.class.equals(Double.class)) + ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineFilterProcessor.class.equals(Boolean.class) + (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineFilterProcessor.class.equals(String.class) + (ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFilterProcessor.class); + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAddEnvVarsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineFilterProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineAddEnvVarsProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFilterProcessor'"); + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineAddEnvVarsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineFilterProcessor'", + "Input data does not match schema 'ObservabilityPipelineAddEnvVarsProcessor'", e); } - // deserialize ObservabilityPipelineParseJSONProcessor + // deserialize ObservabilityPipelineAddFieldsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(String.class)) { + if (ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class)) + ((ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class)) + ((ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class)) && (token == 
JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) + (ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineParseJSONProcessor.class.equals(String.class) + (ObservabilityPipelineAddFieldsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineParseJSONProcessor.class); + .readValueAs(ObservabilityPipelineAddFieldsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineParseJSONProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineAddFieldsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineParseJSONProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineAddFieldsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineParseJSONProcessor'", + "Input data does not match schema 'ObservabilityPipelineAddFieldsProcessor'", e); } - // deserialize ObservabilityPipelineQuotaProcessor + // deserialize ObservabilityPipelineAddHostnameProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Long.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Float.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Double.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) - || ObservabilityPipelineQuotaProcessor.class.equals(String.class)) { + if (ObservabilityPipelineAddHostnameProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Long.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Float.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Double.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Boolean.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Long.class)) + ((ObservabilityPipelineAddHostnameProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineQuotaProcessor.class.equals(Float.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Double.class)) + ((ObservabilityPipelineAddHostnameProcessor.class.equals(Float.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) + (ObservabilityPipelineAddHostnameProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token 
== JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineQuotaProcessor.class.equals(String.class) + (ObservabilityPipelineAddHostnameProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineQuotaProcessor.class); + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAddHostnameProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineQuotaProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineAddHostnameProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineQuotaProcessor'"); + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineAddHostnameProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineQuotaProcessor'", + "Input data does not match schema 'ObservabilityPipelineAddHostnameProcessor'", e); } - // deserialize ObservabilityPipelineAddFieldsProcessor + // deserialize ObservabilityPipelineCustomProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineCustomProcessor.class.equals(Integer.class) + || ObservabilityPipelineCustomProcessor.class.equals(Long.class) + || ObservabilityPipelineCustomProcessor.class.equals(Float.class) + || ObservabilityPipelineCustomProcessor.class.equals(Double.class) + || ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) + || ObservabilityPipelineCustomProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineCustomProcessor.class.equals(Integer.class) + || ObservabilityPipelineCustomProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineCustomProcessor.class.equals(Float.class) + || ObservabilityPipelineCustomProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineAddFieldsProcessor.class.equals(String.class) + (ObservabilityPipelineCustomProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp 
= - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineAddFieldsProcessor.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineCustomProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineAddFieldsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineCustomProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineAddFieldsProcessor'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineCustomProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineAddFieldsProcessor'", + "Input data does not match schema 'ObservabilityPipelineCustomProcessor'", e); } - // deserialize ObservabilityPipelineRemoveFieldsProcessor + // deserialize ObservabilityPipelineDatadogTagsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class) + (ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineRemoveFieldsProcessor.class); + 
.readValueAs(ObservabilityPipelineDatadogTagsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineRemoveFieldsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineDatadogTagsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineRemoveFieldsProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineDatadogTagsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineRemoveFieldsProcessor'", + "Input data does not match schema 'ObservabilityPipelineDatadogTagsProcessor'", e); } - // deserialize ObservabilityPipelineRenameFieldsProcessor + // deserialize ObservabilityPipelineDedupeProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Long.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Float.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Double.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) + || ObservabilityPipelineDedupeProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineDedupeProcessor.class.equals(Float.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class) + (ObservabilityPipelineDedupeProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineRenameFieldsProcessor.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineDedupeProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // 
validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineRenameFieldsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineDedupeProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineRenameFieldsProcessor'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineDedupeProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineRenameFieldsProcessor'", + "Input data does not match schema 'ObservabilityPipelineDedupeProcessor'", e); } - // deserialize ObservabilityPipelineGenerateMetricsProcessor + // deserialize ObservabilityPipelineEnrichmentTableProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class) + (ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineGenerateMetricsProcessor.class); + .readValueAs(ObservabilityPipelineEnrichmentTableProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should 
be. - if (!((ObservabilityPipelineGenerateMetricsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineEnrichmentTableProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( Level.FINER, - "Input data matches schema 'ObservabilityPipelineGenerateMetricsProcessor'"); + "Input data matches schema 'ObservabilityPipelineEnrichmentTableProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineGenerateMetricsProcessor'", + "Input data does not match schema 'ObservabilityPipelineEnrichmentTableProcessor'", e); } - // deserialize ObservabilityPipelineSampleProcessor + // deserialize ObservabilityPipelineFilterProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSampleProcessor.class.equals(Integer.class) - || ObservabilityPipelineSampleProcessor.class.equals(Long.class) - || ObservabilityPipelineSampleProcessor.class.equals(Float.class) - || ObservabilityPipelineSampleProcessor.class.equals(Double.class) - || ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) - || ObservabilityPipelineSampleProcessor.class.equals(String.class)) { + if (ObservabilityPipelineFilterProcessor.class.equals(Integer.class) + || ObservabilityPipelineFilterProcessor.class.equals(Long.class) + || ObservabilityPipelineFilterProcessor.class.equals(Float.class) + || ObservabilityPipelineFilterProcessor.class.equals(Double.class) + || ObservabilityPipelineFilterProcessor.class.equals(Boolean.class) + || ObservabilityPipelineFilterProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSampleProcessor.class.equals(Integer.class) - || ObservabilityPipelineSampleProcessor.class.equals(Long.class)) + ((ObservabilityPipelineFilterProcessor.class.equals(Integer.class) + || ObservabilityPipelineFilterProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSampleProcessor.class.equals(Float.class) - || ObservabilityPipelineSampleProcessor.class.equals(Double.class)) + ((ObservabilityPipelineFilterProcessor.class.equals(Float.class) + || ObservabilityPipelineFilterProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) + (ObservabilityPipelineFilterProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSampleProcessor.class.equals(String.class) + (ObservabilityPipelineFilterProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSampleProcessor.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFilterProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineSampleProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineFilterProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSampleProcessor'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFilterProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSampleProcessor'", + "Input data does not match schema 'ObservabilityPipelineFilterProcessor'", e); } - // deserialize ObservabilityPipelineParseGrokProcessor + // deserialize ObservabilityPipelineGenerateMetricsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(String.class)) { + if (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class)) + ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class)) + ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) + (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineParseGrokProcessor.class.equals(String.class) + (ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineParseGrokProcessor.class); + .readValueAs(ObservabilityPipelineGenerateMetricsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineParseGrokProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineGenerateMetricsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineParseGrokProcessor'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGenerateMetricsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineParseGrokProcessor'", + "Input data does not match schema 'ObservabilityPipelineGenerateMetricsProcessor'", e); } - // deserialize ObservabilityPipelineSensitiveDataScannerProcessor + // deserialize ObservabilityPipelineOcsfMapperProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Long.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Double.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)) { + if (ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( - Long.class)) + ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( - Double.class)) + ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + (ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class) + (ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessor.class); + .readValueAs(ObservabilityPipelineOcsfMapperProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineSensitiveDataScannerProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineOcsfMapperProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineSensitiveDataScannerProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineOcsfMapperProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSensitiveDataScannerProcessor'", + "Input data does not match schema 'ObservabilityPipelineOcsfMapperProcessor'", e); } - // deserialize ObservabilityPipelineOcsfMapperProcessor + // deserialize ObservabilityPipelineParseGrokProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class)) { + if (ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class)) + ((ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class)) + ((ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class) + (ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class) + (ObservabilityPipelineParseGrokProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineOcsfMapperProcessor.class); + .readValueAs(ObservabilityPipelineParseGrokProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineOcsfMapperProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineParseGrokProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineOcsfMapperProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseGrokProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineOcsfMapperProcessor'", + "Input data does not match schema 'ObservabilityPipelineParseGrokProcessor'", e); } - // deserialize ObservabilityPipelineAddEnvVarsProcessor + // deserialize ObservabilityPipelineParseJSONProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class) + (ObservabilityPipelineParseJSONProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineAddEnvVarsProcessor.class); + .readValueAs(ObservabilityPipelineParseJSONProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineAddEnvVarsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineParseJSONProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineAddEnvVarsProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseJSONProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineAddEnvVarsProcessor'", + "Input data does not match schema 'ObservabilityPipelineParseJSONProcessor'", e); } - // deserialize ObservabilityPipelineDedupeProcessor + // deserialize ObservabilityPipelineParseXMLProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Long.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Float.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Double.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) - || ObservabilityPipelineDedupeProcessor.class.equals(String.class)) { + if (ObservabilityPipelineParseXMLProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Long.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Float.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Double.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Long.class)) + ((ObservabilityPipelineParseXMLProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineDedupeProcessor.class.equals(Float.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Double.class)) + ((ObservabilityPipelineParseXMLProcessor.class.equals(Float.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) + (ObservabilityPipelineParseXMLProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineDedupeProcessor.class.equals(String.class) + (ObservabilityPipelineParseXMLProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineDedupeProcessor.class); + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineParseXMLProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineDedupeProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineParseXMLProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineDedupeProcessor'"); + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseXMLProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineDedupeProcessor'", + "Input data does not match schema 'ObservabilityPipelineParseXMLProcessor'", e); } - // deserialize ObservabilityPipelineEnrichmentTableProcessor + // deserialize ObservabilityPipelineQuotaProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)) { + if (ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Long.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Float.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Double.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) + || ObservabilityPipelineQuotaProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class)) + ((ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class)) + ((ObservabilityPipelineQuotaProcessor.class.equals(Float.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) + (ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class) + (ObservabilityPipelineQuotaProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineEnrichmentTableProcessor.class); + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineQuotaProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineEnrichmentTableProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineQuotaProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineEnrichmentTableProcessor'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineQuotaProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineEnrichmentTableProcessor'", + "Input data does not match schema 'ObservabilityPipelineQuotaProcessor'", e); } @@ -851,154 +846,312 @@ public ObservabilityPipelineConfigProcessorItem deserialize( e); } - // deserialize ObservabilityPipelineThrottleProcessor + // deserialize ObservabilityPipelineRemoveFieldsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Long.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Float.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Double.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) - || ObservabilityPipelineThrottleProcessor.class.equals(String.class)) { + if (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Long.class)) + ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineThrottleProcessor.class.equals(Float.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Double.class)) + ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineThrottleProcessor.class.equals(String.class) + (ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineThrottleProcessor.class); + .readValueAs(ObservabilityPipelineRemoveFieldsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineThrottleProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineRemoveFieldsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineThrottleProcessor'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineRemoveFieldsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineThrottleProcessor'", + "Input data does not match schema 'ObservabilityPipelineRemoveFieldsProcessor'", e); } - // deserialize ObservabilityPipelineCustomProcessor + // deserialize ObservabilityPipelineRenameFieldsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineCustomProcessor.class.equals(Integer.class) - || ObservabilityPipelineCustomProcessor.class.equals(Long.class) - || ObservabilityPipelineCustomProcessor.class.equals(Float.class) - || ObservabilityPipelineCustomProcessor.class.equals(Double.class) - || ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) - || ObservabilityPipelineCustomProcessor.class.equals(String.class)) { + if (ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineCustomProcessor.class.equals(Integer.class) - || ObservabilityPipelineCustomProcessor.class.equals(Long.class)) + ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineCustomProcessor.class.equals(Float.class) - || ObservabilityPipelineCustomProcessor.class.equals(Double.class)) + ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) + (ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineCustomProcessor.class.equals(String.class) + (ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineCustomProcessor.class); + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineRenameFieldsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineCustomProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineRenameFieldsProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineCustomProcessor'"); + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineRenameFieldsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineCustomProcessor'", + "Input data does not match schema 'ObservabilityPipelineRenameFieldsProcessor'", e); } - // deserialize ObservabilityPipelineDatadogTagsProcessor + // deserialize ObservabilityPipelineSampleProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineSampleProcessor.class.equals(Integer.class) + || ObservabilityPipelineSampleProcessor.class.equals(Long.class) + || ObservabilityPipelineSampleProcessor.class.equals(Float.class) + || ObservabilityPipelineSampleProcessor.class.equals(Double.class) + || ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSampleProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineSampleProcessor.class.equals(Integer.class) + || ObservabilityPipelineSampleProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineSampleProcessor.class.equals(Float.class) + || ObservabilityPipelineSampleProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class) + (ObservabilityPipelineSampleProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSampleProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSampleProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSampleProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSampleProcessor'", + e); + } + + // deserialize ObservabilityPipelineSensitiveDataScannerProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( + Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( + Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineDatadogTagsProcessor.class); + .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineDatadogTagsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineSensitiveDataScannerProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineDatadogTagsProcessor'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineSensitiveDataScannerProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineDatadogTagsProcessor'", + "Input data does not match schema 'ObservabilityPipelineSensitiveDataScannerProcessor'", + e); + } + + // deserialize ObservabilityPipelineSplitArrayProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSplitArrayProcessor.class.equals(Integer.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Long.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Float.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Double.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSplitArrayProcessor.class.equals(Integer.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSplitArrayProcessor.class.equals(Float.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSplitArrayProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSplitArrayProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSplitArrayProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSplitArrayProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineSplitArrayProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSplitArrayProcessor'", + e); + } + + // deserialize ObservabilityPipelineThrottleProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Long.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Float.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Double.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + || ObservabilityPipelineThrottleProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineThrottleProcessor.class.equals(Float.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineThrottleProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineThrottleProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineThrottleProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineThrottleProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineThrottleProcessor'", e); } @@ -1032,32 +1185,42 @@ public ObservabilityPipelineConfigProcessorItem() { super("oneOf", Boolean.FALSE); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineFilterProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddEnvVarsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseJSONProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddFieldsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineQuotaProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddHostnameProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddFieldsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineCustomProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRemoveFieldsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDatadogTagsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameFieldsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDedupeProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineEnrichmentTableProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineFilterProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } @@ -1067,7 +1230,7 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineGenerateMet setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSampleProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineOcsfMapperProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } @@ -1077,107 +1240,121 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseGrokPr setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem( - ObservabilityPipelineSensitiveDataScannerProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseJSONProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineOcsfMapperProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseXMLProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddEnvVarsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineQuotaProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public 
ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDedupeProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineReduceProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineEnrichmentTableProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRemoveFieldsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineReduceProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameFieldsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineThrottleProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSampleProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineCustomProcessor o) { + public ObservabilityPipelineConfigProcessorItem( + ObservabilityPipelineSensitiveDataScannerProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDatadogTagsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSplitArrayProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineThrottleProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } static { + schemas.put( + "ObservabilityPipelineAddEnvVarsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineAddFieldsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineAddHostnameProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineCustomProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineDatadogTagsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineDedupeProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineEnrichmentTableProcessor", + new GenericType() {}); schemas.put( "ObservabilityPipelineFilterProcessor", new GenericType() {}); + schemas.put( + "ObservabilityPipelineGenerateMetricsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineOcsfMapperProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineParseGrokProcessor", + new GenericType() {}); schemas.put( "ObservabilityPipelineParseJSONProcessor", new GenericType() {}); + schemas.put( + "ObservabilityPipelineParseXMLProcessor", + new GenericType() {}); schemas.put( "ObservabilityPipelineQuotaProcessor", new GenericType() {}); schemas.put( - "ObservabilityPipelineAddFieldsProcessor", - new GenericType() {}); + "ObservabilityPipelineReduceProcessor", + new GenericType() {}); schemas.put( "ObservabilityPipelineRemoveFieldsProcessor", new GenericType() {}); schemas.put( "ObservabilityPipelineRenameFieldsProcessor", new GenericType() {}); - schemas.put( - "ObservabilityPipelineGenerateMetricsProcessor", - new GenericType() {}); schemas.put( "ObservabilityPipelineSampleProcessor", new GenericType() {}); - schemas.put( - "ObservabilityPipelineParseGrokProcessor", - new GenericType() {}); schemas.put( "ObservabilityPipelineSensitiveDataScannerProcessor", new GenericType() {}); schemas.put( - "ObservabilityPipelineOcsfMapperProcessor", - new GenericType() 
{}); - schemas.put( - "ObservabilityPipelineAddEnvVarsProcessor", - new GenericType() {}); - schemas.put( - "ObservabilityPipelineDedupeProcessor", - new GenericType() {}); - schemas.put( - "ObservabilityPipelineEnrichmentTableProcessor", - new GenericType() {}); - schemas.put( - "ObservabilityPipelineReduceProcessor", - new GenericType() {}); + "ObservabilityPipelineSplitArrayProcessor", + new GenericType() {}); schemas.put( "ObservabilityPipelineThrottleProcessor", new GenericType() {}); - schemas.put( - "ObservabilityPipelineCustomProcessor", - new GenericType() {}); - schemas.put( - "ObservabilityPipelineDatadogTagsProcessor", - new GenericType() {}); JSON.registerDescendants( ObservabilityPipelineConfigProcessorItem.class, Collections.unmodifiableMap(schemas)); } @@ -1189,16 +1366,17 @@ public Map getSchemas() { /** * Set the instance that matches the oneOf child schema, check the instance parameter is valid - * against the oneOf child schemas: ObservabilityPipelineFilterProcessor, - * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, - * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, - * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, - * ObservabilityPipelineSensitiveDataScannerProcessor, ObservabilityPipelineOcsfMapperProcessor, - * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineDedupeProcessor, - * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineReduceProcessor, - * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineCustomProcessor, - * ObservabilityPipelineDatadogTagsProcessor + * against the oneOf child schemas: ObservabilityPipelineAddEnvVarsProcessor, + * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineAddHostnameProcessor, + * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor, + * ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor, + * ObservabilityPipelineFilterProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor, + * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor, + * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, + * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, + * ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor * *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -1206,94 +1384,109 @@ public Map getSchemas() { @Override public void setActualInstance(Object instance) { if (JSON.isInstanceOf( - ObservabilityPipelineFilterProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineAddEnvVarsProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineParseJSONProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineAddFieldsProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineQuotaProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineAddHostnameProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineAddFieldsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineCustomProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineRemoveFieldsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineDatadogTagsProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineDedupeProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineEnrichmentTableProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineFilterProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineRenameFieldsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineGenerateMetricsProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineGenerateMetricsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineOcsfMapperProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineSampleProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineParseGrokProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineParseGrokProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineParseJSONProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineSensitiveDataScannerProcessor.class, - instance, - new HashSet>())) { + ObservabilityPipelineParseXMLProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineOcsfMapperProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineQuotaProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineAddEnvVarsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineReduceProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineDedupeProcessor.class, instance, new HashSet>())) { + 
ObservabilityPipelineRemoveFieldsProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineEnrichmentTableProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineRenameFieldsProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineReduceProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineSampleProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineThrottleProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineSensitiveDataScannerProcessor.class, + instance, + new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineCustomProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineSplitArrayProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineDatadogTagsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineThrottleProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } @@ -1303,43 +1496,46 @@ public void setActualInstance(Object instance) { return; } throw new RuntimeException( - "Invalid instance type. Must be ObservabilityPipelineFilterProcessor," - + " ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor," - + " ObservabilityPipelineAddFieldsProcessor," - + " ObservabilityPipelineRemoveFieldsProcessor," - + " ObservabilityPipelineRenameFieldsProcessor," - + " ObservabilityPipelineGenerateMetricsProcessor," - + " ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor," - + " ObservabilityPipelineSensitiveDataScannerProcessor," - + " ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineAddEnvVarsProcessor," + "Invalid instance type. 
Must be ObservabilityPipelineAddEnvVarsProcessor," + + " ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineAddHostnameProcessor," + + " ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor," + " ObservabilityPipelineDedupeProcessor," + " ObservabilityPipelineEnrichmentTableProcessor," - + " ObservabilityPipelineReduceProcessor, ObservabilityPipelineThrottleProcessor," - + " ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor"); + + " ObservabilityPipelineFilterProcessor," + + " ObservabilityPipelineGenerateMetricsProcessor," + + " ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor," + + " ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor," + + " ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor," + + " ObservabilityPipelineRemoveFieldsProcessor," + + " ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineSampleProcessor," + + " ObservabilityPipelineSensitiveDataScannerProcessor," + + " ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor"); } /** - * Get the actual instance, which can be the following: ObservabilityPipelineFilterProcessor, - * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, - * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, - * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, - * ObservabilityPipelineSensitiveDataScannerProcessor, ObservabilityPipelineOcsfMapperProcessor, - * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineDedupeProcessor, - * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineReduceProcessor, - * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineCustomProcessor, - * ObservabilityPipelineDatadogTagsProcessor + * Get the actual instance, which can be the following: ObservabilityPipelineAddEnvVarsProcessor, + * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineAddHostnameProcessor, + * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor, + * ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor, + * ObservabilityPipelineFilterProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor, + * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor, + * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, + * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, + * ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor * - * @return The actual instance (ObservabilityPipelineFilterProcessor, - * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, - * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, - * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, - * ObservabilityPipelineSensitiveDataScannerProcessor, - * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineAddEnvVarsProcessor, + * @return The 
actual instance (ObservabilityPipelineAddEnvVarsProcessor, + * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineAddHostnameProcessor, + * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor, * ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor, - * ObservabilityPipelineReduceProcessor, ObservabilityPipelineThrottleProcessor, - * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor) + * ObservabilityPipelineFilterProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor, + * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor, + * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, + * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, + * ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor) */ @Override public Object getActualInstance() { @@ -1347,75 +1543,101 @@ public Object getActualInstance() { } /** - * Get the actual instance of `ObservabilityPipelineFilterProcessor`. If the actual instance is - * not `ObservabilityPipelineFilterProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineAddEnvVarsProcessor`. If the actual instance + * is not `ObservabilityPipelineAddEnvVarsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineFilterProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineFilterProcessor` + * @return The actual instance of `ObservabilityPipelineAddEnvVarsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAddEnvVarsProcessor` */ - public ObservabilityPipelineFilterProcessor getObservabilityPipelineFilterProcessor() + public ObservabilityPipelineAddEnvVarsProcessor getObservabilityPipelineAddEnvVarsProcessor() throws ClassCastException { - return (ObservabilityPipelineFilterProcessor) super.getActualInstance(); + return (ObservabilityPipelineAddEnvVarsProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineParseJSONProcessor`. If the actual instance is - * not `ObservabilityPipelineParseJSONProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineAddFieldsProcessor`. If the actual instance is + * not `ObservabilityPipelineAddFieldsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineParseJSONProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineParseJSONProcessor` + * @return The actual instance of `ObservabilityPipelineAddFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAddFieldsProcessor` */ - public ObservabilityPipelineParseJSONProcessor getObservabilityPipelineParseJSONProcessor() + public ObservabilityPipelineAddFieldsProcessor getObservabilityPipelineAddFieldsProcessor() throws ClassCastException { - return (ObservabilityPipelineParseJSONProcessor) super.getActualInstance(); + return (ObservabilityPipelineAddFieldsProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineQuotaProcessor`. 
If the actual instance is not - * `ObservabilityPipelineQuotaProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineAddHostnameProcessor`. If the actual instance + * is not `ObservabilityPipelineAddHostnameProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineQuotaProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineQuotaProcessor` + * @return The actual instance of `ObservabilityPipelineAddHostnameProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAddHostnameProcessor` */ - public ObservabilityPipelineQuotaProcessor getObservabilityPipelineQuotaProcessor() + public ObservabilityPipelineAddHostnameProcessor getObservabilityPipelineAddHostnameProcessor() throws ClassCastException { - return (ObservabilityPipelineQuotaProcessor) super.getActualInstance(); + return (ObservabilityPipelineAddHostnameProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineAddFieldsProcessor`. If the actual instance is - * not `ObservabilityPipelineAddFieldsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineCustomProcessor`. If the actual instance is + * not `ObservabilityPipelineCustomProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineAddFieldsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineAddFieldsProcessor` + * @return The actual instance of `ObservabilityPipelineCustomProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineCustomProcessor` */ - public ObservabilityPipelineAddFieldsProcessor getObservabilityPipelineAddFieldsProcessor() + public ObservabilityPipelineCustomProcessor getObservabilityPipelineCustomProcessor() throws ClassCastException { - return (ObservabilityPipelineAddFieldsProcessor) super.getActualInstance(); + return (ObservabilityPipelineCustomProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineRemoveFieldsProcessor`. If the actual instance - * is not `ObservabilityPipelineRemoveFieldsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineDatadogTagsProcessor`. If the actual instance + * is not `ObservabilityPipelineDatadogTagsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineRemoveFieldsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineRemoveFieldsProcessor` + * @return The actual instance of `ObservabilityPipelineDatadogTagsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogTagsProcessor` */ - public ObservabilityPipelineRemoveFieldsProcessor getObservabilityPipelineRemoveFieldsProcessor() + public ObservabilityPipelineDatadogTagsProcessor getObservabilityPipelineDatadogTagsProcessor() throws ClassCastException { - return (ObservabilityPipelineRemoveFieldsProcessor) super.getActualInstance(); + return (ObservabilityPipelineDatadogTagsProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineRenameFieldsProcessor`. If the actual instance - * is not `ObservabilityPipelineRenameFieldsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineDedupeProcessor`. 
If the actual instance is + * not `ObservabilityPipelineDedupeProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineRenameFieldsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineRenameFieldsProcessor` + * @return The actual instance of `ObservabilityPipelineDedupeProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineDedupeProcessor` */ - public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRenameFieldsProcessor() + public ObservabilityPipelineDedupeProcessor getObservabilityPipelineDedupeProcessor() throws ClassCastException { - return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance(); + return (ObservabilityPipelineDedupeProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineEnrichmentTableProcessor`. If the actual + * instance is not `ObservabilityPipelineEnrichmentTableProcessor`, the ClassCastException will be + * thrown. + * + * @return The actual instance of `ObservabilityPipelineEnrichmentTableProcessor` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineEnrichmentTableProcessor` + */ + public ObservabilityPipelineEnrichmentTableProcessor + getObservabilityPipelineEnrichmentTableProcessor() throws ClassCastException { + return (ObservabilityPipelineEnrichmentTableProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineFilterProcessor`. If the actual instance is + * not `ObservabilityPipelineFilterProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineFilterProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineFilterProcessor` + */ + public ObservabilityPipelineFilterProcessor getObservabilityPipelineFilterProcessor() + throws ClassCastException { + return (ObservabilityPipelineFilterProcessor) super.getActualInstance(); } /** @@ -1433,15 +1655,15 @@ public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRename } /** - * Get the actual instance of `ObservabilityPipelineSampleProcessor`. If the actual instance is - * not `ObservabilityPipelineSampleProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineOcsfMapperProcessor`. If the actual instance + * is not `ObservabilityPipelineOcsfMapperProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineSampleProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineSampleProcessor` + * @return The actual instance of `ObservabilityPipelineOcsfMapperProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineOcsfMapperProcessor` */ - public ObservabilityPipelineSampleProcessor getObservabilityPipelineSampleProcessor() + public ObservabilityPipelineOcsfMapperProcessor getObservabilityPipelineOcsfMapperProcessor() throws ClassCastException { - return (ObservabilityPipelineSampleProcessor) super.getActualInstance(); + return (ObservabilityPipelineOcsfMapperProcessor) super.getActualInstance(); } /** @@ -1457,114 +1679,124 @@ public ObservabilityPipelineParseGrokProcessor getObservabilityPipelineParseGrok } /** - * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`. 
If the actual - * instance is not `ObservabilityPipelineSensitiveDataScannerProcessor`, the ClassCastException - * will be thrown. + * Get the actual instance of `ObservabilityPipelineParseJSONProcessor`. If the actual instance is + * not `ObservabilityPipelineParseJSONProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor` - * @throws ClassCastException if the instance is not - * `ObservabilityPipelineSensitiveDataScannerProcessor` + * @return The actual instance of `ObservabilityPipelineParseJSONProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineParseJSONProcessor` */ - public ObservabilityPipelineSensitiveDataScannerProcessor - getObservabilityPipelineSensitiveDataScannerProcessor() throws ClassCastException { - return (ObservabilityPipelineSensitiveDataScannerProcessor) super.getActualInstance(); + public ObservabilityPipelineParseJSONProcessor getObservabilityPipelineParseJSONProcessor() + throws ClassCastException { + return (ObservabilityPipelineParseJSONProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineOcsfMapperProcessor`. If the actual instance - * is not `ObservabilityPipelineOcsfMapperProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineParseXMLProcessor`. If the actual instance is + * not `ObservabilityPipelineParseXMLProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineOcsfMapperProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineOcsfMapperProcessor` + * @return The actual instance of `ObservabilityPipelineParseXMLProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineParseXMLProcessor` */ - public ObservabilityPipelineOcsfMapperProcessor getObservabilityPipelineOcsfMapperProcessor() + public ObservabilityPipelineParseXMLProcessor getObservabilityPipelineParseXMLProcessor() throws ClassCastException { - return (ObservabilityPipelineOcsfMapperProcessor) super.getActualInstance(); + return (ObservabilityPipelineParseXMLProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineAddEnvVarsProcessor`. If the actual instance - * is not `ObservabilityPipelineAddEnvVarsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineQuotaProcessor`. If the actual instance is not + * `ObservabilityPipelineQuotaProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineAddEnvVarsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineAddEnvVarsProcessor` + * @return The actual instance of `ObservabilityPipelineQuotaProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineQuotaProcessor` */ - public ObservabilityPipelineAddEnvVarsProcessor getObservabilityPipelineAddEnvVarsProcessor() + public ObservabilityPipelineQuotaProcessor getObservabilityPipelineQuotaProcessor() throws ClassCastException { - return (ObservabilityPipelineAddEnvVarsProcessor) super.getActualInstance(); + return (ObservabilityPipelineQuotaProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineDedupeProcessor`. If the actual instance is - * not `ObservabilityPipelineDedupeProcessor`, the ClassCastException will be thrown. 
+ * Get the actual instance of `ObservabilityPipelineReduceProcessor`. If the actual instance is + * not `ObservabilityPipelineReduceProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineDedupeProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineDedupeProcessor` + * @return The actual instance of `ObservabilityPipelineReduceProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineReduceProcessor` */ - public ObservabilityPipelineDedupeProcessor getObservabilityPipelineDedupeProcessor() + public ObservabilityPipelineReduceProcessor getObservabilityPipelineReduceProcessor() throws ClassCastException { - return (ObservabilityPipelineDedupeProcessor) super.getActualInstance(); + return (ObservabilityPipelineReduceProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineEnrichmentTableProcessor`. If the actual - * instance is not `ObservabilityPipelineEnrichmentTableProcessor`, the ClassCastException will be - * thrown. + * Get the actual instance of `ObservabilityPipelineRemoveFieldsProcessor`. If the actual instance + * is not `ObservabilityPipelineRemoveFieldsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineEnrichmentTableProcessor` - * @throws ClassCastException if the instance is not - * `ObservabilityPipelineEnrichmentTableProcessor` + * @return The actual instance of `ObservabilityPipelineRemoveFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineRemoveFieldsProcessor` */ - public ObservabilityPipelineEnrichmentTableProcessor - getObservabilityPipelineEnrichmentTableProcessor() throws ClassCastException { - return (ObservabilityPipelineEnrichmentTableProcessor) super.getActualInstance(); + public ObservabilityPipelineRemoveFieldsProcessor getObservabilityPipelineRemoveFieldsProcessor() + throws ClassCastException { + return (ObservabilityPipelineRemoveFieldsProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineReduceProcessor`. If the actual instance is - * not `ObservabilityPipelineReduceProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineRenameFieldsProcessor`. If the actual instance + * is not `ObservabilityPipelineRenameFieldsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineReduceProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineReduceProcessor` + * @return The actual instance of `ObservabilityPipelineRenameFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineRenameFieldsProcessor` */ - public ObservabilityPipelineReduceProcessor getObservabilityPipelineReduceProcessor() + public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRenameFieldsProcessor() throws ClassCastException { - return (ObservabilityPipelineReduceProcessor) super.getActualInstance(); + return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineThrottleProcessor`. If the actual instance is - * not `ObservabilityPipelineThrottleProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineSampleProcessor`. 
If the actual instance is + * not `ObservabilityPipelineSampleProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineThrottleProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineThrottleProcessor` + * @return The actual instance of `ObservabilityPipelineSampleProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSampleProcessor` */ - public ObservabilityPipelineThrottleProcessor getObservabilityPipelineThrottleProcessor() + public ObservabilityPipelineSampleProcessor getObservabilityPipelineSampleProcessor() throws ClassCastException { - return (ObservabilityPipelineThrottleProcessor) super.getActualInstance(); + return (ObservabilityPipelineSampleProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineCustomProcessor`. If the actual instance is - * not `ObservabilityPipelineCustomProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`. If the actual + * instance is not `ObservabilityPipelineSensitiveDataScannerProcessor`, the ClassCastException + * will be thrown. * - * @return The actual instance of `ObservabilityPipelineCustomProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineCustomProcessor` + * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessor` */ - public ObservabilityPipelineCustomProcessor getObservabilityPipelineCustomProcessor() + public ObservabilityPipelineSensitiveDataScannerProcessor + getObservabilityPipelineSensitiveDataScannerProcessor() throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSplitArrayProcessor`. If the actual instance + * is not `ObservabilityPipelineSplitArrayProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSplitArrayProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplitArrayProcessor` + */ + public ObservabilityPipelineSplitArrayProcessor getObservabilityPipelineSplitArrayProcessor() throws ClassCastException { - return (ObservabilityPipelineCustomProcessor) super.getActualInstance(); + return (ObservabilityPipelineSplitArrayProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineDatadogTagsProcessor`. If the actual instance - * is not `ObservabilityPipelineDatadogTagsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineThrottleProcessor`. If the actual instance is + * not `ObservabilityPipelineThrottleProcessor`, the ClassCastException will be thrown. 
* - * @return The actual instance of `ObservabilityPipelineDatadogTagsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogTagsProcessor` + * @return The actual instance of `ObservabilityPipelineThrottleProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineThrottleProcessor` */ - public ObservabilityPipelineDatadogTagsProcessor getObservabilityPipelineDatadogTagsProcessor() + public ObservabilityPipelineThrottleProcessor getObservabilityPipelineThrottleProcessor() throws ClassCastException { - return (ObservabilityPipelineDatadogTagsProcessor) super.getActualInstance(); + return (ObservabilityPipelineThrottleProcessor) super.getActualInstance(); } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java new file mode 100644 index 00000000000..5cd32403878 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java @@ -0,0 +1,503 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The parse_xml processor parses XML from a specified field and extracts it into the + * event. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ALWAYS_USE_TEXT_KEY, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ATTR_PREFIX, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_DISPLAY_NAME, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_FIELD, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_INCLUDE_ATTR, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_PARSE_BOOL, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_PARSE_NULL, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_PARSE_NUMBER, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_TEXT_KEY, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineParseXMLProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ALWAYS_USE_TEXT_KEY = "always_use_text_key"; + private Boolean alwaysUseTextKey; + + public static final String JSON_PROPERTY_ATTR_PREFIX = "attr_prefix"; + private String attrPrefix; + + public static final String JSON_PROPERTY_DISPLAY_NAME = "display_name"; + private String displayName; + + public static final String JSON_PROPERTY_ENABLED = "enabled"; + private Boolean enabled; + + public static final String JSON_PROPERTY_FIELD = "field"; + private String field; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INCLUDE_ATTR = "include_attr"; + private Boolean includeAttr; + + public static final String JSON_PROPERTY_PARSE_BOOL = "parse_bool"; + private Boolean parseBool; + + public static final String JSON_PROPERTY_PARSE_NULL = "parse_null"; + private Boolean parseNull; + + public static final String JSON_PROPERTY_PARSE_NUMBER = "parse_number"; + private Boolean parseNumber; + + public static final String JSON_PROPERTY_TEXT_KEY = "text_key"; + private String textKey; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineParseXMLProcessorType type = + ObservabilityPipelineParseXMLProcessorType.PARSE_XML; + + public ObservabilityPipelineParseXMLProcessor() {} + + @JsonCreator + public ObservabilityPipelineParseXMLProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, + @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineParseXMLProcessorType type) { + this.enabled = enabled; + this.field = field; + this.id = id; + this.include = include; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineParseXMLProcessor alwaysUseTextKey(Boolean alwaysUseTextKey) { + this.alwaysUseTextKey = alwaysUseTextKey; + return this; + } + + /** + * Whether to always use a text key for element content. 
+ * + * @return alwaysUseTextKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ALWAYS_USE_TEXT_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getAlwaysUseTextKey() { + return alwaysUseTextKey; + } + + public void setAlwaysUseTextKey(Boolean alwaysUseTextKey) { + this.alwaysUseTextKey = alwaysUseTextKey; + } + + public ObservabilityPipelineParseXMLProcessor attrPrefix(String attrPrefix) { + this.attrPrefix = attrPrefix; + return this; + } + + /** + * The prefix to use for XML attributes in the parsed output. + * + * @return attrPrefix + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ATTR_PREFIX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getAttrPrefix() { + return attrPrefix; + } + + public void setAttrPrefix(String attrPrefix) { + this.attrPrefix = attrPrefix; + } + + public ObservabilityPipelineParseXMLProcessor displayName(String displayName) { + this.displayName = displayName; + return this; + } + + /** + * The display name for a component. + * + * @return displayName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public ObservabilityPipelineParseXMLProcessor enabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + /** + * Whether this processor is enabled. + * + * @return enabled + */ + @JsonProperty(JSON_PROPERTY_ENABLED) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public ObservabilityPipelineParseXMLProcessor field(String field) { + this.field = field; + return this; + } + + /** + * The name of the log field that contains an XML string. + * + * @return field + */ + @JsonProperty(JSON_PROPERTY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public ObservabilityPipelineParseXMLProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineParseXMLProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineParseXMLProcessor includeAttr(Boolean includeAttr) { + this.includeAttr = includeAttr; + return this; + } + + /** + * Whether to include XML attributes in the parsed output. 
+ * + * @return includeAttr + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_INCLUDE_ATTR) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getIncludeAttr() { + return includeAttr; + } + + public void setIncludeAttr(Boolean includeAttr) { + this.includeAttr = includeAttr; + } + + public ObservabilityPipelineParseXMLProcessor parseBool(Boolean parseBool) { + this.parseBool = parseBool; + return this; + } + + /** + * Whether to parse boolean values from strings. + * + * @return parseBool + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PARSE_BOOL) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getParseBool() { + return parseBool; + } + + public void setParseBool(Boolean parseBool) { + this.parseBool = parseBool; + } + + public ObservabilityPipelineParseXMLProcessor parseNull(Boolean parseNull) { + this.parseNull = parseNull; + return this; + } + + /** + * Whether to parse null values. + * + * @return parseNull + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PARSE_NULL) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getParseNull() { + return parseNull; + } + + public void setParseNull(Boolean parseNull) { + this.parseNull = parseNull; + } + + public ObservabilityPipelineParseXMLProcessor parseNumber(Boolean parseNumber) { + this.parseNumber = parseNumber; + return this; + } + + /** + * Whether to parse numeric values from strings. + * + * @return parseNumber + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PARSE_NUMBER) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getParseNumber() { + return parseNumber; + } + + public void setParseNumber(Boolean parseNumber) { + this.parseNumber = parseNumber; + } + + public ObservabilityPipelineParseXMLProcessor textKey(String textKey) { + this.textKey = textKey; + return this; + } + + /** + * The key name to use for text content within XML elements. Must be at least 1 character if + * specified. + * + * @return textKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TEXT_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getTextKey() { + return textKey; + } + + public void setTextKey(String textKey) { + this.textKey = textKey; + } + + public ObservabilityPipelineParseXMLProcessor type( + ObservabilityPipelineParseXMLProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be parse_xml. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineParseXMLProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineParseXMLProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineParseXMLProcessor + */ + @JsonAnySetter + public ObservabilityPipelineParseXMLProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineParseXMLProcessor object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineParseXMLProcessor observabilityPipelineParseXmlProcessor = + (ObservabilityPipelineParseXMLProcessor) o; + return Objects.equals( + this.alwaysUseTextKey, observabilityPipelineParseXmlProcessor.alwaysUseTextKey) + && Objects.equals(this.attrPrefix, observabilityPipelineParseXmlProcessor.attrPrefix) + && Objects.equals(this.displayName, observabilityPipelineParseXmlProcessor.displayName) + && Objects.equals(this.enabled, observabilityPipelineParseXmlProcessor.enabled) + && Objects.equals(this.field, observabilityPipelineParseXmlProcessor.field) + && Objects.equals(this.id, observabilityPipelineParseXmlProcessor.id) + && Objects.equals(this.include, observabilityPipelineParseXmlProcessor.include) + && Objects.equals(this.includeAttr, observabilityPipelineParseXmlProcessor.includeAttr) + && Objects.equals(this.parseBool, observabilityPipelineParseXmlProcessor.parseBool) + && Objects.equals(this.parseNull, observabilityPipelineParseXmlProcessor.parseNull) + && Objects.equals(this.parseNumber, observabilityPipelineParseXmlProcessor.parseNumber) + && Objects.equals(this.textKey, observabilityPipelineParseXmlProcessor.textKey) + && Objects.equals(this.type, observabilityPipelineParseXmlProcessor.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineParseXmlProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + alwaysUseTextKey, + attrPrefix, + displayName, + enabled, + field, + id, + include, + includeAttr, + parseBool, + parseNull, + parseNumber, + textKey, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineParseXMLProcessor {\n"); + sb.append(" alwaysUseTextKey: ").append(toIndentedString(alwaysUseTextKey)).append("\n"); + sb.append(" attrPrefix: ").append(toIndentedString(attrPrefix)).append("\n"); + sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); + sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" field: ").append(toIndentedString(field)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" includeAttr: 
").append(toIndentedString(includeAttr)).append("\n"); + sb.append(" parseBool: ").append(toIndentedString(parseBool)).append("\n"); + sb.append(" parseNull: ").append(toIndentedString(parseNull)).append("\n"); + sb.append(" parseNumber: ").append(toIndentedString(parseNumber)).append("\n"); + sb.append(" textKey: ").append(toIndentedString(textKey)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java new file mode 100644 index 00000000000..8101a254703 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be parse_xml. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineParseXMLProcessorType + .ObservabilityPipelineParseXMLProcessorTypeSerializer.class) +public class ObservabilityPipelineParseXMLProcessorType extends ModelEnum<String> { + + private static final Set<String> allowedValues = new HashSet<String>(Arrays.asList("parse_xml")); + + public static final ObservabilityPipelineParseXMLProcessorType PARSE_XML = + new ObservabilityPipelineParseXMLProcessorType("parse_xml"); + + ObservabilityPipelineParseXMLProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineParseXMLProcessorTypeSerializer + extends StdSerializer<ObservabilityPipelineParseXMLProcessorType> { + public ObservabilityPipelineParseXMLProcessorTypeSerializer( + Class<ObservabilityPipelineParseXMLProcessorType> t) { + super(t); + } + + public ObservabilityPipelineParseXMLProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineParseXMLProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineParseXMLProcessorType fromValue(String value) { + return new ObservabilityPipelineParseXMLProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java new file mode 100644 index 00000000000..e6487f59fd8 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java @@ -0,0 +1,314 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The split_array processor splits array fields into separate events based on + * configured rules.
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_ARRAYS, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_DISPLAY_NAME, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSplitArrayProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ARRAYS = "arrays"; + private List<ObservabilityPipelineSplitArrayProcessorArrayConfig> arrays = new ArrayList<>(); + + public static final String JSON_PROPERTY_DISPLAY_NAME = "display_name"; + private String displayName; + + public static final String JSON_PROPERTY_ENABLED = "enabled"; + private Boolean enabled; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSplitArrayProcessorType type = + ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY; + + public ObservabilityPipelineSplitArrayProcessor() {} + + @JsonCreator + public ObservabilityPipelineSplitArrayProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ARRAYS) + List<ObservabilityPipelineSplitArrayProcessorArrayConfig> arrays, + @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSplitArrayProcessorType type) { + this.arrays = arrays; + this.enabled = enabled; + this.id = id; + this.include = include; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSplitArrayProcessor arrays( + List<ObservabilityPipelineSplitArrayProcessorArrayConfig> arrays) { + this.arrays = arrays; + for (ObservabilityPipelineSplitArrayProcessorArrayConfig item : arrays) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineSplitArrayProcessor addArraysItem( + ObservabilityPipelineSplitArrayProcessorArrayConfig arraysItem) { + this.arrays.add(arraysItem); + this.unparsed |= arraysItem.unparsed; + return this; + } + + /** + * A list of array split configurations. + * + * @return arrays + */ + @JsonProperty(JSON_PROPERTY_ARRAYS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List<ObservabilityPipelineSplitArrayProcessorArrayConfig> getArrays() { + return arrays; + } + + public void setArrays(List<ObservabilityPipelineSplitArrayProcessorArrayConfig> arrays) { + this.arrays = arrays; + } + + public ObservabilityPipelineSplitArrayProcessor displayName(String displayName) { + this.displayName = displayName; + return this; + } + + /** + * The display name for a component. + * + * @return displayName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public ObservabilityPipelineSplitArrayProcessor enabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + /** + * Whether this processor is enabled.
+ * + * @return enabled + */ + @JsonProperty(JSON_PROPERTY_ENABLED) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public ObservabilityPipelineSplitArrayProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSplitArrayProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. For split_array, + * this should typically be *. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineSplitArrayProcessor type( + ObservabilityPipelineSplitArrayProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be split_array. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSplitArrayProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineSplitArrayProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSplitArrayProcessor + */ + @JsonAnySetter + public ObservabilityPipelineSplitArrayProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSplitArrayProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSplitArrayProcessor observabilityPipelineSplitArrayProcessor = + (ObservabilityPipelineSplitArrayProcessor) o; + return Objects.equals(this.arrays, observabilityPipelineSplitArrayProcessor.arrays) + && Objects.equals(this.displayName, observabilityPipelineSplitArrayProcessor.displayName) + && Objects.equals(this.enabled, observabilityPipelineSplitArrayProcessor.enabled) + && Objects.equals(this.id, observabilityPipelineSplitArrayProcessor.id) + && Objects.equals(this.include, observabilityPipelineSplitArrayProcessor.include) + && Objects.equals(this.type, observabilityPipelineSplitArrayProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSplitArrayProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(arrays, displayName, enabled, id, include, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSplitArrayProcessor {\n"); + sb.append(" arrays: ").append(toIndentedString(arrays)).append("\n"); + sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); + sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java new file mode 100644 index 00000000000..fab0f28e287 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java @@ -0,0 +1,180 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Configuration for a single array split operation. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSplitArrayProcessorArrayConfig.JSON_PROPERTY_FIELD, + ObservabilityPipelineSplitArrayProcessorArrayConfig.JSON_PROPERTY_INCLUDE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSplitArrayProcessorArrayConfig { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELD = "field"; + private String field; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public ObservabilityPipelineSplitArrayProcessorArrayConfig() {} + + @JsonCreator + public ObservabilityPipelineSplitArrayProcessorArrayConfig( + @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include) { + this.field = field; + this.include = include; + } + + public ObservabilityPipelineSplitArrayProcessorArrayConfig field(String field) { + this.field = field; + return this; + } + + /** + * The path to the array field to split. + * + * @return field + */ + @JsonProperty(JSON_PROPERTY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public ObservabilityPipelineSplitArrayProcessorArrayConfig include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this array split operation targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSplitArrayProcessorArrayConfig + */ + @JsonAnySetter + public ObservabilityPipelineSplitArrayProcessorArrayConfig putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSplitArrayProcessorArrayConfig object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSplitArrayProcessorArrayConfig + observabilityPipelineSplitArrayProcessorArrayConfig = + (ObservabilityPipelineSplitArrayProcessorArrayConfig) o; + return Objects.equals(this.field, observabilityPipelineSplitArrayProcessorArrayConfig.field) + && Objects.equals(this.include, observabilityPipelineSplitArrayProcessorArrayConfig.include) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSplitArrayProcessorArrayConfig.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(field, include, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSplitArrayProcessorArrayConfig {\n"); + sb.append(" field: ").append(toIndentedString(field)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java new file mode 100644 index 00000000000..ec68b2c2563 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be split_array. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSplitArrayProcessorType + .ObservabilityPipelineSplitArrayProcessorTypeSerializer.class) +public class ObservabilityPipelineSplitArrayProcessorType extends ModelEnum<String> { + + private static final Set<String> allowedValues = + new HashSet<String>(Arrays.asList("split_array")); + + public static final ObservabilityPipelineSplitArrayProcessorType SPLIT_ARRAY = + new ObservabilityPipelineSplitArrayProcessorType("split_array"); + + ObservabilityPipelineSplitArrayProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSplitArrayProcessorTypeSerializer + extends StdSerializer<ObservabilityPipelineSplitArrayProcessorType> { + public ObservabilityPipelineSplitArrayProcessorTypeSerializer( + Class<ObservabilityPipelineSplitArrayProcessorType> t) { + super(t); + } + + public ObservabilityPipelineSplitArrayProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSplitArrayProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSplitArrayProcessorType fromValue(String value) { + return new ObservabilityPipelineSplitArrayProcessorType(value); + } +}
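For reviewers, a minimal usage sketch of the two processors added above, using only the constructors and fluent setters introduced in this diff. The field names, search queries, and prefix values are illustrative assumptions and are not taken from the generated change.

// Hypothetical example: configure the new parse_xml processor.
ObservabilityPipelineParseXMLProcessor parseXml =
    new ObservabilityPipelineParseXMLProcessor()
        .id("parse-xml-processor")
        .type(ObservabilityPipelineParseXMLProcessorType.PARSE_XML)
        .include("service:my-service")
        .field("message") // log field that holds the XML string
        .enabled(true)
        .includeAttr(true) // keep XML attributes in the parsed output
        .attrPrefix("@"); // assumed attribute prefix, for illustration only

// Hypothetical example: split an array field into separate events.
ObservabilityPipelineSplitArrayProcessor splitArray =
    new ObservabilityPipelineSplitArrayProcessor()
        .id("split-array-processor")
        .type(ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY)
        .include("*") // the generated javadoc notes "*" is the typical query for split_array
        .enabled(true)
        .addArraysItem(
            new ObservabilityPipelineSplitArrayProcessorArrayConfig()
                .field("tags") // assumed array field, for illustration only
                .include("*"));

Either object can then be supplied wherever an ObservabilityPipelineConfigProcessorItem processor is accepted, following the same pattern as the existing processor variants in the oneOf wrapper updated earlier in this diff.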