From c0640af4dcffa3827929d120f2f8d5a38ebba56d Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Wed, 31 Dec 2025 18:41:02 +0000 Subject: [PATCH] Regenerate client from commit aea672c of spec repo --- .generator/schemas/v2/openapi.yaml | 1408 ++++++++++++++--- features/scenarios_model_mapping.rb | 40 +- features/v2/given.json | 24 +- features/v2/observability_pipelines.feature | 4 +- features/v2/undo.json | 50 +- lib/datadog_api_client/configuration.rb | 12 +- lib/datadog_api_client/inflector.rb | 37 +- .../v2/api/observability_pipelines_api.rb | 6 +- .../v2/models/azure_storage_destination.rb | 2 + .../models/microsoft_sentinel_destination.rb | 2 + ...ability_pipeline_add_env_vars_processor.rb | 2 + ...rvability_pipeline_add_fields_processor.rb | 2 + ...ability_pipeline_add_hostname_processor.rb | 198 +++ ...ty_pipeline_add_hostname_processor_type.rb | 26 + ...ty_pipeline_amazon_data_firehose_source.rb | 2 + ...pipeline_amazon_open_search_destination.rb | 2 + ...vability_pipeline_amazon_s3_destination.rb | 2 + ...observability_pipeline_amazon_s3_source.rb | 2 + ...peline_amazon_security_lake_destination.rb | 2 + ...ability_pipeline_cloud_prem_destination.rb | 169 ++ ...ty_pipeline_cloud_prem_destination_type.rb | 26 + .../models/observability_pipeline_config.rb | 12 +- ...bility_pipeline_config_destination_item.rb | 30 +- ...rvability_pipeline_config_pipeline_type.rb | 27 + ...vability_pipeline_config_processor_item.rb | 24 +- ...servability_pipeline_config_source_item.rb | 21 +- ..._crowd_strike_next_gen_siem_destination.rb | 2 + ...observability_pipeline_custom_processor.rb | 2 + ...rvability_pipeline_datadog_agent_source.rb | 4 +- ...ility_pipeline_datadog_logs_destination.rb | 2 + ...ty_pipeline_datadog_metrics_destination.rb | 169 ++ ...peline_datadog_metrics_destination_type.rb | 26 + ...ability_pipeline_datadog_tags_processor.rb | 2 + ...observability_pipeline_dedupe_processor.rb | 2 + ...lity_pipeline_elasticsearch_destination.rb | 14 +- 
...e_elasticsearch_destination_data_stream.rb | 125 ++ ...ity_pipeline_enrichment_table_processor.rb | 16 +- ...peline_enrichment_table_reference_table.rb | 156 ++ ...observability_pipeline_filter_processor.rb | 6 +- ...bservability_pipeline_fluent_bit_source.rb | 2 + .../observability_pipeline_fluentd_source.rb | 2 + ...ity_pipeline_generate_metrics_processor.rb | 2 + ...y_pipeline_google_chronicle_destination.rb | 2 + ...peline_google_cloud_storage_destination.rb | 2 + ...ity_pipeline_google_pub_sub_destination.rb | 2 + ...vability_pipeline_google_pub_sub_source.rb | 2 + ...bility_pipeline_http_client_destination.rb | 220 +++ ...e_http_client_destination_auth_strategy.rb | 28 + ...ine_http_client_destination_compression.rb | 123 ++ ...lient_destination_compression_algorithm.rb | 26 + ...peline_http_client_destination_encoding.rb | 26 + ...y_pipeline_http_client_destination_type.rb | 26 + ...servability_pipeline_http_client_source.rb | 2 + ...peline_http_client_source_auth_strategy.rb | 1 + ...servability_pipeline_http_server_source.rb | 2 + ...bservability_pipeline_kafka_destination.rb | 361 +++++ ..._pipeline_kafka_destination_compression.rb | 30 + ...ity_pipeline_kafka_destination_encoding.rb | 27 + ...ability_pipeline_kafka_destination_type.rb | 26 + ...ility_pipeline_kafka_librdkafka_option.rb} | 6 +- ...b => observability_pipeline_kafka_sasl.rb} | 6 +- ...vability_pipeline_kafka_sasl_mechanism.rb} | 2 +- .../observability_pipeline_kafka_source.rb | 6 +- .../observability_pipeline_logstash_source.rb | 2 + ...vability_pipeline_metric_tags_processor.rb | 229 +++ ...ity_pipeline_metric_tags_processor_rule.rb | 167 ++ ...eline_metric_tags_processor_rule_action.rb | 27 + ...ipeline_metric_tags_processor_rule_mode.rb | 26 + ...ity_pipeline_metric_tags_processor_type.rb | 26 + ...vability_pipeline_new_relic_destination.rb | 2 + ...vability_pipeline_ocsf_mapper_processor.rb | 2 + ...bility_pipeline_open_search_destination.rb | 2 + 
...rvability_pipeline_opentelemetry_source.rb | 176 +++ ...lity_pipeline_opentelemetry_source_type.rb | 26 + ...rvability_pipeline_parse_grok_processor.rb | 2 + ...rvability_pipeline_parse_json_processor.rb | 2 + ...ervability_pipeline_parse_xml_processor.rb | 300 ++++ ...ility_pipeline_parse_xml_processor_type.rb | 26 + .../observability_pipeline_quota_processor.rb | 23 +- ...ipeline_quota_processor_overflow_action.rb | 2 +- ...observability_pipeline_reduce_processor.rb | 2 + ...bility_pipeline_remove_fields_processor.rb | 2 + ...bility_pipeline_rename_fields_processor.rb | 2 + ...ervability_pipeline_rsyslog_destination.rb | 2 + .../observability_pipeline_rsyslog_source.rb | 2 + ...observability_pipeline_sample_processor.rb | 51 +- ...peline_sensitive_data_scanner_processor.rb | 2 + ...canner_processor_custom_pattern_options.rb | 12 +- ...anner_processor_library_pattern_options.rb | 12 +- ...ility_pipeline_sentinel_one_destination.rb | 2 + ...servability_pipeline_socket_destination.rb | 2 + .../observability_pipeline_socket_source.rb | 2 + ...vability_pipeline_split_array_processor.rb | 229 +++ ...line_split_array_processor_array_config.rb | 144 ++ ...ity_pipeline_split_array_processor_type.rb | 26 + ...ability_pipeline_splunk_hec_destination.rb | 2 + ...bservability_pipeline_splunk_hec_source.rb | 2 + ...bservability_pipeline_splunk_tcp_source.rb | 2 + ...ability_pipeline_sumo_logic_destination.rb | 2 + ...bservability_pipeline_sumo_logic_source.rb | 2 + ...vability_pipeline_syslog_ng_destination.rb | 2 + ...observability_pipeline_syslog_ng_source.rb | 2 + ...servability_pipeline_throttle_processor.rb | 2 + 103 files changed, 4758 insertions(+), 380 deletions(-) create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor_type.rb create mode 100644 
lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination_data_stream.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_reference_table.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_compression.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_encoding.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_type.rb rename lib/datadog_api_client/v2/models/{observability_pipeline_kafka_source_librdkafka_option.rb => observability_pipeline_kafka_librdkafka_option.rb} (95%) rename 
lib/datadog_api_client/v2/models/{observability_pipeline_kafka_source_sasl.rb => observability_pipeline_kafka_sasl.rb} (95%) rename lib/datadog_api_client/v2/models/{observability_pipeline_pipeline_kafka_source_sasl_mechanism.rb => observability_pipeline_kafka_sasl_mechanism.rb} (92%) create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_array_config.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_type.rb diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index d2192b74841f..d24c2bf5f18a 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -6677,8 +6677,11 @@ components: x-enum-varnames: - AZURE_SCAN_OPTIONS AzureStorageDestination: - description: The `azure_storage` destination forwards logs to an Azure Blob + description: 'The `azure_storage` destination forwards logs to an Azure 
Blob Storage container. + + + **Supported pipeline types:** logs' properties: blob_prefix: description: Optional prefix for blobs written to the container. @@ -6709,6 +6712,8 @@ components: - inputs - container_name type: object + x-pipeline-types: + - logs AzureStorageDestinationType: default: azure_storage description: The destination type. The value should always be `azure_storage`. @@ -33331,8 +33336,11 @@ components: - query type: object MicrosoftSentinelDestination: - description: The `microsoft_sentinel` destination forwards logs to Microsoft + description: 'The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. + + + **Supported pipeline types:** logs' properties: client_id: description: Azure AD client ID used for authentication. @@ -33373,6 +33381,8 @@ components: - dcr_immutable_id - table type: object + x-pipeline-types: + - logs MicrosoftSentinelDestinationType: default: microsoft_sentinel description: The destination type. The value should always be `microsoft_sentinel`. @@ -35077,8 +35087,11 @@ components: - data type: object ObservabilityPipelineAddEnvVarsProcessor: - description: The `add_env_vars` processor adds environment variable values to - log events. + description: 'The `add_env_vars` processor adds environment variable values + to log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35110,6 +35123,8 @@ components: - variables - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddEnvVarsProcessorType: default: add_env_vars description: The processor type. The value should always be `add_env_vars`. @@ -35135,7 +35150,10 @@ components: - name type: object ObservabilityPipelineAddFieldsProcessor: - description: The `add_fields` processor adds static key-value fields to logs. + description: 'The `add_fields` processor adds static key-value fields to logs. 
+ + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35169,6 +35187,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddFieldsProcessorType: default: add_fields description: The processor type. The value should always be `add_fields`. @@ -35178,8 +35198,53 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAddHostnameProcessor: + description: 'The `add_hostname` processor adds the hostname to log events. + + + **Supported pipeline types:** logs' + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: add-hostname-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessorType' + required: + - id + - type + - include + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineAddHostnameProcessorType: + default: add_hostname + description: The processor type. The value should always be `add_hostname`. + enum: + - add_hostname + example: add_hostname + type: string + x-enum-varnames: + - ADD_HOSTNAME ObservabilityPipelineAmazonDataFirehoseSource: - description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + description: 'The `amazon_data_firehose` source ingests logs from AWS Data Firehose. 
+ + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35197,6 +35262,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonDataFirehoseSourceType: default: amazon_data_firehose description: The source type. The value should always be `amazon_data_firehose`. @@ -35207,7 +35274,10 @@ components: x-enum-varnames: - AMAZON_DATA_FIREHOSE ObservabilityPipelineAmazonOpenSearchDestination: - description: The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + description: 'The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' @@ -35235,6 +35305,8 @@ components: - inputs - auth type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonOpenSearchDestinationAuth: description: 'Authentication settings for the Amazon OpenSearch destination. @@ -35278,8 +35350,11 @@ components: x-enum-varnames: - AMAZON_OPENSEARCH ObservabilityPipelineAmazonS3Destination: - description: The `amazon_s3` destination sends your logs in Datadog-rehydratable + description: 'The `amazon_s3` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35320,6 +35395,8 @@ components: - region - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3DestinationStorageClass: description: S3 storage class. enum: @@ -35356,7 +35433,10 @@ components: ObservabilityPipelineAmazonS3Source: description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket. - It supports AWS authentication and TLS encryption.' + It supports AWS authentication and TLS encryption. 
+ + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35379,6 +35459,8 @@ components: - type - region type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3SourceType: default: amazon_s3 description: The source type. Always `amazon_s3`. @@ -35389,8 +35471,11 @@ components: x-enum-varnames: - AMAZON_S3 ObservabilityPipelineAmazonSecurityLakeDestination: - description: The `amazon_security_lake` destination sends your logs to Amazon + description: 'The `amazon_security_lake` destination sends your logs to Amazon Security Lake. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35430,6 +35515,8 @@ components: - region - custom_source_name type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonSecurityLakeDestinationType: default: amazon_security_lake description: The destination type. Always `amazon_security_lake`. @@ -35455,6 +35542,42 @@ components: role session. type: string type: object + ObservabilityPipelineCloudPremDestination: + description: 'The `cloud_prem` destination sends logs to Datadog CloudPrem. + + + **Supported pipeline types:** logs' + properties: + id: + description: The unique identifier for this component. + example: cloud-prem-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - logs + ObservabilityPipelineCloudPremDestinationType: + default: cloud_prem + description: The destination type. The value should always be `cloud_prem`. 
+ enum: + - cloud_prem + example: cloud_prem + type: string + x-enum-varnames: + - CLOUD_PREM ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35473,6 +35596,8 @@ components: items: $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem' type: array + pipeline_type: + $ref: '#/components/schemas/ObservabilityPipelineConfigPipelineType' processors: description: A list of processor groups that transform or enrich log data. example: @@ -35509,25 +35634,40 @@ components: ObservabilityPipelineConfigDestinationItem: description: A destination for the pipeline. oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' - - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - $ref: '#/components/schemas/AzureStorageDestination' - - $ref: '#/components/schemas/MicrosoftSentinelDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - $ref: 
'#/components/schemas/ObservabilityPipelineGoogleChronicleDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaDestination' + - $ref: '#/components/schemas/MicrosoftSentinelDestination' - $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineSocketDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' - - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestination' + ObservabilityPipelineConfigPipelineType: + default: logs + description: The type of data being ingested. Defaults to `logs` if not specified. + enum: + - logs + - metrics + example: logs + type: string + x-enum-varnames: + - LOGS + - METRICS ObservabilityPipelineConfigProcessorGroup: description: A group of processors. example: @@ -35601,45 +35741,53 @@ components: description: A processor for the pipeline. 
oneOf: - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: 
'#/components/schemas/ObservabilityPipelineSplitArrayProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Source' - - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineFluentBitSource' - - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' + - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSource' - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - $ref: '#/components/schemas/ObservabilityPipelineSocketSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: 
'#/components/schemas/ObservabilityPipelineSplunkTcpSource' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' + - $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySource' ObservabilityPipelineCrowdStrikeNextGenSiemDestination: - description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike + description: 'The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. + + + **Supported pipeline types:** logs' properties: compression: $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression' @@ -35667,6 +35815,8 @@ components: - inputs - encoding type: object + x-pipeline-types: + - logs ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression: description: Compression configuration for log events. properties: @@ -35710,9 +35860,12 @@ components: x-enum-varnames: - CROWDSTRIKE_NEXT_GEN_SIEM ObservabilityPipelineCustomProcessor: - description: The `custom_processor` processor transforms events using [Vector + description: 'The `custom_processor` processor transforms events using [Vector Remap Language (VRL)](https://vector.dev/docs/reference/vrl/) scripts with advanced filtering capabilities. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35746,6 +35899,8 @@ components: - remaps - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineCustomProcessorRemap: description: Defines a single VRL remap rule with its own filtering and transformation logic. @@ -35821,7 +35976,11 @@ components: - config type: object ObservabilityPipelineDatadogAgentSource: - description: The `datadog_agent` source collects logs from the Datadog Agent. + description: 'The `datadog_agent` source collects logs/metrics from the Datadog + Agent. 
+ + + **Supported pipeline types:** logs, metrics' properties: id: description: The unique identifier for this component. Used to reference @@ -35837,6 +35996,9 @@ components: - id - type type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineDatadogAgentSourceType: default: datadog_agent description: The source type. The value should always be `datadog_agent`. @@ -35847,7 +36009,10 @@ components: x-enum-varnames: - DATADOG_AGENT ObservabilityPipelineDatadogLogsDestination: - description: The `datadog_logs` destination forwards logs to Datadog Log Management. + description: 'The `datadog_logs` destination forwards logs to Datadog Log Management. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -35868,6 +36033,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogLogsDestinationType: default: datadog_logs description: The destination type. The value should always be `datadog_logs`. @@ -35877,9 +36044,48 @@ components: type: string x-enum-varnames: - DATADOG_LOGS + ObservabilityPipelineDatadogMetricsDestination: + description: 'The `datadog_metrics` destination forwards metrics to Datadog. + + + **Supported pipeline types:** metrics' + properties: + id: + description: The unique identifier for this component. + example: datadog-metrics-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - metric-tags-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineDatadogMetricsDestinationType: + default: datadog_metrics + description: The destination type. The value should always be `datadog_metrics`. 
+ enum: + - datadog_metrics + example: datadog_metrics + type: string + x-enum-varnames: + - DATADOG_METRICS ObservabilityPipelineDatadogTagsProcessor: - description: The `datadog_tags` processor includes or excludes specific Datadog + description: 'The `datadog_tags` processor includes or excludes specific Datadog tags in your logs. + + + **Supported pipeline types:** logs' properties: action: $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessorAction' @@ -35922,6 +36128,8 @@ components: - keys - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogTagsProcessorAction: description: The action to take on tags with matching keys. enum: @@ -35964,7 +36172,10 @@ components: - DECODE_JSON - DECODE_SYSLOG ObservabilityPipelineDedupeProcessor: - description: The `dedupe` processor removes duplicate fields in log events. + description: 'The `dedupe` processor removes duplicate fields in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36001,6 +36212,8 @@ components: - mode - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDedupeProcessorMode: description: The deduplication mode to apply to the fields. enum: @@ -36021,8 +36234,11 @@ components: x-enum-varnames: - DEDUPE ObservabilityPipelineElasticsearchDestination: - description: The `elasticsearch` destination writes logs to an Elasticsearch + description: 'The `elasticsearch` destination writes logs to an Elasticsearch cluster. + + + **Supported pipeline types:** logs' properties: api_version: $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' @@ -36030,6 +36246,8 @@ components: description: The index to write logs to in Elasticsearch. 
example: logs-index type: string + data_stream: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationDataStream' id: description: The unique identifier for this component. example: elasticsearch-destination @@ -36049,6 +36267,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineElasticsearchDestinationApiVersion: description: The Elasticsearch API version to use. Set to `auto` to auto-detect. enum: @@ -36063,6 +36283,23 @@ components: - V6 - V7 - V8 + ObservabilityPipelineElasticsearchDestinationDataStream: + description: Configuration options for writing to Elasticsearch Data Streams + instead of a fixed index. + properties: + dataset: + description: The data stream dataset for your logs. This groups logs by + their source or application. + type: string + dtype: + description: The data stream type for your logs. This determines how logs + are categorized within the data stream. + type: string + namespace: + description: The data stream namespace for your logs. This separates logs + into different environments or domains. + type: string + type: object ObservabilityPipelineElasticsearchDestinationType: default: elasticsearch description: The destination type. The value should always be `elasticsearch`. @@ -36202,8 +36439,12 @@ components: - path type: object ObservabilityPipelineEnrichmentTableProcessor: - description: The `enrichment_table` processor enriches logs using a static CSV - file or GeoIP database. + description: 'The `enrichment_table` processor enriches logs using a static + CSV file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, + or `reference_table` must be configured. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36224,6 +36465,8 @@ components: targets. 
example: source:my-source type: string + reference_table: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableReferenceTable' target: description: Path where enrichment results should be stored in the log. example: enriched.geoip @@ -36237,6 +36480,8 @@ components: - target - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineEnrichmentTableProcessorType: default: enrichment_table description: The processor type. The value should always be `enrichment_table`. @@ -36246,6 +36491,28 @@ components: type: string x-enum-varnames: - ENRICHMENT_TABLE + ObservabilityPipelineEnrichmentTableReferenceTable: + description: Uses a Datadog reference table to enrich logs. + properties: + columns: + description: List of column names to include from the reference table. If + not provided, all columns are included. + items: + type: string + type: array + key_field: + description: Path to the field in the log event to match against the reference + table. + example: log.user.id + type: string + table_id: + description: The unique identifier of the reference table. + example: 550e8400-e29b-41d4-a716-446655440000 + type: string + required: + - key_field + - table_id + type: object ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. properties: @@ -36262,9 +36529,12 @@ components: - value type: object ObservabilityPipelineFilterProcessor: - description: The `filter` processor allows conditional processing of logs based - on a Datadog search query. Logs that match the `include` query are passed - through; others are discarded. + description: 'The `filter` processor allows conditional processing of logs/metrics + based on a Datadog search query. Logs/metrics that match the `include` query + are passed through; others are discarded. 
+ + + **Supported pipeline types:** logs, metrics' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36279,9 +36549,9 @@ components: example: filter-processor type: string include: - description: A Datadog search query used to determine which logs should - pass through the filter. Logs that match this query continue to downstream - components; others are dropped. + description: A Datadog search query used to determine which logs/metrics + should pass through the filter. Logs/metrics that match this query continue + to downstream components; others are dropped. example: service:my-service type: string type: @@ -36292,6 +36562,9 @@ components: - include - enabled type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineFilterProcessorType: default: filter description: The processor type. The value should always be `filter`. @@ -36302,7 +36575,10 @@ components: x-enum-varnames: - FILTER ObservabilityPipelineFluentBitSource: - description: The `fluent_bit` source ingests logs from Fluent Bit. + description: 'The `fluent_bit` source ingests logs from Fluent Bit. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -36318,6 +36594,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentBitSourceType: default: fluent_bit description: The source type. The value should always be `fluent_bit`. @@ -36328,7 +36606,10 @@ components: x-enum-varnames: - FLUENT_BIT ObservabilityPipelineFluentdSource: - description: The `fluentd` source ingests logs from a Fluentd-compatible service. + description: 'The `fluentd` source ingests logs from a Fluentd-compatible service. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. 
Used to reference @@ -36344,6 +36625,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentdSourceType: default: fluentd description: The source type. The value should always be `fluentd. @@ -36368,7 +36651,10 @@ components: from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by - log fields.' + log fields. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36398,6 +36684,8 @@ components: - type - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineGenerateMetricsProcessorType: default: generate_datadog_metrics description: The processor type. Always `generate_datadog_metrics`. @@ -36492,7 +36780,10 @@ components: - GAUGE - DISTRIBUTION ObservabilityPipelineGoogleChronicleDestination: - description: The `google_chronicle` destination sends logs to Google Chronicle. + description: 'The `google_chronicle` destination sends logs to Google Chronicle. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36526,6 +36817,8 @@ components: - inputs - customer_id type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleChronicleDestinationEncoding: description: The encoding format for the logs sent to Chronicle. enum: @@ -36549,7 +36842,10 @@ components: description: 'The `google_cloud_storage` destination stores logs in a Google Cloud Storage (GCS) bucket. - It requires a bucket name, GCP authentication, and metadata fields.' + It requires a bucket name, GCP authentication, and metadata fields. 
+ + + **Supported pipeline types:** logs' properties: acl: $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationAcl' @@ -36591,6 +36887,8 @@ components: - bucket - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleCloudStorageDestinationAcl: description: Access control list setting for objects written to the bucket. enum: @@ -36633,8 +36931,11 @@ components: x-enum-varnames: - GOOGLE_CLOUD_STORAGE ObservabilityPipelineGooglePubSubDestination: - description: The `google_pubsub` destination publishes logs to a Google Cloud + description: 'The `google_pubsub` destination publishes logs to a Google Cloud Pub/Sub topic. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36672,6 +36973,8 @@ components: - project - topic type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubDestinationEncoding: description: Encoding format for log events. enum: @@ -36692,8 +36995,11 @@ components: x-enum-varnames: - GOOGLE_PUBSUB ObservabilityPipelineGooglePubSubSource: - description: The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub + description: 'The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub subscription. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36724,6 +37030,8 @@ components: - project - subscription type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubSourceType: default: google_pubsub description: The source type. The value should always be `google_pubsub`. @@ -36733,9 +37041,94 @@ components: type: string x-enum-varnames: - GOOGLE_PUBSUB + ObservabilityPipelineHttpClientDestination: + description: 'The `http_client` destination sends data to an HTTP endpoint. 
+ + + **Supported pipeline types:** logs, metrics' + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationAuthStrategy' + compression: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationEncoding' + id: + description: The unique identifier for this component. + example: http-client-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - filter-processor + items: + type: string + type: array + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationType' + required: + - id + - type + - inputs + - encoding + type: object + x-pipeline-types: + - logs + - metrics + ObservabilityPipelineHttpClientDestinationAuthStrategy: + description: HTTP authentication strategy. + enum: + - none + - basic + - bearer + example: basic + type: string + x-enum-varnames: + - NONE + - BASIC + - BEARER + ObservabilityPipelineHttpClientDestinationCompression: + description: Compression configuration for HTTP requests. + properties: + algorithm: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm' + required: + - algorithm + type: object + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm: + description: Compression algorithm. + enum: + - gzip + example: gzip + type: string + x-enum-varnames: + - GZIP + ObservabilityPipelineHttpClientDestinationEncoding: + description: Encoding format for log events. + enum: + - json + example: json + type: string + x-enum-varnames: + - JSON + ObservabilityPipelineHttpClientDestinationType: + default: http_client + description: The destination type. The value should always be `http_client`. 
+ enum: + - http_client + example: http_client + type: string + x-enum-varnames: + - HTTP_CLIENT ObservabilityPipelineHttpClientSource: - description: The `http_client` source scrapes logs from HTTP endpoints at regular + description: 'The `http_client` source scrapes logs from HTTP endpoints at regular intervals. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceAuthStrategy' @@ -36766,14 +37159,18 @@ components: - type - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpClientSourceAuthStrategy: description: Optional authentication strategy for HTTP requests. enum: + - none - basic - bearer example: basic type: string x-enum-varnames: + - NONE - BASIC - BEARER ObservabilityPipelineHttpClientSourceType: @@ -36786,8 +37183,11 @@ components: x-enum-varnames: - HTTP_CLIENT ObservabilityPipelineHttpServerSource: - description: The `http_server` source collects logs over HTTP POST from external + description: 'The `http_server` source collects logs over HTTP POST from external services. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceAuthStrategy' @@ -36807,6 +37207,8 @@ components: - auth_strategy - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpServerSourceAuthStrategy: description: HTTP authentication method. enum: @@ -36826,8 +37228,161 @@ components: type: string x-enum-varnames: - HTTP_SERVER + ObservabilityPipelineKafkaDestination: + description: 'The `kafka` destination sends logs to Apache Kafka topics. + + + **Supported pipeline types:** logs' + properties: + compression: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationEncoding' + headers_key: + description: The field name to use for Kafka message headers. 
+ example: headers + type: string + id: + description: The unique identifier for this component. + example: kafka-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + key_field: + description: The field name to use as the Kafka message key. + example: message_id + type: string + librdkafka_options: + description: Optional list of advanced Kafka producer configuration options, + defined as key-value pairs. + items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' + type: array + message_timeout_ms: + description: Maximum time in milliseconds to wait for message delivery confirmation. + example: 300000 + format: int64 + minimum: 1 + type: integer + rate_limit_duration_secs: + description: Duration in seconds for the rate limit window. + example: 1 + format: int64 + minimum: 1 + type: integer + rate_limit_num: + description: Maximum number of messages allowed per rate limit duration. + example: 1000 + format: int64 + minimum: 1 + type: integer + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + socket_timeout_ms: + description: Socket timeout in milliseconds for network requests. + example: 60000 + format: int64 + maximum: 300000 + minimum: 10 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topic: + description: The Kafka topic name to publish logs to. + example: logs-topic + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationType' + required: + - id + - type + - inputs + - topic + - encoding + type: object + x-pipeline-types: + - logs + ObservabilityPipelineKafkaDestinationCompression: + description: Compression codec for Kafka messages. 
+ enum: + - none + - gzip + - snappy + - lz4 + - zstd + example: gzip + type: string + x-enum-varnames: + - NONE + - GZIP + - SNAPPY + - LZ4 + - ZSTD + ObservabilityPipelineKafkaDestinationEncoding: + description: Encoding format for log events. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineKafkaDestinationType: + default: kafka + description: The destination type. The value should always be `kafka`. + enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineKafkaLibrdkafkaOption: + description: Represents a key-value pair used to configure low-level `librdkafka` + client options for Kafka source and destination, such as timeouts, buffer + sizes, and security settings. + properties: + name: + description: The name of the `librdkafka` configuration option to set. + example: fetch.message.max.bytes + type: string + value: + description: The value assigned to the specified `librdkafka` configuration + option. + example: '1048576' + type: string + required: + - name + - value + type: object + ObservabilityPipelineKafkaSasl: + description: Specifies the SASL mechanism for authenticating with a Kafka cluster. + properties: + mechanism: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSaslMechanism' + type: object + ObservabilityPipelineKafkaSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 ObservabilityPipelineKafkaSource: - description: The `kafka` source ingests data from Apache Kafka topics. + description: 'The `kafka` source ingests data from Apache Kafka topics. + + + **Supported pipeline types:** logs' properties: group_id: description: Consumer group ID used by the Kafka client. 
@@ -36843,10 +37398,10 @@ components: description: Optional list of advanced Kafka client configuration options, defined as key-value pairs. items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' type: array sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' tls: $ref: '#/components/schemas/ObservabilityPipelineTls' topics: @@ -36866,30 +37421,8 @@ components: - group_id - topics type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: - description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. - properties: - name: - description: The name of the `librdkafka` configuration option to set. - example: fetch.message.max.bytes - type: string - value: - description: The value assigned to the specified `librdkafka` configuration - option. - example: '1048576' - type: string - required: - - name - - value - type: object - ObservabilityPipelineKafkaSourceSasl: - description: Specifies the SASL mechanism for authenticating with a Kafka cluster. - properties: - mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' - type: object + x-pipeline-types: + - logs ObservabilityPipelineKafkaSourceType: default: kafka description: The source type. The value should always be `kafka`. @@ -36900,7 +37433,10 @@ components: x-enum-varnames: - KAFKA ObservabilityPipelineLogstashSource: - description: The `logstash` source ingests logs from a Logstash forwarder. + description: 'The `logstash` source ingests logs from a Logstash forwarder. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. 
Used to reference @@ -36916,6 +37452,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineLogstashSourceType: default: logstash description: The source type. The value should always be `logstash`. @@ -36940,13 +37478,106 @@ components: - name - value type: object + ObservabilityPipelineMetricTagsProcessor: + description: 'The `metric_tags` processor filters metrics based on their tags + using Datadog tag key patterns. + + + **Supported pipeline types:** metrics' + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: metric-tags-processor + type: string + include: + description: A Datadog search query used to determine which metrics this + processor targets. + example: '*' + type: string + rules: + description: A list of rules for filtering metric tags. + items: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRule' + maxItems: 100 + minItems: 1 + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorType' + required: + - id + - type + - include + - rules + - enabled + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineMetricTagsProcessorRule: + description: Defines a rule for filtering metric tags based on key patterns. + properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleAction' + keys: + description: A list of tag keys to include or exclude. 
+ example: + - env + - service + - version + items: + type: string + type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleMode' + required: + - mode + - action + - keys + type: object + ObservabilityPipelineMetricTagsProcessorRuleAction: + description: The action to take on tags with matching keys. + enum: + - include + - exclude + example: include + type: string + x-enum-varnames: + - INCLUDE + - EXCLUDE + ObservabilityPipelineMetricTagsProcessorRuleMode: + description: The processing mode for tag filtering. + enum: + - filter + example: filter + type: string + x-enum-varnames: + - FILTER + ObservabilityPipelineMetricTagsProcessorType: + default: metric_tags + description: The processor type. The value should always be `metric_tags`. + enum: + - metric_tags + example: metric_tags + type: string + x-enum-varnames: + - METRIC_TAGS ObservabilityPipelineMetricValue: description: Specifies how the value of the generated metric is computed. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOne' - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByField' ObservabilityPipelineNewRelicDestination: - description: The `new_relic` destination sends logs to the New Relic platform. + description: 'The `new_relic` destination sends logs to the New Relic platform. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -36970,6 +37601,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineNewRelicDestinationRegion: description: The New Relic region. enum: @@ -36990,8 +37623,11 @@ components: x-enum-varnames: - NEW_RELIC ObservabilityPipelineOcsfMapperProcessor: - description: The `ocsf_mapper` processor transforms logs into the OCSF schema + description: 'The `ocsf_mapper` processor transforms logs into the OCSF schema using a predefined mapping configuration. 
+ + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37023,6 +37659,8 @@ components: - mappings - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineOcsfMapperProcessorMapping: description: Defines how specific events are transformed to OCSF using a mapping configuration. @@ -37082,7 +37720,10 @@ components: - OKTA_SYSTEM_LOG_AUTHENTICATION - PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC ObservabilityPipelineOpenSearchDestination: - description: The `opensearch` destination writes logs to an OpenSearch cluster. + description: 'The `opensearch` destination writes logs to an OpenSearch cluster. + + + **Supported pipeline types:** logs' properties: bulk_index: description: The index to write logs to. @@ -37107,6 +37748,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineOpenSearchDestinationType: default: opensearch description: The destination type. The value should always be `opensearch`. @@ -37116,9 +37759,56 @@ components: type: string x-enum-varnames: - OPENSEARCH + ObservabilityPipelineOpentelemetrySource: + description: 'The `opentelemetry` source receives telemetry data using the OpenTelemetry + Protocol (OTLP) over gRPC and HTTP. + + + **Supported pipeline types:** logs' + properties: + grpc_address_key: + description: Environment variable name containing the gRPC server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_GRPC_ADDRESS + type: string + http_address_key: + description: Environment variable name containing the HTTP server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_HTTP_ADDRESS + type: string + id: + description: The unique identifier for this component. 
Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: opentelemetry-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySourceType' + required: + - id + - type + type: object + x-pipeline-types: + - logs + ObservabilityPipelineOpentelemetrySourceType: + default: opentelemetry + description: The source type. The value should always be `opentelemetry`. + enum: + - opentelemetry + example: opentelemetry + type: string + x-enum-varnames: + - OPENTELEMETRY ObservabilityPipelineParseGrokProcessor: - description: The `parse_grok` processor extracts structured fields from unstructured + description: 'The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. + + + **Supported pipeline types:** logs' properties: disable_library_rules: default: false @@ -37157,6 +37847,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseGrokProcessorRule: description: 'A Grok parsing rule used in the `parse_grok` processor. Each rule defines how to extract structured fields @@ -37236,9 +37928,12 @@ components: x-enum-varnames: - PARSE_GROK ObservabilityPipelineParseJSONProcessor: - description: The `parse_json` processor extracts JSON from a specified field + description: 'The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37270,6 +37965,8 @@ components: - field - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseJSONProcessorType: default: parse_json description: The processor type. The value should always be `parse_json`. 
@@ -37279,28 +37976,92 @@ components: type: string x-enum-varnames: - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. + ObservabilityPipelineParseXMLProcessor: + description: 'The `parse_xml` processor parses XML from a specified field and + extracts it into the event. + + + **Supported pipeline types:** logs' + properties: + always_use_text_key: + description: Whether to always use a text key for element content. + type: boolean + attr_prefix: + description: The prefix to use for XML attributes in the parsed output. + type: string + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + field: + description: The name of the log field that contains an XML string. + example: message + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: parse-xml-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + include_attr: + description: Whether to include XML attributes in the parsed output. + type: boolean + parse_bool: + description: Whether to parse boolean values from strings. + type: boolean + parse_null: + description: Whether to parse null values. + type: boolean + parse_number: + description: Whether to parse numeric values from strings. + type: boolean + text_key: + description: The key name to use for text content within XML elements. Must + be at least 1 character if specified. 
+ minLength: 1 + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessorType' + required: + - id + - type + - include + - field + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineParseXMLProcessorType: + default: parse_xml + description: The processor type. The value should always be `parse_xml`. enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 + - parse_xml + example: parse_xml type: string x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 + - PARSE_XML ObservabilityPipelineQuotaProcessor: - description: The Quota Processor measures logging traffic for logs that match + description: 'The `quota` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' 
example: false type: boolean enabled: @@ -37344,6 +38105,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -37354,6 +38117,8 @@ components: - limit - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineQuotaProcessorLimit: description: The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events. @@ -37381,7 +38146,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. @@ -37425,8 +38191,11 @@ components: x-enum-varnames: - QUOTA ObservabilityPipelineReduceProcessor: - description: The `reduce` processor aggregates and merges logs based on matching + description: 'The `reduce` processor aggregates and merges logs based on matching keys and merge strategies. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37467,6 +38236,8 @@ components: - merge_strategies - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineReduceProcessorMergeStrategy: description: Defines how a specific field should be merged across grouped events. properties: @@ -37520,7 +38291,10 @@ components: x-enum-varnames: - REDUCE ObservabilityPipelineRemoveFieldsProcessor: - description: The `remove_fields` processor deletes specified fields from logs. + description: 'The `remove_fields` processor deletes specified fields from logs. 
+ + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37556,6 +38330,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRemoveFieldsProcessorType: default: remove_fields description: The processor type. The value should always be `remove_fields`. @@ -37566,7 +38342,10 @@ components: x-enum-varnames: - REMOVE_FIELDS ObservabilityPipelineRenameFieldsProcessor: - description: The `rename_fields` processor changes field names. + description: 'The `rename_fields` processor changes field names. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37601,6 +38380,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRenameFieldsProcessorField: description: Defines how to rename a field in log events. properties: @@ -37632,8 +38413,11 @@ components: x-enum-varnames: - RENAME_FIELDS ObservabilityPipelineRsyslogDestination: - description: The `rsyslog` destination forwards logs to an external `rsyslog` + description: 'The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -37662,6 +38446,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogDestinationType: default: rsyslog description: The destination type. The value should always be `rsyslog`. @@ -37672,8 +38458,11 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineRsyslogSource: - description: The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` - server using the syslog protocol. + description: 'The `rsyslog` source listens for logs over TCP or UDP from an + `rsyslog` server using the syslog protocol. 
+ + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -37692,6 +38481,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogSourceType: default: rsyslog description: The source type. The value should always be `rsyslog`. @@ -37702,8 +38493,11 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineSampleProcessor: - description: The `sample` processor allows probabilistic sampling of logs at + description: 'The `sample` processor allows probabilistic sampling of logs at a fixed rate. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37711,6 +38505,16 @@ components: description: Whether this processor is enabled. example: true type: boolean + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. + example: + - service + - host + items: + type: string + minItems: 1 + type: array id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` @@ -37727,20 +38531,17 @@ components: example: 10.0 format: double type: number - rate: - description: Number of events to sample (1 in N). - example: 10 - format: int64 - minimum: 1 - type: integer type: $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' required: - id - type - include + - percentage - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSampleProcessorType: default: sample description: The processor type. The value should always be `sample`. @@ -37751,8 +38552,11 @@ components: x-enum-varnames: - SAMPLE ObservabilityPipelineSensitiveDataScannerProcessor: - description: The `sensitive_data_scanner` processor detects and optionally redacts - sensitive data in log events. 
+ description: 'The `sensitive_data_scanner` processor detects and optionally + redacts sensitive data in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37786,6 +38590,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSensitiveDataScannerProcessorAction: description: Defines what action to take when sensitive data is matched. oneOf: @@ -37907,6 +38713,11 @@ components: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions: description: Options for defining a custom regex pattern. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: "Custom regex for internal API\u202Fkeys" + type: string rule: description: A regular expression used to detect sensitive values. Must be a valid regex. @@ -37962,6 +38773,11 @@ components: description: Options for selecting a predefined library pattern and enabling keyword support. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: Credit card pattern + type: string id: description: Identifier for a predefined pattern from the sensitive data scanner pattern library. @@ -38103,7 +38919,10 @@ components: x-enum-varnames: - SENSITIVE_DATA_SCANNER ObservabilityPipelineSentinelOneDestination: - description: The `sentinel_one` destination sends logs to SentinelOne. + description: 'The `sentinel_one` destination sends logs to SentinelOne. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38127,6 +38946,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineSentinelOneDestinationRegion: description: The SentinelOne region to send logs to. 
enum: @@ -38151,8 +38972,11 @@ components: x-enum-varnames: - SENTINEL_ONE ObservabilityPipelineSocketDestination: - description: The `socket` destination sends logs over TCP or UDP to a remote + description: 'The `socket` destination sends logs over TCP or UDP to a remote server. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSocketDestinationEncoding' @@ -38185,6 +39009,8 @@ components: - framing - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketDestinationEncoding: description: Encoding format for log events. enum: @@ -38279,7 +39105,10 @@ components: x-enum-varnames: - SOCKET ObservabilityPipelineSocketSource: - description: The `socket` source ingests logs over TCP or UDP. + description: 'The `socket` source ingests logs over TCP or UDP. + + + **Supported pipeline types:** logs' properties: framing: $ref: '#/components/schemas/ObservabilityPipelineSocketSourceFraming' @@ -38302,6 +39131,8 @@ components: - mode - framing type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketSourceFraming: description: Framing method configuration for the socket source. oneOf: @@ -38442,9 +39273,79 @@ components: - type - attributes type: object + ObservabilityPipelineSplitArrayProcessor: + description: 'The `split_array` processor splits array fields into separate + events based on configured rules. + + + **Supported pipeline types:** logs' + properties: + arrays: + description: A list of array split configurations. + items: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorArrayConfig' + maxItems: 15 + minItems: 1 + type: array + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. 
Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: split-array-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. For split_array, this should typically be `*`. + example: '*' + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorType' + required: + - id + - type + - include + - arrays + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineSplitArrayProcessorArrayConfig: + description: Configuration for a single array split operation. + properties: + field: + description: The path to the array field to split. + example: tags + type: string + include: + description: A Datadog search query used to determine which logs this array + split operation targets. + example: '*' + type: string + required: + - include + - field + type: object + ObservabilityPipelineSplitArrayProcessorType: + default: split_array + description: The processor type. The value should always be `split_array`. + enum: + - split_array + example: split_array + type: string + x-enum-varnames: + - SPLIT_ARRAY ObservabilityPipelineSplunkHecDestination: - description: The `splunk_hec` destination forwards logs to Splunk using the + description: 'The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). + + + **Supported pipeline types:** logs' properties: auto_extract_timestamp: description: 'If `true`, Splunk tries to extract timestamps from incoming @@ -38484,6 +39385,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecDestinationEncoding: description: Encoding format for log events. 
enum: @@ -38504,8 +39407,11 @@ components: x-enum-varnames: - SPLUNK_HEC ObservabilityPipelineSplunkHecSource: - description: The `splunk_hec` source implements the Splunk HTTP Event Collector + description: 'The `splunk_hec` source implements the Splunk HTTP Event Collector (HEC) API. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38521,6 +39427,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecSourceType: default: splunk_hec description: The source type. Always `splunk_hec`. @@ -38534,7 +39442,10 @@ components: description: 'The `splunk_tcp` source receives logs from a Splunk Universal Forwarder over TCP. - TLS is supported for secure transmission.' + TLS is supported for secure transmission. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38550,6 +39461,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkTcpSourceType: default: splunk_tcp description: The source type. Always `splunk_tcp`. @@ -38560,7 +39473,10 @@ components: x-enum-varnames: - SPLUNK_TCP ObservabilityPipelineSumoLogicDestination: - description: The `sumo_logic` destination forwards logs to Sumo Logic. + description: 'The `sumo_logic` destination forwards logs to Sumo Logic. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' @@ -38601,6 +39517,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicDestinationEncoding: description: The output encoding format. enum: @@ -38638,7 +39556,10 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSumoLogicSource: - description: The `sumo_logic` source receives logs from Sumo Logic collectors. 
+ description: 'The `sumo_logic` source receives logs from Sumo Logic collectors. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38652,6 +39573,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicSourceType: default: sumo_logic description: The source type. The value should always be `sumo_logic`. @@ -38662,8 +39585,11 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSyslogNgDestination: - description: The `syslog_ng` destination forwards logs to an external `syslog-ng` + description: 'The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38692,6 +39618,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgDestinationType: default: syslog_ng description: The destination type. The value should always be `syslog_ng`. @@ -38702,8 +39630,11 @@ components: x-enum-varnames: - SYSLOG_NG ObservabilityPipelineSyslogNgSource: - description: The `syslog_ng` source listens for logs over TCP or UDP from a + description: 'The `syslog_ng` source listens for logs over TCP or UDP from a `syslog-ng` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38722,6 +39653,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgSourceType: default: syslog_ng description: The source type. The value should always be `syslog_ng`. 
@@ -38742,8 +39675,11 @@ components: - TCP - UDP ObservabilityPipelineThrottleProcessor: - description: The `throttle` processor limits the number of events that pass + description: 'The `throttle` processor limits the number of events that pass through over a given time window. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -38789,6 +39725,8 @@ components: - window - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineThrottleProcessorType: default: throttle description: The processor type. The value should always be `throttle`. @@ -75879,6 +76817,103 @@ paths: summary: Get all aggregated DNS traffic tags: - Cloud Network Monitoring + /api/v2/obs-pipelines/pipelines: + get: + description: Retrieve a list of pipelines. + operationId: ListPipelines + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListPipelinesResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: List pipelines + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + post: + description: Create a new pipeline. 
+ operationId: CreatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/validate: + post: + description: 'Validates a pipeline configuration without creating or updating + any resources. + + Returns a list of validation errors, if any.' + operationId: ValidatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ValidationResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Validate an observability pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' 
/api/v2/on-call/escalation-policies: post: description: Create a new On-Call escalation policy @@ -79313,103 +80348,6 @@ paths: tags: - CSM Threats x-codegen-request-body-name: body - /api/v2/remote_config/products/obs_pipelines/pipelines: - get: - description: Retrieve a list of pipelines. - operationId: ListPipelines - parameters: - - $ref: '#/components/parameters/PageSize' - - $ref: '#/components/parameters/PageNumber' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ListPipelinesResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: List pipelines - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - post: - description: Create a new pipeline. - operationId: CreatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Create a new pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. 
Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/validate: - post: - description: 'Validates a pipeline configuration without creating or updating - any resources. - - Returns a list of validation errors, if any.' - operationId: ValidatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ValidationResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Validate an observability pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: delete: description: Delete a pipeline. 
diff --git a/features/scenarios_model_mapping.rb b/features/scenarios_model_mapping.rb index 5980fcaf9919..573635c3b23e 100644 --- a/features/scenarios_model_mapping.rb +++ b/features/scenarios_model_mapping.rb @@ -2624,6 +2624,26 @@ "tags" => "String", "limit" => "Integer", }, + "v2.ListPipelines" => { + "page_size" => "Integer", + "page_number" => "Integer", + }, + "v2.CreatePipeline" => { + "body" => "ObservabilityPipelineSpec", + }, + "v2.ValidatePipeline" => { + "body" => "ObservabilityPipelineSpec", + }, + "v2.DeletePipeline" => { + "pipeline_id" => "String", + }, + "v2.GetPipeline" => { + "pipeline_id" => "String", + }, + "v2.UpdatePipeline" => { + "pipeline_id" => "String", + "body" => "ObservabilityPipeline", + }, "v2.CreateOnCallEscalationPolicy" => { "include" => "String", "body" => "EscalationPolicyCreateRequest", @@ -2965,26 +2985,6 @@ "agent_rule_id" => "String", "body" => "CloudWorkloadSecurityAgentRuleUpdateRequest", }, - "v2.ListPipelines" => { - "page_size" => "Integer", - "page_number" => "Integer", - }, - "v2.CreatePipeline" => { - "body" => "ObservabilityPipelineSpec", - }, - "v2.ValidatePipeline" => { - "body" => "ObservabilityPipelineSpec", - }, - "v2.DeletePipeline" => { - "pipeline_id" => "String", - }, - "v2.GetPipeline" => { - "pipeline_id" => "String", - }, - "v2.UpdatePipeline" => { - "pipeline_id" => "String", - "body" => "ObservabilityPipeline", - }, "v2.DeleteRestrictionPolicy" => { "resource_id" => "String", }, diff --git a/features/v2/given.json b/features/v2/given.json index aa08181879c7..f34bdccdee34 100644 --- a/features/v2/given.json +++ b/features/v2/given.json @@ -727,6 +727,18 @@ "tag": "Monitors", "operationId": "CreateMonitorUserTemplate" }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n 
\"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" + } + ], + "step": "there is a valid \"pipeline\" in the system", + "key": "pipeline", + "tag": "Observability Pipelines", + "operationId": "CreatePipeline" + }, { "parameters": [ { @@ -879,18 +891,6 @@ "tag": "CSM Threats", "operationId": "CreateCSMThreatsAgentPolicy" }, - { - "parameters": [ - { - "name": "body", - "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" - } - ], - "step": "there is a valid \"pipeline\" in the system", - "key": "pipeline", - "tag": "Observability Pipelines", - "operationId": "CreatePipeline" - }, { "parameters": [ { diff --git a/features/v2/observability_pipelines.feature b/features/v2/observability_pipelines.feature index c43fa8b3b767..a9b17ec7fdf5 100644 --- 
a/features/v2/observability_pipelines.feature +++ b/features/v2/observability_pipelines.feature @@ -20,7 +20,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Conflict" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "pipeline_type": "logs", "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -115,7 +115,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "REPLACE.ME" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": 
"datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "pipeline_type": "logs", "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict diff --git a/features/v2/undo.json b/features/v2/undo.json index eb43f26dbec6..25c59473dad7 100644 --- a/features/v2/undo.json +++ b/features/v2/undo.json @@ -2838,6 +2838,31 @@ "type": "safe" } }, + "ListPipelines": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "CreatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "operationId": "DeletePipeline", + "parameters": [ + { + "name": "pipeline_id", + "source": "data.id" + } + ], + "type": "unsafe" + } + }, + "ValidatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, "CreateOnCallEscalationPolicy": { "tag": "On-Call", "undo": { @@ -3443,31 +3468,6 @@ 
"type": "idempotent" } }, - "ListPipelines": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "CreatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "operationId": "DeletePipeline", - "parameters": [ - { - "name": "pipeline_id", - "source": "data.id" - } - ], - "type": "unsafe" - } - }, - "ValidatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, "DeletePipeline": { "tag": "Observability Pipelines", "undo": { diff --git a/lib/datadog_api_client/configuration.rb b/lib/datadog_api_client/configuration.rb index 95ddf1221cda..05283f260c46 100644 --- a/lib/datadog_api_client/configuration.rb +++ b/lib/datadog_api_client/configuration.rb @@ -301,6 +301,12 @@ def initialize "v2.update_monitor_user_template": false, "v2.validate_existing_monitor_user_template": false, "v2.validate_monitor_user_template": false, + "v2.create_pipeline": false, + "v2.delete_pipeline": false, + "v2.get_pipeline": false, + "v2.list_pipelines": false, + "v2.update_pipeline": false, + "v2.validate_pipeline": false, "v2.list_role_templates": false, "v2.create_connection": false, "v2.delete_connection": false, @@ -312,12 +318,6 @@ def initialize "v2.query_event_filtered_users": false, "v2.query_users": false, "v2.update_connection": false, - "v2.create_pipeline": false, - "v2.delete_pipeline": false, - "v2.get_pipeline": false, - "v2.list_pipelines": false, - "v2.update_pipeline": false, - "v2.validate_pipeline": false, "v2.create_scorecard_outcomes_batch": false, "v2.create_scorecard_rule": false, "v2.delete_scorecard_rule": false, diff --git a/lib/datadog_api_client/inflector.rb b/lib/datadog_api_client/inflector.rb index 022391df7878..bc27df291a8a 100644 --- a/lib/datadog_api_client/inflector.rb +++ b/lib/datadog_api_client/inflector.rb @@ -3139,6 +3139,8 @@ def overrides "v2.observability_pipeline_add_env_vars_processor_variable" => "ObservabilityPipelineAddEnvVarsProcessorVariable", 
"v2.observability_pipeline_add_fields_processor" => "ObservabilityPipelineAddFieldsProcessor", "v2.observability_pipeline_add_fields_processor_type" => "ObservabilityPipelineAddFieldsProcessorType", + "v2.observability_pipeline_add_hostname_processor" => "ObservabilityPipelineAddHostnameProcessor", + "v2.observability_pipeline_add_hostname_processor_type" => "ObservabilityPipelineAddHostnameProcessorType", "v2.observability_pipeline_amazon_data_firehose_source" => "ObservabilityPipelineAmazonDataFirehoseSource", "v2.observability_pipeline_amazon_data_firehose_source_type" => "ObservabilityPipelineAmazonDataFirehoseSourceType", "v2.observability_pipeline_amazon_open_search_destination" => "ObservabilityPipelineAmazonOpenSearchDestination", @@ -3153,8 +3155,11 @@ def overrides "v2.observability_pipeline_amazon_security_lake_destination" => "ObservabilityPipelineAmazonSecurityLakeDestination", "v2.observability_pipeline_amazon_security_lake_destination_type" => "ObservabilityPipelineAmazonSecurityLakeDestinationType", "v2.observability_pipeline_aws_auth" => "ObservabilityPipelineAwsAuth", + "v2.observability_pipeline_cloud_prem_destination" => "ObservabilityPipelineCloudPremDestination", + "v2.observability_pipeline_cloud_prem_destination_type" => "ObservabilityPipelineCloudPremDestinationType", "v2.observability_pipeline_config" => "ObservabilityPipelineConfig", "v2.observability_pipeline_config_destination_item" => "ObservabilityPipelineConfigDestinationItem", + "v2.observability_pipeline_config_pipeline_type" => "ObservabilityPipelineConfigPipelineType", "v2.observability_pipeline_config_processor_group" => "ObservabilityPipelineConfigProcessorGroup", "v2.observability_pipeline_config_processor_item" => "ObservabilityPipelineConfigProcessorItem", "v2.observability_pipeline_config_source_item" => "ObservabilityPipelineConfigSourceItem", @@ -3172,6 +3177,8 @@ def overrides "v2.observability_pipeline_datadog_agent_source_type" => 
"ObservabilityPipelineDatadogAgentSourceType", "v2.observability_pipeline_datadog_logs_destination" => "ObservabilityPipelineDatadogLogsDestination", "v2.observability_pipeline_datadog_logs_destination_type" => "ObservabilityPipelineDatadogLogsDestinationType", + "v2.observability_pipeline_datadog_metrics_destination" => "ObservabilityPipelineDatadogMetricsDestination", + "v2.observability_pipeline_datadog_metrics_destination_type" => "ObservabilityPipelineDatadogMetricsDestinationType", "v2.observability_pipeline_datadog_tags_processor" => "ObservabilityPipelineDatadogTagsProcessor", "v2.observability_pipeline_datadog_tags_processor_action" => "ObservabilityPipelineDatadogTagsProcessorAction", "v2.observability_pipeline_datadog_tags_processor_mode" => "ObservabilityPipelineDatadogTagsProcessorMode", @@ -3182,6 +3189,7 @@ def overrides "v2.observability_pipeline_dedupe_processor_type" => "ObservabilityPipelineDedupeProcessorType", "v2.observability_pipeline_elasticsearch_destination" => "ObservabilityPipelineElasticsearchDestination", "v2.observability_pipeline_elasticsearch_destination_api_version" => "ObservabilityPipelineElasticsearchDestinationApiVersion", + "v2.observability_pipeline_elasticsearch_destination_data_stream" => "ObservabilityPipelineElasticsearchDestinationDataStream", "v2.observability_pipeline_elasticsearch_destination_type" => "ObservabilityPipelineElasticsearchDestinationType", "v2.observability_pipeline_enrichment_table_file" => "ObservabilityPipelineEnrichmentTableFile", "v2.observability_pipeline_enrichment_table_file_encoding" => "ObservabilityPipelineEnrichmentTableFileEncoding", @@ -3193,6 +3201,7 @@ def overrides "v2.observability_pipeline_enrichment_table_geo_ip" => "ObservabilityPipelineEnrichmentTableGeoIp", "v2.observability_pipeline_enrichment_table_processor" => "ObservabilityPipelineEnrichmentTableProcessor", "v2.observability_pipeline_enrichment_table_processor_type" => "ObservabilityPipelineEnrichmentTableProcessorType", + 
"v2.observability_pipeline_enrichment_table_reference_table" => "ObservabilityPipelineEnrichmentTableReferenceTable", "v2.observability_pipeline_field_value" => "ObservabilityPipelineFieldValue", "v2.observability_pipeline_filter_processor" => "ObservabilityPipelineFilterProcessor", "v2.observability_pipeline_filter_processor_type" => "ObservabilityPipelineFilterProcessorType", @@ -3221,19 +3230,35 @@ def overrides "v2.observability_pipeline_google_pub_sub_destination_type" => "ObservabilityPipelineGooglePubSubDestinationType", "v2.observability_pipeline_google_pub_sub_source" => "ObservabilityPipelineGooglePubSubSource", "v2.observability_pipeline_google_pub_sub_source_type" => "ObservabilityPipelineGooglePubSubSourceType", + "v2.observability_pipeline_http_client_destination" => "ObservabilityPipelineHttpClientDestination", + "v2.observability_pipeline_http_client_destination_auth_strategy" => "ObservabilityPipelineHttpClientDestinationAuthStrategy", + "v2.observability_pipeline_http_client_destination_compression" => "ObservabilityPipelineHttpClientDestinationCompression", + "v2.observability_pipeline_http_client_destination_compression_algorithm" => "ObservabilityPipelineHttpClientDestinationCompressionAlgorithm", + "v2.observability_pipeline_http_client_destination_encoding" => "ObservabilityPipelineHttpClientDestinationEncoding", + "v2.observability_pipeline_http_client_destination_type" => "ObservabilityPipelineHttpClientDestinationType", "v2.observability_pipeline_http_client_source" => "ObservabilityPipelineHttpClientSource", "v2.observability_pipeline_http_client_source_auth_strategy" => "ObservabilityPipelineHttpClientSourceAuthStrategy", "v2.observability_pipeline_http_client_source_type" => "ObservabilityPipelineHttpClientSourceType", "v2.observability_pipeline_http_server_source" => "ObservabilityPipelineHttpServerSource", "v2.observability_pipeline_http_server_source_auth_strategy" => "ObservabilityPipelineHttpServerSourceAuthStrategy", 
"v2.observability_pipeline_http_server_source_type" => "ObservabilityPipelineHttpServerSourceType", + "v2.observability_pipeline_kafka_destination" => "ObservabilityPipelineKafkaDestination", + "v2.observability_pipeline_kafka_destination_compression" => "ObservabilityPipelineKafkaDestinationCompression", + "v2.observability_pipeline_kafka_destination_encoding" => "ObservabilityPipelineKafkaDestinationEncoding", + "v2.observability_pipeline_kafka_destination_type" => "ObservabilityPipelineKafkaDestinationType", + "v2.observability_pipeline_kafka_librdkafka_option" => "ObservabilityPipelineKafkaLibrdkafkaOption", + "v2.observability_pipeline_kafka_sasl" => "ObservabilityPipelineKafkaSasl", + "v2.observability_pipeline_kafka_sasl_mechanism" => "ObservabilityPipelineKafkaSaslMechanism", "v2.observability_pipeline_kafka_source" => "ObservabilityPipelineKafkaSource", - "v2.observability_pipeline_kafka_source_librdkafka_option" => "ObservabilityPipelineKafkaSourceLibrdkafkaOption", - "v2.observability_pipeline_kafka_source_sasl" => "ObservabilityPipelineKafkaSourceSasl", "v2.observability_pipeline_kafka_source_type" => "ObservabilityPipelineKafkaSourceType", "v2.observability_pipeline_logstash_source" => "ObservabilityPipelineLogstashSource", "v2.observability_pipeline_logstash_source_type" => "ObservabilityPipelineLogstashSourceType", "v2.observability_pipeline_metadata_entry" => "ObservabilityPipelineMetadataEntry", + "v2.observability_pipeline_metric_tags_processor" => "ObservabilityPipelineMetricTagsProcessor", + "v2.observability_pipeline_metric_tags_processor_rule" => "ObservabilityPipelineMetricTagsProcessorRule", + "v2.observability_pipeline_metric_tags_processor_rule_action" => "ObservabilityPipelineMetricTagsProcessorRuleAction", + "v2.observability_pipeline_metric_tags_processor_rule_mode" => "ObservabilityPipelineMetricTagsProcessorRuleMode", + "v2.observability_pipeline_metric_tags_processor_type" => "ObservabilityPipelineMetricTagsProcessorType", 
"v2.observability_pipeline_metric_value" => "ObservabilityPipelineMetricValue", "v2.observability_pipeline_new_relic_destination" => "ObservabilityPipelineNewRelicDestination", "v2.observability_pipeline_new_relic_destination_region" => "ObservabilityPipelineNewRelicDestinationRegion", @@ -3245,6 +3270,8 @@ def overrides "v2.observability_pipeline_ocsf_mapping_library" => "ObservabilityPipelineOcsfMappingLibrary", "v2.observability_pipeline_open_search_destination" => "ObservabilityPipelineOpenSearchDestination", "v2.observability_pipeline_open_search_destination_type" => "ObservabilityPipelineOpenSearchDestinationType", + "v2.observability_pipeline_opentelemetry_source" => "ObservabilityPipelineOpentelemetrySource", + "v2.observability_pipeline_opentelemetry_source_type" => "ObservabilityPipelineOpentelemetrySourceType", "v2.observability_pipeline_parse_grok_processor" => "ObservabilityPipelineParseGrokProcessor", "v2.observability_pipeline_parse_grok_processor_rule" => "ObservabilityPipelineParseGrokProcessorRule", "v2.observability_pipeline_parse_grok_processor_rule_match_rule" => "ObservabilityPipelineParseGrokProcessorRuleMatchRule", @@ -3252,7 +3279,8 @@ def overrides "v2.observability_pipeline_parse_grok_processor_type" => "ObservabilityPipelineParseGrokProcessorType", "v2.observability_pipeline_parse_json_processor" => "ObservabilityPipelineParseJSONProcessor", "v2.observability_pipeline_parse_json_processor_type" => "ObservabilityPipelineParseJSONProcessorType", - "v2.observability_pipeline_pipeline_kafka_source_sasl_mechanism" => "ObservabilityPipelinePipelineKafkaSourceSaslMechanism", + "v2.observability_pipeline_parse_xml_processor" => "ObservabilityPipelineParseXMLProcessor", + "v2.observability_pipeline_parse_xml_processor_type" => "ObservabilityPipelineParseXMLProcessorType", "v2.observability_pipeline_quota_processor" => "ObservabilityPipelineQuotaProcessor", "v2.observability_pipeline_quota_processor_limit" => 
"ObservabilityPipelineQuotaProcessorLimit", "v2.observability_pipeline_quota_processor_limit_enforce_type" => "ObservabilityPipelineQuotaProcessorLimitEnforceType", @@ -3333,6 +3361,9 @@ def overrides "v2.observability_pipeline_socket_source_type" => "ObservabilityPipelineSocketSourceType", "v2.observability_pipeline_spec" => "ObservabilityPipelineSpec", "v2.observability_pipeline_spec_data" => "ObservabilityPipelineSpecData", + "v2.observability_pipeline_split_array_processor" => "ObservabilityPipelineSplitArrayProcessor", + "v2.observability_pipeline_split_array_processor_array_config" => "ObservabilityPipelineSplitArrayProcessorArrayConfig", + "v2.observability_pipeline_split_array_processor_type" => "ObservabilityPipelineSplitArrayProcessorType", "v2.observability_pipeline_splunk_hec_destination" => "ObservabilityPipelineSplunkHecDestination", "v2.observability_pipeline_splunk_hec_destination_encoding" => "ObservabilityPipelineSplunkHecDestinationEncoding", "v2.observability_pipeline_splunk_hec_destination_type" => "ObservabilityPipelineSplunkHecDestinationType", diff --git a/lib/datadog_api_client/v2/api/observability_pipelines_api.rb b/lib/datadog_api_client/v2/api/observability_pipelines_api.rb index 2bc926176292..dc1c17d03832 100644 --- a/lib/datadog_api_client/v2/api/observability_pipelines_api.rb +++ b/lib/datadog_api_client/v2/api/observability_pipelines_api.rb @@ -54,7 +54,7 @@ def create_pipeline_with_http_info(body, opts = {}) fail ArgumentError, "Missing the required parameter 'body' when calling ObservabilityPipelinesAPI.create_pipeline" end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines' + local_var_path = '/api/v2/obs-pipelines/pipelines' # query parameters query_params = opts[:query_params] || {} @@ -266,7 +266,7 @@ def list_pipelines_with_http_info(opts = {}) @api_client.config.logger.debug 'Calling API: ObservabilityPipelinesAPI.list_pipelines ...' 
end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines' + local_var_path = '/api/v2/obs-pipelines/pipelines' # query parameters query_params = opts[:query_params] || {} @@ -418,7 +418,7 @@ def validate_pipeline_with_http_info(body, opts = {}) fail ArgumentError, "Missing the required parameter 'body' when calling ObservabilityPipelinesAPI.validate_pipeline" end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines/validate' + local_var_path = '/api/v2/obs-pipelines/pipelines/validate' # query parameters query_params = opts[:query_params] || {} diff --git a/lib/datadog_api_client/v2/models/azure_storage_destination.rb b/lib/datadog_api_client/v2/models/azure_storage_destination.rb index ef29c2cade85..b17e732af9cb 100644 --- a/lib/datadog_api_client/v2/models/azure_storage_destination.rb +++ b/lib/datadog_api_client/v2/models/azure_storage_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `azure_storage` destination forwards logs to an Azure Blob Storage container. + # + # **Supported pipeline types:** logs class AzureStorageDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb b/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb index 03266b733402..4948de659f07 100644 --- a/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb +++ b/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. 
+ # + # **Supported pipeline types:** logs class MicrosoftSentinelDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb index 03fce60c6e93..fd17f878e32a 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `add_env_vars` processor adds environment variable values to log events. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAddEnvVarsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb index ee063e97ea95..cfc3ff081a7c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `add_fields` processor adds static key-value fields to logs. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAddFieldsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor.rb new file mode 100644 index 000000000000..66576e310ea7 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor.rb @@ -0,0 +1,198 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `add_hostname` processor adds the hostname to log events. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineAddHostnameProcessor + include BaseGenericModel + + # The display name for a component. + attr_accessor :display_name + + # Whether this processor is enabled. + attr_reader :enabled + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. + attr_reader :include + + # The processor type. The value should always be `add_hostname`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'display_name' => :'display_name', + :'enabled' => :'enabled', + :'id' => :'id', + :'include' => :'include', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'display_name' => :'String', + :'enabled' => :'Boolean', + :'id' => :'String', + :'include' => :'String', + :'type' => :'ObservabilityPipelineAddHostnameProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineAddHostnameProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'display_name') + self.display_name = attributes[:'display_name'] + end + + if attributes.key?(:'enabled') + self.enabled = attributes[:'enabled'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @enabled.nil? + return false if @id.nil? + return false if @include.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param enabled [Object] Object to be assigned + # @!visibility private + def enabled=(enabled) + if enabled.nil? + fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.' + end + @enabled = enabled + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? 
+ fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + display_name == o.display_name && + enabled == o.enabled && + id == o.id && + include == o.include && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [display_name, enabled, id, include, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor_type.rb new file mode 100644 index 000000000000..5eca2a92ffb4 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `add_hostname`. + class ObservabilityPipelineAddHostnameProcessorType + include BaseEnumModel + + ADD_HOSTNAME = "add_hostname".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb index bd174706a449..ce0598e59e64 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_data_firehose` source ingests logs from AWS Data Firehose. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonDataFirehoseSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb index ab8721363e09..44f1c6db5b6c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonOpenSearchDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb index 6e74bafcd33b..aec7f0bbcbfe 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_s3` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonS3Destination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb index a764ab83045b..70f135727195 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb @@ -19,6 +19,8 @@ module DatadogAPIClient::V2 # The `amazon_s3` source ingests logs from an Amazon S3 bucket. # It supports AWS authentication and TLS encryption. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonS3Source include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb index 6904637a9723..6bc480ae9973 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_security_lake` destination sends your logs to Amazon Security Lake. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonSecurityLakeDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination.rb new file mode 100644 index 000000000000..fcc35829a771 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination.rb @@ -0,0 +1,169 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `cloud_prem` destination sends logs to Datadog CloudPrem. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineCloudPremDestination + include BaseGenericModel + + # The unique identifier for this component. 
+ attr_reader :id + + # A list of component IDs whose output is used as the `input` for this component. + attr_reader :inputs + + # The destination type. The value should always be `cloud_prem`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'id' => :'id', + :'inputs' => :'inputs', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'id' => :'String', + :'inputs' => :'Array', + :'type' => :'ObservabilityPipelineCloudPremDestinationType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineCloudPremDestination` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? + return false if @inputs.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? 
+ fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + id == o.id && + inputs == o.inputs && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [id, inputs, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination_type.rb new file mode 100644 index 000000000000..8f235942f08e --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `cloud_prem`. + class ObservabilityPipelineCloudPremDestinationType + include BaseEnumModel + + CLOUD_PREM = "cloud_prem".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config.rb index 04005274e028..1d3511feca5f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config.rb @@ -24,6 +24,9 @@ class ObservabilityPipelineConfig # A list of destination components where processed logs are sent. attr_reader :destinations + # The type of data being ingested. Defaults to `logs` if not specified. + attr_accessor :pipeline_type + # A list of processor groups that transform or enrich log data. 
attr_accessor :processors @@ -37,6 +40,7 @@ class ObservabilityPipelineConfig def self.attribute_map { :'destinations' => :'destinations', + :'pipeline_type' => :'pipeline_type', :'processors' => :'processors', :'sources' => :'sources' } @@ -47,6 +51,7 @@ def self.attribute_map def self.openapi_types { :'destinations' => :'Array', + :'pipeline_type' => :'ObservabilityPipelineConfigPipelineType', :'processors' => :'Array', :'sources' => :'Array' } @@ -76,6 +81,10 @@ def initialize(attributes = {}) end end + if attributes.key?(:'pipeline_type') + self.pipeline_type = attributes[:'pipeline_type'] + end + if attributes.key?(:'processors') if (value = attributes[:'processors']).is_a?(Array) self.processors = value @@ -145,6 +154,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && destinations == o.destinations && + pipeline_type == o.pipeline_type && processors == o.processors && sources == o.sources && additional_properties == o.additional_properties @@ -154,7 +164,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [destinations, processors, sources, additional_properties].hash + [destinations, pipeline_type, processors, sources, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb index 00386a1aa190..43df429f92fb 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb @@ -26,25 +26,29 @@ class << self # List of class defined in oneOf (OpenAPI v3) def openapi_one_of [ - :'ObservabilityPipelineDatadogLogsDestination', + :'ObservabilityPipelineHttpClientDestination', + :'ObservabilityPipelineAmazonOpenSearchDestination', :'ObservabilityPipelineAmazonS3Destination', - :'ObservabilityPipelineGoogleCloudStorageDestination', - 
:'ObservabilityPipelineSplunkHecDestination', - :'ObservabilityPipelineSumoLogicDestination', - :'ObservabilityPipelineElasticsearchDestination', - :'ObservabilityPipelineRsyslogDestination', - :'ObservabilityPipelineSyslogNgDestination', + :'ObservabilityPipelineAmazonSecurityLakeDestination', :'AzureStorageDestination', - :'MicrosoftSentinelDestination', + :'ObservabilityPipelineCloudPremDestination', + :'ObservabilityPipelineCrowdStrikeNextGenSiemDestination', + :'ObservabilityPipelineDatadogLogsDestination', + :'ObservabilityPipelineElasticsearchDestination', :'ObservabilityPipelineGoogleChronicleDestination', + :'ObservabilityPipelineGoogleCloudStorageDestination', + :'ObservabilityPipelineGooglePubSubDestination', + :'ObservabilityPipelineKafkaDestination', + :'MicrosoftSentinelDestination', :'ObservabilityPipelineNewRelicDestination', - :'ObservabilityPipelineSentinelOneDestination', :'ObservabilityPipelineOpenSearchDestination', - :'ObservabilityPipelineAmazonOpenSearchDestination', + :'ObservabilityPipelineRsyslogDestination', + :'ObservabilityPipelineSentinelOneDestination', :'ObservabilityPipelineSocketDestination', - :'ObservabilityPipelineAmazonSecurityLakeDestination', - :'ObservabilityPipelineCrowdStrikeNextGenSiemDestination', - :'ObservabilityPipelineGooglePubSubDestination' + :'ObservabilityPipelineSplunkHecDestination', + :'ObservabilityPipelineSumoLogicDestination', + :'ObservabilityPipelineSyslogNgDestination', + :'ObservabilityPipelineDatadogMetricsDestination' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb new file mode 100644 index 000000000000..ef043dc88c52 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The type of data being ingested. Defaults to `logs` if not specified. + class ObservabilityPipelineConfigPipelineType + include BaseEnumModel + + LOGS = "logs".freeze + METRICS = "metrics".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb index 334f6e14d8a9..9c6c290c3c53 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb @@ -27,23 +27,27 @@ class << self def openapi_one_of [ :'ObservabilityPipelineFilterProcessor', + :'ObservabilityPipelineAddEnvVarsProcessor', + :'ObservabilityPipelineAddFieldsProcessor', + :'ObservabilityPipelineAddHostnameProcessor', + :'ObservabilityPipelineCustomProcessor', + :'ObservabilityPipelineDatadogTagsProcessor', + :'ObservabilityPipelineDedupeProcessor', + :'ObservabilityPipelineEnrichmentTableProcessor', + :'ObservabilityPipelineGenerateMetricsProcessor', + :'ObservabilityPipelineOcsfMapperProcessor', + :'ObservabilityPipelineParseGrokProcessor', :'ObservabilityPipelineParseJSONProcessor', + :'ObservabilityPipelineParseXMLProcessor', :'ObservabilityPipelineQuotaProcessor', - :'ObservabilityPipelineAddFieldsProcessor', + :'ObservabilityPipelineReduceProcessor', :'ObservabilityPipelineRemoveFieldsProcessor', :'ObservabilityPipelineRenameFieldsProcessor', - 
:'ObservabilityPipelineGenerateMetricsProcessor', :'ObservabilityPipelineSampleProcessor', - :'ObservabilityPipelineParseGrokProcessor', :'ObservabilityPipelineSensitiveDataScannerProcessor', - :'ObservabilityPipelineOcsfMapperProcessor', - :'ObservabilityPipelineAddEnvVarsProcessor', - :'ObservabilityPipelineDedupeProcessor', - :'ObservabilityPipelineEnrichmentTableProcessor', - :'ObservabilityPipelineReduceProcessor', + :'ObservabilityPipelineSplitArrayProcessor', :'ObservabilityPipelineThrottleProcessor', - :'ObservabilityPipelineCustomProcessor', - :'ObservabilityPipelineDatadogTagsProcessor' + :'ObservabilityPipelineMetricTagsProcessor' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb index 6c5e58312de6..74579da90735 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb @@ -26,22 +26,23 @@ class << self # List of class defined in oneOf (OpenAPI v3) def openapi_one_of [ - :'ObservabilityPipelineKafkaSource', :'ObservabilityPipelineDatadogAgentSource', - :'ObservabilityPipelineSplunkTcpSource', - :'ObservabilityPipelineSplunkHecSource', + :'ObservabilityPipelineAmazonDataFirehoseSource', :'ObservabilityPipelineAmazonS3Source', - :'ObservabilityPipelineFluentdSource', :'ObservabilityPipelineFluentBitSource', - :'ObservabilityPipelineHttpServerSource', - :'ObservabilityPipelineSumoLogicSource', - :'ObservabilityPipelineRsyslogSource', - :'ObservabilityPipelineSyslogNgSource', - :'ObservabilityPipelineAmazonDataFirehoseSource', + :'ObservabilityPipelineFluentdSource', :'ObservabilityPipelineGooglePubSubSource', :'ObservabilityPipelineHttpClientSource', + :'ObservabilityPipelineHttpServerSource', + :'ObservabilityPipelineKafkaSource', :'ObservabilityPipelineLogstashSource', - 
:'ObservabilityPipelineSocketSource' + :'ObservabilityPipelineRsyslogSource', + :'ObservabilityPipelineSocketSource', + :'ObservabilityPipelineSplunkHecSource', + :'ObservabilityPipelineSplunkTcpSource', + :'ObservabilityPipelineSumoLogicSource', + :'ObservabilityPipelineSyslogNgSource', + :'ObservabilityPipelineOpentelemetrySource' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb index 827d8cc8c390..3a24137f6c73 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. + # + # **Supported pipeline types:** logs class ObservabilityPipelineCrowdStrikeNextGenSiemDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb index 5b554a17df55..4e9800ce0e5b 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `custom_processor` processor transforms events using [Vector Remap Language (VRL)](https://vector.dev/docs/reference/vrl/) scripts with advanced filtering capabilities. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineCustomProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb index 27b724373297..4147195c688d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb @@ -17,7 +17,9 @@ require 'time' module DatadogAPIClient::V2 - # The `datadog_agent` source collects logs from the Datadog Agent. + # The `datadog_agent` source collects logs/metrics from the Datadog Agent. + # + # **Supported pipeline types:** logs, metrics class ObservabilityPipelineDatadogAgentSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb index 3b08d1fd9c22..ea784246329d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `datadog_logs` destination forwards logs to Datadog Log Management. + # + # **Supported pipeline types:** logs class ObservabilityPipelineDatadogLogsDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb new file mode 100644 index 000000000000..e94fadc3fa3c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb @@ -0,0 +1,169 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `datadog_metrics` destination forwards metrics to Datadog. + # + # **Supported pipeline types:** metrics + class ObservabilityPipelineDatadogMetricsDestination + include BaseGenericModel + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the input for this component. + attr_reader :inputs + + # The destination type. The value should always be `datadog_metrics`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'id' => :'id', + :'inputs' => :'inputs', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'id' => :'String', + :'inputs' => :'Array', + :'type' => :'ObservabilityPipelineDatadogMetricsDestinationType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineDatadogMetricsDestination` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? + return false if @inputs.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' 
+ end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + id == o.id && + inputs == o.inputs && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [id, inputs, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb new file mode 100644 index 000000000000..c32b6952246a --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `datadog_metrics`. + class ObservabilityPipelineDatadogMetricsDestinationType + include BaseEnumModel + + DATADOG_METRICS = "datadog_metrics".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb index 5eeb0091f9d0..f406e6b5a8ed 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `datadog_tags` processor includes or excludes specific Datadog tags in your logs. + # + # **Supported pipeline types:** logs class ObservabilityPipelineDatadogTagsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb index 4420f11b474b..3b73f9a136fa 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `dedupe` processor removes duplicate fields in log events. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineDedupeProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb index f49df443a047..d3e3475ce72d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `elasticsearch` destination writes logs to an Elasticsearch cluster. + # + # **Supported pipeline types:** logs class ObservabilityPipelineElasticsearchDestination include BaseGenericModel @@ -27,6 +29,9 @@ class ObservabilityPipelineElasticsearchDestination # The index to write logs to in Elasticsearch. attr_accessor :bulk_index + # Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + attr_accessor :data_stream + # The unique identifier for this component. 
attr_reader :id @@ -44,6 +49,7 @@ def self.attribute_map { :'api_version' => :'api_version', :'bulk_index' => :'bulk_index', + :'data_stream' => :'data_stream', :'id' => :'id', :'inputs' => :'inputs', :'type' => :'type' @@ -56,6 +62,7 @@ def self.openapi_types { :'api_version' => :'ObservabilityPipelineElasticsearchDestinationApiVersion', :'bulk_index' => :'String', + :'data_stream' => :'ObservabilityPipelineElasticsearchDestinationDataStream', :'id' => :'String', :'inputs' => :'Array', :'type' => :'ObservabilityPipelineElasticsearchDestinationType' @@ -88,6 +95,10 @@ def initialize(attributes = {}) self.bulk_index = attributes[:'bulk_index'] end + if attributes.key?(:'data_stream') + self.data_stream = attributes[:'data_stream'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -171,6 +182,7 @@ def ==(o) self.class == o.class && api_version == o.api_version && bulk_index == o.bulk_index && + data_stream == o.data_stream && id == o.id && inputs == o.inputs && type == o.type && @@ -181,7 +193,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [api_version, bulk_index, id, inputs, type, additional_properties].hash + [api_version, bulk_index, data_stream, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination_data_stream.rb b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination_data_stream.rb new file mode 100644 index 000000000000..8bf597f0b45d --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination_data_stream.rb @@ -0,0 +1,125 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + class ObservabilityPipelineElasticsearchDestinationDataStream + include BaseGenericModel + + # The data stream dataset for your logs. This groups logs by their source or application. + attr_accessor :dataset + + # The data stream type for your logs. This determines how logs are categorized within the data stream. + attr_accessor :dtype + + # The data stream namespace for your logs. This separates logs into different environments or domains. + attr_accessor :namespace + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'dataset' => :'dataset', + :'dtype' => :'dtype', + :'namespace' => :'namespace' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'dataset' => :'String', + :'dtype' => :'String', + :'namespace' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineElasticsearchDestinationDataStream` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'dataset') + self.dataset = attributes[:'dataset'] + end + + if attributes.key?(:'dtype') + self.dtype = attributes[:'dtype'] + end + + if attributes.key?(:'namespace') + self.namespace = attributes[:'namespace'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. 
+ # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + dataset == o.dataset && + dtype == o.dtype && + namespace == o.namespace && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [dataset, dtype, namespace, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb index ea73dec4ede1..830a36f1060e 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb @@ -17,7 +17,9 @@ require 'time' module DatadogAPIClient::V2 - # The `enrichment_table` processor enriches logs using a static CSV file or GeoIP database. + # The `enrichment_table` processor enriches logs using a static CSV file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, or `reference_table` must be configured. + # + # **Supported pipeline types:** logs class ObservabilityPipelineEnrichmentTableProcessor include BaseGenericModel @@ -39,6 +41,9 @@ class ObservabilityPipelineEnrichmentTableProcessor # A Datadog search query used to determine which logs this processor targets. attr_reader :include + # Uses a Datadog reference table to enrich logs. + attr_accessor :reference_table + # Path where enrichment results should be stored in the log. 
attr_reader :target @@ -57,6 +62,7 @@ def self.attribute_map :'geoip' => :'geoip', :'id' => :'id', :'include' => :'include', + :'reference_table' => :'reference_table', :'target' => :'target', :'type' => :'type' } @@ -72,6 +78,7 @@ def self.openapi_types :'geoip' => :'ObservabilityPipelineEnrichmentTableGeoIp', :'id' => :'String', :'include' => :'String', + :'reference_table' => :'ObservabilityPipelineEnrichmentTableReferenceTable', :'target' => :'String', :'type' => :'ObservabilityPipelineEnrichmentTableProcessorType' } @@ -119,6 +126,10 @@ def initialize(attributes = {}) self.include = attributes[:'include'] end + if attributes.key?(:'reference_table') + self.reference_table = attributes[:'reference_table'] + end + if attributes.key?(:'target') self.target = attributes[:'target'] end @@ -222,6 +233,7 @@ def ==(o) geoip == o.geoip && id == o.id && include == o.include && + reference_table == o.reference_table && target == o.target && type == o.type && additional_properties == o.additional_properties @@ -231,7 +243,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [display_name, enabled, file, geoip, id, include, target, type, additional_properties].hash + [display_name, enabled, file, geoip, id, include, reference_table, target, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_reference_table.rb b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_reference_table.rb new file mode 100644 index 000000000000..2248ad6775d1 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_reference_table.rb @@ -0,0 +1,156 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Uses a Datadog reference table to enrich logs. + class ObservabilityPipelineEnrichmentTableReferenceTable + include BaseGenericModel + + # List of column names to include from the reference table. If not provided, all columns are included. + attr_accessor :columns + + # Path to the field in the log event to match against the reference table. + attr_reader :key_field + + # The unique identifier of the reference table. + attr_reader :table_id + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'columns' => :'columns', + :'key_field' => :'key_field', + :'table_id' => :'table_id' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'columns' => :'Array', + :'key_field' => :'String', + :'table_id' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineEnrichmentTableReferenceTable` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'columns') + if (value = attributes[:'columns']).is_a?(Array) + self.columns = value + end + end + + if attributes.key?(:'key_field') + self.key_field = attributes[:'key_field'] + end + + if attributes.key?(:'table_id') + self.table_id = attributes[:'table_id'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @key_field.nil? + return false if @table_id.nil? + true + end + + # Custom attribute writer method with validation + # @param key_field [Object] Object to be assigned + # @!visibility private + def key_field=(key_field) + if key_field.nil? + fail ArgumentError, 'invalid value for "key_field", key_field cannot be nil.' + end + @key_field = key_field + end + + # Custom attribute writer method with validation + # @param table_id [Object] Object to be assigned + # @!visibility private + def table_id=(table_id) + if table_id.nil? + fail ArgumentError, 'invalid value for "table_id", table_id cannot be nil.' + end + @table_id = table_id + end + + # Returns the object in the form of hash, with additionalProperties support. 
+ # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + columns == o.columns && + key_field == o.key_field && + table_id == o.table_id && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [columns, key_field, table_id, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb index c7339f0900e3..f15e183edf79 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb @@ -17,7 +17,9 @@ require 'time' module DatadogAPIClient::V2 - # The `filter` processor allows conditional processing of logs based on a Datadog search query. Logs that match the `include` query are passed through; others are discarded. + # The `filter` processor allows conditional processing of logs/metrics based on a Datadog search query. Logs/metrics that match the `include` query are passed through; others are discarded. 
+ # + # **Supported pipeline types:** logs, metrics class ObservabilityPipelineFilterProcessor include BaseGenericModel @@ -30,7 +32,7 @@ class ObservabilityPipelineFilterProcessor # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). attr_reader :id - # A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped. + # A Datadog search query used to determine which logs/metrics should pass through the filter. Logs/metrics that match this query continue to downstream components; others are dropped. attr_reader :include # The processor type. The value should always be `filter`. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb index bbcc1a105056..275c21c9a672 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `fluent_bit` source ingests logs from Fluent Bit. + # + # **Supported pipeline types:** logs class ObservabilityPipelineFluentBitSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb index 1ab9185baaa1..86005282ab60 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `fluentd` source ingests logs from a Fluentd-compatible service. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineFluentdSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb index 55d8cf77969f..113af4981ae8 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb @@ -19,6 +19,8 @@ module DatadogAPIClient::V2 # The `generate_datadog_metrics` processor creates custom metrics from logs and sends them to Datadog. # Metrics can be counters, gauges, or distributions and optionally grouped by log fields. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGenerateMetricsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb index 73b6a5d47df9..93eed57d6361 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `google_chronicle` destination sends logs to Google Chronicle. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineGoogleChronicleDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb index ae95f0729d98..e1572e3804a4 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb @@ -19,6 +19,8 @@ module DatadogAPIClient::V2 # The `google_cloud_storage` destination stores logs in a Google Cloud Storage (GCS) bucket. # It requires a bucket name, GCP authentication, and metadata fields. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGoogleCloudStorageDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb index fe396691e124..f85845c5aead 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `google_pubsub` destination publishes logs to a Google Cloud Pub/Sub topic. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineGooglePubSubDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb index 73fff4e50425..9bd5196b5d83 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub subscription. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGooglePubSubSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb new file mode 100644 index 000000000000..b113b66cef97 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb @@ -0,0 +1,220 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `http_client` destination sends data to an HTTP endpoint. + # + # **Supported pipeline types:** logs, metrics + class ObservabilityPipelineHttpClientDestination + include BaseGenericModel + + # HTTP authentication strategy. + attr_accessor :auth_strategy + + # Compression configuration for HTTP requests. 
+ attr_accessor :compression + + # Encoding format for log events. + attr_reader :encoding + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the input for this component. + attr_reader :inputs + + # Configuration for enabling TLS encryption between the pipeline component and external services. + attr_accessor :tls + + # The destination type. The value should always be `http_client`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'auth_strategy' => :'auth_strategy', + :'compression' => :'compression', + :'encoding' => :'encoding', + :'id' => :'id', + :'inputs' => :'inputs', + :'tls' => :'tls', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'auth_strategy' => :'ObservabilityPipelineHttpClientDestinationAuthStrategy', + :'compression' => :'ObservabilityPipelineHttpClientDestinationCompression', + :'encoding' => :'ObservabilityPipelineHttpClientDestinationEncoding', + :'id' => :'String', + :'inputs' => :'Array', + :'tls' => :'ObservabilityPipelineTls', + :'type' => :'ObservabilityPipelineHttpClientDestinationType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestination` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if 
attributes.key?(:'auth_strategy') + self.auth_strategy = attributes[:'auth_strategy'] + end + + if attributes.key?(:'compression') + self.compression = attributes[:'compression'] + end + + if attributes.key?(:'encoding') + self.encoding = attributes[:'encoding'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'tls') + self.tls = attributes[:'tls'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @encoding.nil? + return false if @id.nil? + return false if @inputs.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param encoding [Object] Object to be assigned + # @!visibility private + def encoding=(encoding) + if encoding.nil? + fail ArgumentError, 'invalid value for "encoding", encoding cannot be nil.' + end + @encoding = encoding + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' 
+ end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + auth_strategy == o.auth_strategy && + compression == o.compression && + encoding == o.encoding && + id == o.id && + inputs == o.inputs && + tls == o.tls && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [auth_strategy, compression, encoding, id, inputs, tls, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb new file mode 100644 index 000000000000..dcb34218cfdb --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb @@ -0,0 +1,28 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # HTTP authentication strategy. + class ObservabilityPipelineHttpClientDestinationAuthStrategy + include BaseEnumModel + + NONE = "none".freeze + BASIC = "basic".freeze + BEARER = "bearer".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb new file mode 100644 index 000000000000..705e0d42f63e --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb @@ -0,0 +1,123 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Compression configuration for HTTP requests. + class ObservabilityPipelineHttpClientDestinationCompression + include BaseGenericModel + + # Compression algorithm. + attr_reader :algorithm + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. 
+ # @!visibility private + def self.attribute_map + { + :'algorithm' => :'algorithm' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'algorithm' => :'ObservabilityPipelineHttpClientDestinationCompressionAlgorithm' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestinationCompression` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'algorithm') + self.algorithm = attributes[:'algorithm'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @algorithm.nil? + true + end + + # Custom attribute writer method with validation + # @param algorithm [Object] Object to be assigned + # @!visibility private + def algorithm=(algorithm) + if algorithm.nil? + fail ArgumentError, 'invalid value for "algorithm", algorithm cannot be nil.' + end + @algorithm = algorithm + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + algorithm == o.algorithm && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [algorithm, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb new file mode 100644 index 000000000000..747413a6726c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Compression algorithm. 
+ class ObservabilityPipelineHttpClientDestinationCompressionAlgorithm + include BaseEnumModel + + GZIP = "gzip".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb new file mode 100644 index 000000000000..fd8b88c0335b --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Encoding format for log events. + class ObservabilityPipelineHttpClientDestinationEncoding + include BaseEnumModel + + JSON = "json".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb new file mode 100644 index 000000000000..dd91c9255eae --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `http_client`. + class ObservabilityPipelineHttpClientDestinationType + include BaseEnumModel + + HTTP_CLIENT = "http_client".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb index 7a11460ae036..5d71ac3e24a8 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `http_client` source scrapes logs from HTTP endpoints at regular intervals. + # + # **Supported pipeline types:** logs class ObservabilityPipelineHttpClientSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source_auth_strategy.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source_auth_strategy.rb index 4909eb48e5bf..0c50637d8f1e 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source_auth_strategy.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source_auth_strategy.rb @@ -21,6 +21,7 @@ module DatadogAPIClient::V2 class ObservabilityPipelineHttpClientSourceAuthStrategy include BaseEnumModel + NONE = "none".freeze BASIC = "basic".freeze BEARER = "bearer".freeze end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb index 80c08b3a58d7..271315644f12 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb +++ 
b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `http_server` source collects logs over HTTP POST from external services. + # + # **Supported pipeline types:** logs class ObservabilityPipelineHttpServerSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination.rb new file mode 100644 index 000000000000..5ed8aea58afb --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination.rb @@ -0,0 +1,361 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `kafka` destination sends logs to Apache Kafka topics. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineKafkaDestination + include BaseGenericModel + + # Compression codec for Kafka messages. + attr_accessor :compression + + # Encoding format for log events. + attr_reader :encoding + + # The field name to use for Kafka message headers. + attr_accessor :headers_key + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the `input` for this component. + attr_reader :inputs + + # The field name to use as the Kafka message key. + attr_accessor :key_field + + # Optional list of advanced Kafka producer configuration options, defined as key-value pairs. 
+ attr_accessor :librdkafka_options + + # Maximum time in milliseconds to wait for message delivery confirmation. + attr_reader :message_timeout_ms + + # Duration in seconds for the rate limit window. + attr_reader :rate_limit_duration_secs + + # Maximum number of messages allowed per rate limit duration. + attr_reader :rate_limit_num + + # Specifies the SASL mechanism for authenticating with a Kafka cluster. + attr_accessor :sasl + + # Socket timeout in milliseconds for network requests. + attr_reader :socket_timeout_ms + + # Configuration for enabling TLS encryption between the pipeline component and external services. + attr_accessor :tls + + # The Kafka topic name to publish logs to. + attr_reader :topic + + # The destination type. The value should always be `kafka`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'compression' => :'compression', + :'encoding' => :'encoding', + :'headers_key' => :'headers_key', + :'id' => :'id', + :'inputs' => :'inputs', + :'key_field' => :'key_field', + :'librdkafka_options' => :'librdkafka_options', + :'message_timeout_ms' => :'message_timeout_ms', + :'rate_limit_duration_secs' => :'rate_limit_duration_secs', + :'rate_limit_num' => :'rate_limit_num', + :'sasl' => :'sasl', + :'socket_timeout_ms' => :'socket_timeout_ms', + :'tls' => :'tls', + :'topic' => :'topic', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'compression' => :'ObservabilityPipelineKafkaDestinationCompression', + :'encoding' => :'ObservabilityPipelineKafkaDestinationEncoding', + :'headers_key' => :'String', + :'id' => :'String', + :'inputs' => :'Array', + :'key_field' => :'String', + :'librdkafka_options' => :'Array', + :'message_timeout_ms' => :'Integer', + :'rate_limit_duration_secs' => :'Integer', + :'rate_limit_num' => :'Integer', + :'sasl' => :'ObservabilityPipelineKafkaSasl', + :'socket_timeout_ms' => :'Integer', + :'tls' => :'ObservabilityPipelineTls', + :'topic' => :'String', + :'type' => :'ObservabilityPipelineKafkaDestinationType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaDestination` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'compression') + self.compression = attributes[:'compression'] + end + + if attributes.key?(:'encoding') + self.encoding = attributes[:'encoding'] + end + + if attributes.key?(:'headers_key') + self.headers_key = attributes[:'headers_key'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'key_field') + self.key_field = attributes[:'key_field'] + end + + if attributes.key?(:'librdkafka_options') + if (value = attributes[:'librdkafka_options']).is_a?(Array) + 
self.librdkafka_options = value + end + end + + if attributes.key?(:'message_timeout_ms') + self.message_timeout_ms = attributes[:'message_timeout_ms'] + end + + if attributes.key?(:'rate_limit_duration_secs') + self.rate_limit_duration_secs = attributes[:'rate_limit_duration_secs'] + end + + if attributes.key?(:'rate_limit_num') + self.rate_limit_num = attributes[:'rate_limit_num'] + end + + if attributes.key?(:'sasl') + self.sasl = attributes[:'sasl'] + end + + if attributes.key?(:'socket_timeout_ms') + self.socket_timeout_ms = attributes[:'socket_timeout_ms'] + end + + if attributes.key?(:'tls') + self.tls = attributes[:'tls'] + end + + if attributes.key?(:'topic') + self.topic = attributes[:'topic'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @encoding.nil? + return false if @id.nil? + return false if @inputs.nil? + return false if !@message_timeout_ms.nil? && @message_timeout_ms < 1 + return false if !@rate_limit_duration_secs.nil? && @rate_limit_duration_secs < 1 + return false if !@rate_limit_num.nil? && @rate_limit_num < 1 + return false if !@socket_timeout_ms.nil? && @socket_timeout_ms > 300000 + return false if !@socket_timeout_ms.nil? && @socket_timeout_ms < 10 + return false if @topic.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param encoding [Object] Object to be assigned + # @!visibility private + def encoding=(encoding) + if encoding.nil? + fail ArgumentError, 'invalid value for "encoding", encoding cannot be nil.' + end + @encoding = encoding + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' 
+ end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param message_timeout_ms [Object] Object to be assigned + # @!visibility private + def message_timeout_ms=(message_timeout_ms) + if !message_timeout_ms.nil? && message_timeout_ms < 1 + fail ArgumentError, 'invalid value for "message_timeout_ms", must be greater than or equal to 1.' + end + @message_timeout_ms = message_timeout_ms + end + + # Custom attribute writer method with validation + # @param rate_limit_duration_secs [Object] Object to be assigned + # @!visibility private + def rate_limit_duration_secs=(rate_limit_duration_secs) + if !rate_limit_duration_secs.nil? && rate_limit_duration_secs < 1 + fail ArgumentError, 'invalid value for "rate_limit_duration_secs", must be greater than or equal to 1.' + end + @rate_limit_duration_secs = rate_limit_duration_secs + end + + # Custom attribute writer method with validation + # @param rate_limit_num [Object] Object to be assigned + # @!visibility private + def rate_limit_num=(rate_limit_num) + if !rate_limit_num.nil? && rate_limit_num < 1 + fail ArgumentError, 'invalid value for "rate_limit_num", must be greater than or equal to 1.' + end + @rate_limit_num = rate_limit_num + end + + # Custom attribute writer method with validation + # @param socket_timeout_ms [Object] Object to be assigned + # @!visibility private + def socket_timeout_ms=(socket_timeout_ms) + if !socket_timeout_ms.nil? && socket_timeout_ms > 300000 + fail ArgumentError, 'invalid value for "socket_timeout_ms", must be smaller than or equal to 300000.' + end + if !socket_timeout_ms.nil? 
&& socket_timeout_ms < 10 + fail ArgumentError, 'invalid value for "socket_timeout_ms", must be greater than or equal to 10.' + end + @socket_timeout_ms = socket_timeout_ms + end + + # Custom attribute writer method with validation + # @param topic [Object] Object to be assigned + # @!visibility private + def topic=(topic) + if topic.nil? + fail ArgumentError, 'invalid value for "topic", topic cannot be nil.' + end + @topic = topic + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. 
+ # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + compression == o.compression && + encoding == o.encoding && + headers_key == o.headers_key && + id == o.id && + inputs == o.inputs && + key_field == o.key_field && + librdkafka_options == o.librdkafka_options && + message_timeout_ms == o.message_timeout_ms && + rate_limit_duration_secs == o.rate_limit_duration_secs && + rate_limit_num == o.rate_limit_num && + sasl == o.sasl && + socket_timeout_ms == o.socket_timeout_ms && + tls == o.tls && + topic == o.topic && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [compression, encoding, headers_key, id, inputs, key_field, librdkafka_options, message_timeout_ms, rate_limit_duration_secs, rate_limit_num, sasl, socket_timeout_ms, tls, topic, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_compression.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_compression.rb new file mode 100644 index 000000000000..ccb074bfdb21 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_compression.rb @@ -0,0 +1,30 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. 
+ +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Compression codec for Kafka messages. + class ObservabilityPipelineKafkaDestinationCompression + include BaseEnumModel + + NONE = "none".freeze + GZIP = "gzip".freeze + SNAPPY = "snappy".freeze + LZ4 = "lz4".freeze + ZSTD = "zstd".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_encoding.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_encoding.rb new file mode 100644 index 000000000000..34209b47273c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_encoding.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Encoding format for log events. + class ObservabilityPipelineKafkaDestinationEncoding + include BaseEnumModel + + JSON = "json".freeze + RAW_MESSAGE = "raw_message".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_type.rb new file mode 100644 index 000000000000..0b2f5afbda5f --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `kafka`. + class ObservabilityPipelineKafkaDestinationType + include BaseEnumModel + + KAFKA = "kafka".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_librdkafka_option.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_librdkafka_option.rb similarity index 95% rename from lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_librdkafka_option.rb rename to lib/datadog_api_client/v2/models/observability_pipeline_kafka_librdkafka_option.rb index b911c8aaf19a..bf29a784d49c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_librdkafka_option.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_librdkafka_option.rb @@ -17,8 +17,8 @@ require 'time' module DatadogAPIClient::V2 - # Represents a key-value pair used to configure low-level `librdkafka` client options for Kafka sources, such as timeouts, buffer sizes, and security settings. - class ObservabilityPipelineKafkaSourceLibrdkafkaOption + # Represents a key-value pair used to configure low-level `librdkafka` client options for Kafka source and destination, such as timeouts, buffer sizes, and security settings. + class ObservabilityPipelineKafkaLibrdkafkaOption include BaseGenericModel # The name of the `librdkafka` configuration option to set. 
@@ -52,7 +52,7 @@ def self.openapi_types # @!visibility private def initialize(attributes = {}) if (!attributes.is_a?(Hash)) - fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaSourceLibrdkafkaOption` initialize method" + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaLibrdkafkaOption` initialize method" end self.additional_properties = {} diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_sasl.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl.rb similarity index 95% rename from lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_sasl.rb rename to lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl.rb index 886923a1868f..88fb7915b9da 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_sasl.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl.rb @@ -18,7 +18,7 @@ module DatadogAPIClient::V2 # Specifies the SASL mechanism for authenticating with a Kafka cluster. - class ObservabilityPipelineKafkaSourceSasl + class ObservabilityPipelineKafkaSasl include BaseGenericModel # SASL mechanism used for Kafka authentication. 
@@ -38,7 +38,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { - :'mechanism' => :'ObservabilityPipelinePipelineKafkaSourceSaslMechanism' + :'mechanism' => :'ObservabilityPipelineKafkaSaslMechanism' } end @@ -47,7 +47,7 @@ def self.openapi_types # @!visibility private def initialize(attributes = {}) if (!attributes.is_a?(Hash)) - fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaSourceSasl` initialize method" + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaSasl` initialize method" end self.additional_properties = {} diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_pipeline_kafka_source_sasl_mechanism.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl_mechanism.rb similarity index 92% rename from lib/datadog_api_client/v2/models/observability_pipeline_pipeline_kafka_source_sasl_mechanism.rb rename to lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl_mechanism.rb index 64187c5011f0..64cdb527b7ab 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_pipeline_kafka_source_sasl_mechanism.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl_mechanism.rb @@ -18,7 +18,7 @@ module DatadogAPIClient::V2 # SASL mechanism used for Kafka authentication. 
- class ObservabilityPipelinePipelineKafkaSourceSaslMechanism + class ObservabilityPipelineKafkaSaslMechanism include BaseEnumModel PLAIN = "PLAIN".freeze diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb index 33c4b1649b15..0f3b2381f213 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `kafka` source ingests data from Apache Kafka topics. + # + # **Supported pipeline types:** logs class ObservabilityPipelineKafkaSource include BaseGenericModel @@ -64,8 +66,8 @@ def self.openapi_types { :'group_id' => :'String', :'id' => :'String', - :'librdkafka_options' => :'Array', - :'sasl' => :'ObservabilityPipelineKafkaSourceSasl', + :'librdkafka_options' => :'Array', + :'sasl' => :'ObservabilityPipelineKafkaSasl', :'tls' => :'ObservabilityPipelineTls', :'topics' => :'Array', :'type' => :'ObservabilityPipelineKafkaSourceType' diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb index 3a9550b0bb40..ce22b2fe3500 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `logstash` source ingests logs from a Logstash forwarder. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineLogstashSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb new file mode 100644 index 000000000000..52916ac7691d --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb @@ -0,0 +1,229 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `metric_tags` processor filters metrics based on their tags using Datadog tag key patterns. + # + # **Supported pipeline types:** metrics + class ObservabilityPipelineMetricTagsProcessor + include BaseGenericModel + + # The display name for a component. + attr_accessor :display_name + + # Whether this processor is enabled. + attr_reader :enabled + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which metrics this processor targets. + attr_reader :include + + # A list of rules for filtering metric tags. + attr_reader :rules + + # The processor type. The value should always be `metric_tags`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. 
+ # @!visibility private + def self.attribute_map + { + :'display_name' => :'display_name', + :'enabled' => :'enabled', + :'id' => :'id', + :'include' => :'include', + :'rules' => :'rules', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'display_name' => :'String', + :'enabled' => :'Boolean', + :'id' => :'String', + :'include' => :'String', + :'rules' => :'Array', + :'type' => :'ObservabilityPipelineMetricTagsProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'display_name') + self.display_name = attributes[:'display_name'] + end + + if attributes.key?(:'enabled') + self.enabled = attributes[:'enabled'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'rules') + if (value = attributes[:'rules']).is_a?(Array) + self.rules = value + end + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @enabled.nil? + return false if @id.nil? + return false if @include.nil? + return false if @rules.nil? 
+ return false if @rules.length > 100 + return false if @rules.length < 1 + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param enabled [Object] Object to be assigned + # @!visibility private + def enabled=(enabled) + if enabled.nil? + fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.' + end + @enabled = enabled + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param rules [Object] Object to be assigned + # @!visibility private + def rules=(rules) + if rules.nil? + fail ArgumentError, 'invalid value for "rules", rules cannot be nil.' + end + if rules.length > 100 + fail ArgumentError, 'invalid value for "rules", number of items must be less than or equal to 100.' + end + if rules.length < 1 + fail ArgumentError, 'invalid value for "rules", number of items must be greater than or equal to 1.' + end + @rules = rules + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + display_name == o.display_name && + enabled == o.enabled && + id == o.id && + include == o.include && + rules == o.rules && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [display_name, enabled, id, include, rules, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb new file mode 100644 index 000000000000..fb083ab51a1a --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb @@ -0,0 +1,167 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Defines a rule for filtering metric tags based on key patterns. 
+ class ObservabilityPipelineMetricTagsProcessorRule + include BaseGenericModel + + # The action to take on tags with matching keys. + attr_reader :action + + # A list of tag keys to include or exclude. + attr_reader :keys + + # The processing mode for tag filtering. + attr_reader :mode + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'action' => :'action', + :'keys' => :'keys', + :'mode' => :'mode' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'action' => :'ObservabilityPipelineMetricTagsProcessorRuleAction', + :'keys' => :'Array', + :'mode' => :'ObservabilityPipelineMetricTagsProcessorRuleMode' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessorRule` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'action') + self.action = attributes[:'action'] + end + + if attributes.key?(:'keys') + if (value = attributes[:'keys']).is_a?(Array) + self.keys = value + end + end + + if attributes.key?(:'mode') + self.mode = attributes[:'mode'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @action.nil? + return false if @keys.nil? + return false if @mode.nil? 
+ true + end + + # Custom attribute writer method with validation + # @param action [Object] Object to be assigned + # @!visibility private + def action=(action) + if action.nil? + fail ArgumentError, 'invalid value for "action", action cannot be nil.' + end + @action = action + end + + # Custom attribute writer method with validation + # @param keys [Object] Object to be assigned + # @!visibility private + def keys=(keys) + if keys.nil? + fail ArgumentError, 'invalid value for "keys", keys cannot be nil.' + end + @keys = keys + end + + # Custom attribute writer method with validation + # @param mode [Object] Object to be assigned + # @!visibility private + def mode=(mode) + if mode.nil? + fail ArgumentError, 'invalid value for "mode", mode cannot be nil.' + end + @mode = mode + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + action == o.action && + keys == o.keys && + mode == o.mode && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [action, keys, mode, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb new file mode 100644 index 000000000000..253bdb73f64b --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The action to take on tags with matching keys. + class ObservabilityPipelineMetricTagsProcessorRuleAction + include BaseEnumModel + + INCLUDE = "include".freeze + EXCLUDE = "exclude".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb new file mode 100644 index 000000000000..019343fe1a3c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processing mode for tag filtering. + class ObservabilityPipelineMetricTagsProcessorRuleMode + include BaseEnumModel + + FILTER = "filter".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb new file mode 100644 index 000000000000..0ddd08d1d7cd --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `metric_tags`. 
+ class ObservabilityPipelineMetricTagsProcessorType + include BaseEnumModel + + METRIC_TAGS = "metric_tags".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb index d8705a24d962..79118217199f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `new_relic` destination sends logs to the New Relic platform. + # + # **Supported pipeline types:** logs class ObservabilityPipelineNewRelicDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb index a483ca5bbde0..c76613e22bc9 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `ocsf_mapper` processor transforms logs into the OCSF schema using a predefined mapping configuration. + # + # **Supported pipeline types:** logs class ObservabilityPipelineOcsfMapperProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb index 2120c2eef855..09331c31ee2b 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `opensearch` destination writes logs to an OpenSearch cluster. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineOpenSearchDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source.rb new file mode 100644 index 000000000000..950d2cfdf9c2 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source.rb @@ -0,0 +1,176 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `opentelemetry` source receives telemetry data using the OpenTelemetry Protocol (OTLP) over gRPC and HTTP. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineOpentelemetrySource + include BaseGenericModel + + # Environment variable name containing the gRPC server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + attr_accessor :grpc_address_key + + # Environment variable name containing the HTTP server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + attr_accessor :http_address_key + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + attr_reader :id + + # Configuration for enabling TLS encryption between the pipeline component and external services. 
+ attr_accessor :tls + + # The source type. The value should always be `opentelemetry`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'grpc_address_key' => :'grpc_address_key', + :'http_address_key' => :'http_address_key', + :'id' => :'id', + :'tls' => :'tls', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'grpc_address_key' => :'String', + :'http_address_key' => :'String', + :'id' => :'String', + :'tls' => :'ObservabilityPipelineTls', + :'type' => :'ObservabilityPipelineOpentelemetrySourceType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineOpentelemetrySource` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'grpc_address_key') + self.grpc_address_key = attributes[:'grpc_address_key'] + end + + if attributes.key?(:'http_address_key') + self.http_address_key = attributes[:'http_address_key'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'tls') + self.tls = attributes[:'tls'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? 
+ return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + grpc_address_key == o.grpc_address_key && + http_address_key == o.http_address_key && + id == o.id && + tls == o.tls && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [grpc_address_key, http_address_key, id, tls, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source_type.rb new file mode 100644 index 000000000000..30df02a5c2ef --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The source type. The value should always be `opentelemetry`. + class ObservabilityPipelineOpentelemetrySourceType + include BaseEnumModel + + OPENTELEMETRY = "opentelemetry".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb index 3fa75b46f8ae..d93b1dc06f7b 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineParseGrokProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb index ef5839c3709c..c692004ed65a 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. + # + # **Supported pipeline types:** logs class ObservabilityPipelineParseJSONProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor.rb new file mode 100644 index 000000000000..40c7302b40fe --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor.rb @@ -0,0 +1,300 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `parse_xml` processor parses XML from a specified field and extracts it into the event. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineParseXMLProcessor + include BaseGenericModel + + # Whether to always use a text key for element content. 
+ attr_accessor :always_use_text_key + + # The prefix to use for XML attributes in the parsed output. + attr_accessor :attr_prefix + + # The display name for a component. + attr_accessor :display_name + + # Whether this processor is enabled. + attr_reader :enabled + + # The name of the log field that contains an XML string. + attr_reader :field + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. + attr_reader :include + + # Whether to include XML attributes in the parsed output. + attr_accessor :include_attr + + # Whether to parse boolean values from strings. + attr_accessor :parse_bool + + # Whether to parse null values. + attr_accessor :parse_null + + # Whether to parse numeric values from strings. + attr_accessor :parse_number + + # The key name to use for text content within XML elements. Must be at least 1 character if specified. + attr_reader :text_key + + # The processor type. The value should always be `parse_xml`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'always_use_text_key' => :'always_use_text_key', + :'attr_prefix' => :'attr_prefix', + :'display_name' => :'display_name', + :'enabled' => :'enabled', + :'field' => :'field', + :'id' => :'id', + :'include' => :'include', + :'include_attr' => :'include_attr', + :'parse_bool' => :'parse_bool', + :'parse_null' => :'parse_null', + :'parse_number' => :'parse_number', + :'text_key' => :'text_key', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'always_use_text_key' => :'Boolean', + :'attr_prefix' => :'String', + :'display_name' => :'String', + :'enabled' => :'Boolean', + :'field' => :'String', + :'id' => :'String', + :'include' => :'String', + :'include_attr' => :'Boolean', + :'parse_bool' => :'Boolean', + :'parse_null' => :'Boolean', + :'parse_number' => :'Boolean', + :'text_key' => :'String', + :'type' => :'ObservabilityPipelineParseXMLProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineParseXMLProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'always_use_text_key') + self.always_use_text_key = attributes[:'always_use_text_key'] + end + + if attributes.key?(:'attr_prefix') + self.attr_prefix = attributes[:'attr_prefix'] + end + + if attributes.key?(:'display_name') + self.display_name = attributes[:'display_name'] + end + + if attributes.key?(:'enabled') + self.enabled = attributes[:'enabled'] + end + + if attributes.key?(:'field') + self.field = attributes[:'field'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'include_attr') + self.include_attr = attributes[:'include_attr'] + end + + if attributes.key?(:'parse_bool') + self.parse_bool = attributes[:'parse_bool'] + end + + if attributes.key?(:'parse_null') + self.parse_null = 
attributes[:'parse_null'] + end + + if attributes.key?(:'parse_number') + self.parse_number = attributes[:'parse_number'] + end + + if attributes.key?(:'text_key') + self.text_key = attributes[:'text_key'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @enabled.nil? + return false if @field.nil? + return false if @id.nil? + return false if @include.nil? + return false if !@text_key.nil? && @text_key.to_s.length < 1 + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param enabled [Object] Object to be assigned + # @!visibility private + def enabled=(enabled) + if enabled.nil? + fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.' + end + @enabled = enabled + end + + # Custom attribute writer method with validation + # @param field [Object] Object to be assigned + # @!visibility private + def field=(field) + if field.nil? + fail ArgumentError, 'invalid value for "field", field cannot be nil.' + end + @field = field + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param text_key [Object] Object to be assigned + # @!visibility private + def text_key=(text_key) + if !text_key.nil? 
&& text_key.to_s.length < 1 + fail ArgumentError, 'invalid value for "text_key", the character length must be great than or equal to 1.' + end + @text_key = text_key + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + always_use_text_key == o.always_use_text_key && + attr_prefix == o.attr_prefix && + display_name == o.display_name && + enabled == o.enabled && + field == o.field && + id == o.id && + include == o.include && + include_attr == o.include_attr && + parse_bool == o.parse_bool && + parse_null == o.parse_null && + parse_number == o.parse_number && + text_key == o.text_key && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [always_use_text_key, attr_prefix, display_name, enabled, field, id, include, include_attr, parse_bool, parse_null, parse_number, text_key, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor_type.rb new file mode 100644 index 000000000000..c75fd6adfa84 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `parse_xml`. + class ObservabilityPipelineParseXMLProcessorType + include BaseEnumModel + + PARSE_XML = "parse_xml".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb index 48b8ef8622a8..c2ddfccc780e 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb @@ -17,14 +17,16 @@ require 'time' module DatadogAPIClient::V2 - # The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. 
+ # The `quota` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + # + # **Supported pipeline types:** logs class ObservabilityPipelineQuotaProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # If set to `true`, logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. + # If set to `true`, logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note**: You can set either `drop_events` or `overflow_action`, but not both. attr_accessor :drop_events # Whether this processor is enabled. @@ -45,7 +47,7 @@ class ObservabilityPipelineQuotaProcessor # Name of the quota. attr_reader :name - # The action to take when the quota is exceeded. Options: + # The action to take when the quota or bucket limit is exceeded. Options: # - `drop`: Drop the event. # - `no_action`: Let the event pass through. # - `overflow_routing`: Route to an overflow destination. @@ -57,6 +59,12 @@ class ObservabilityPipelineQuotaProcessor # A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. attr_accessor :partition_fields + # The action to take when the quota or bucket limit is exceeded. Options: + # - `drop`: Drop the event. + # - `no_action`: Let the event pass through. + # - `overflow_routing`: Route to an overflow destination. + attr_accessor :too_many_buckets_action + # The processor type. The value should always be `quota`. 
attr_reader :type @@ -77,6 +85,7 @@ def self.attribute_map :'overflow_action' => :'overflow_action', :'overrides' => :'overrides', :'partition_fields' => :'partition_fields', + :'too_many_buckets_action' => :'too_many_buckets_action', :'type' => :'type' } end @@ -96,6 +105,7 @@ def self.openapi_types :'overflow_action' => :'ObservabilityPipelineQuotaProcessorOverflowAction', :'overrides' => :'Array', :'partition_fields' => :'Array', + :'too_many_buckets_action' => :'ObservabilityPipelineQuotaProcessorOverflowAction', :'type' => :'ObservabilityPipelineQuotaProcessorType' } end @@ -166,6 +176,10 @@ def initialize(attributes = {}) end end + if attributes.key?(:'too_many_buckets_action') + self.too_many_buckets_action = attributes[:'too_many_buckets_action'] + end + if attributes.key?(:'type') self.type = attributes[:'type'] end @@ -281,6 +295,7 @@ def ==(o) overflow_action == o.overflow_action && overrides == o.overrides && partition_fields == o.partition_fields && + too_many_buckets_action == o.too_many_buckets_action && type == o.type && additional_properties == o.additional_properties end @@ -289,7 +304,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [display_name, drop_events, enabled, id, ignore_when_missing_partitions, include, limit, name, overflow_action, overrides, partition_fields, type, additional_properties].hash + [display_name, drop_events, enabled, id, ignore_when_missing_partitions, include, limit, name, overflow_action, overrides, partition_fields, too_many_buckets_action, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb index 4990af20ad61..06bc2d264f1f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb +++ 
b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb @@ -17,7 +17,7 @@ require 'time' module DatadogAPIClient::V2 - # The action to take when the quota is exceeded. Options: + # The action to take when the quota or bucket limit is exceeded. Options: # - `drop`: Drop the event. # - `no_action`: Let the event pass through. # - `overflow_routing`: Route to an overflow destination. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb index 1e5b4c0c8db8..6a638e3ef62d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `reduce` processor aggregates and merges logs based on matching keys and merge strategies. + # + # **Supported pipeline types:** logs class ObservabilityPipelineReduceProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb index a6c12fffa560..cb54c09855b3 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `remove_fields` processor deletes specified fields from logs. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineRemoveFieldsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb index a0ab10e93ae8..321306537891 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `rename_fields` processor changes field names. + # + # **Supported pipeline types:** logs class ObservabilityPipelineRenameFieldsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb index 4c15ba87ef60..c3151af7153c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. + # + # **Supported pipeline types:** logs class ObservabilityPipelineRsyslogDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb index 51a55a2daf12..1f343d8e4fe8 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` server using the syslog protocol. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineRsyslogSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb index e8e7e7af2b7b..4ae281e2224e 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sample` processor allows probabilistic sampling of logs at a fixed rate. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSampleProcessor include BaseGenericModel @@ -27,6 +29,9 @@ class ObservabilityPipelineSampleProcessor # Whether this processor is enabled. attr_reader :enabled + # Optional list of fields to group events by. Each group is sampled independently. + attr_reader :group_by + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). attr_reader :id @@ -34,10 +39,7 @@ class ObservabilityPipelineSampleProcessor attr_reader :include # The percentage of logs to sample. - attr_accessor :percentage - - # Number of events to sample (1 in N). - attr_reader :rate + attr_reader :percentage # The processor type. The value should always be `sample`. 
attr_reader :type @@ -50,10 +52,10 @@ def self.attribute_map { :'display_name' => :'display_name', :'enabled' => :'enabled', + :'group_by' => :'group_by', :'id' => :'id', :'include' => :'include', :'percentage' => :'percentage', - :'rate' => :'rate', :'type' => :'type' } end @@ -64,10 +66,10 @@ def self.openapi_types { :'display_name' => :'String', :'enabled' => :'Boolean', + :'group_by' => :'Array', :'id' => :'String', :'include' => :'String', :'percentage' => :'Float', - :'rate' => :'Integer', :'type' => :'ObservabilityPipelineSampleProcessorType' } end @@ -98,6 +100,12 @@ def initialize(attributes = {}) self.enabled = attributes[:'enabled'] end + if attributes.key?(:'group_by') + if (value = attributes[:'group_by']).is_a?(Array) + self.group_by = value + end + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -110,10 +118,6 @@ def initialize(attributes = {}) self.percentage = attributes[:'percentage'] end - if attributes.key?(:'rate') - self.rate = attributes[:'rate'] - end - if attributes.key?(:'type') self.type = attributes[:'type'] end @@ -124,9 +128,10 @@ def initialize(attributes = {}) # @!visibility private def valid? return false if @enabled.nil? + return false if !@group_by.nil? && @group_by.length < 1 return false if @id.nil? return false if @include.nil? - return false if !@rate.nil? && @rate < 1 + return false if @percentage.nil? return false if @type.nil? true end @@ -141,6 +146,16 @@ def enabled=(enabled) @enabled = enabled end + # Custom attribute writer method with validation + # @param group_by [Object] Object to be assigned + # @!visibility private + def group_by=(group_by) + if !group_by.nil? && group_by.length < 1 + fail ArgumentError, 'invalid value for "group_by", number of items must be greater than or equal to 1.' 
+ end + @group_by = group_by + end + # Custom attribute writer method with validation # @param id [Object] Object to be assigned # @!visibility private @@ -162,13 +177,13 @@ def include=(include) end # Custom attribute writer method with validation - # @param rate [Object] Object to be assigned + # @param percentage [Object] Object to be assigned # @!visibility private - def rate=(rate) - if !rate.nil? && rate < 1 - fail ArgumentError, 'invalid value for "rate", must be greater than or equal to 1.' + def percentage=(percentage) + if percentage.nil? + fail ArgumentError, 'invalid value for "percentage", percentage cannot be nil.' end - @rate = rate + @percentage = percentage end # Custom attribute writer method with validation @@ -209,10 +224,10 @@ def ==(o) self.class == o.class && display_name == o.display_name && enabled == o.enabled && + group_by == o.group_by && id == o.id && include == o.include && percentage == o.percentage && - rate == o.rate && type == o.type && additional_properties == o.additional_properties end @@ -221,7 +236,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [display_name, enabled, id, include, percentage, rate, type, additional_properties].hash + [display_name, enabled, group_by, id, include, percentage, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb index 898739512aa8..ae85fbb9e623 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sensitive_data_scanner` processor detects and optionally redacts sensitive data in log events. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineSensitiveDataScannerProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb index e7da9561356a..bd5edd5fb419 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions include BaseGenericModel + # Human-readable description providing context about a sensitive data scanner rule + attr_accessor :description + # A regular expression used to detect sensitive values. Must be a valid regex. attr_reader :rule @@ -30,6 +33,7 @@ class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions # @!visibility private def self.attribute_map { + :'description' => :'description', :'rule' => :'rule' } end @@ -38,6 +42,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'description' => :'String', :'rule' => :'String' } end @@ -60,6 +65,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'description') + self.description = attributes[:'description'] + end + if attributes.key?(:'rule') self.rule = attributes[:'rule'] end @@ -109,6 +118,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + description == o.description && rule == o.rule && additional_properties == o.additional_properties end @@ -117,7 +127,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [rule, additional_properties].hash + [description, rule, additional_properties].hash end end end diff --git 
a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb index d565d68bb035..b4c3b6d952b4 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions include BaseGenericModel + # Human-readable description providing context about a sensitive data scanner rule + attr_accessor :description + # Identifier for a predefined pattern from the sensitive data scanner pattern library. attr_reader :id @@ -33,6 +36,7 @@ class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions # @!visibility private def self.attribute_map { + :'description' => :'description', :'id' => :'id', :'use_recommended_keywords' => :'use_recommended_keywords' } @@ -42,6 +46,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'description' => :'String', :'id' => :'String', :'use_recommended_keywords' => :'Boolean' } @@ -65,6 +70,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'description') + self.description = attributes[:'description'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -118,6 +127,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + description == o.description && id == o.id && use_recommended_keywords == o.use_recommended_keywords && additional_properties == o.additional_properties @@ -127,7 +137,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, use_recommended_keywords, additional_properties].hash + [description, id, use_recommended_keywords, 
additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb index 9d0af0150f23..4b3548624a76 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sentinel_one` destination sends logs to SentinelOne. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSentinelOneDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb index bbe025820793..5a8c561616cc 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `socket` destination sends logs over TCP or UDP to a remote server. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSocketDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb index 17356b2960a3..b1c85ce0dabf 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `socket` source ingests logs over TCP or UDP. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineSocketSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor.rb new file mode 100644 index 000000000000..13cce027aa6d --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor.rb @@ -0,0 +1,229 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `split_array` processor splits array fields into separate events based on configured rules. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineSplitArrayProcessor + include BaseGenericModel + + # A list of array split configurations. + attr_reader :arrays + + # The display name for a component. + attr_accessor :display_name + + # Whether this processor is enabled. + attr_reader :enabled + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. For split_array, this should typically be `*`. + attr_reader :include + + # The processor type. The value should always be `split_array`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. 
+ # @!visibility private + def self.attribute_map + { + :'arrays' => :'arrays', + :'display_name' => :'display_name', + :'enabled' => :'enabled', + :'id' => :'id', + :'include' => :'include', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'arrays' => :'Array', + :'display_name' => :'String', + :'enabled' => :'Boolean', + :'id' => :'String', + :'include' => :'String', + :'type' => :'ObservabilityPipelineSplitArrayProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'arrays') + if (value = attributes[:'arrays']).is_a?(Array) + self.arrays = value + end + end + + if attributes.key?(:'display_name') + self.display_name = attributes[:'display_name'] + end + + if attributes.key?(:'enabled') + self.enabled = attributes[:'enabled'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @arrays.nil? + return false if @arrays.length > 15 + return false if @arrays.length < 1 + return false if @enabled.nil? + return false if @id.nil? 
+ return false if @include.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param arrays [Object] Object to be assigned + # @!visibility private + def arrays=(arrays) + if arrays.nil? + fail ArgumentError, 'invalid value for "arrays", arrays cannot be nil.' + end + if arrays.length > 15 + fail ArgumentError, 'invalid value for "arrays", number of items must be less than or equal to 15.' + end + if arrays.length < 1 + fail ArgumentError, 'invalid value for "arrays", number of items must be greater than or equal to 1.' + end + @arrays = arrays + end + + # Custom attribute writer method with validation + # @param enabled [Object] Object to be assigned + # @!visibility private + def enabled=(enabled) + if enabled.nil? + fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.' + end + @enabled = enabled + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + arrays == o.arrays && + display_name == o.display_name && + enabled == o.enabled && + id == o.id && + include == o.include && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [arrays, display_name, enabled, id, include, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_array_config.rb b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_array_config.rb new file mode 100644 index 000000000000..50595a668515 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_array_config.rb @@ -0,0 +1,144 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Configuration for a single array split operation. 
+ class ObservabilityPipelineSplitArrayProcessorArrayConfig + include BaseGenericModel + + # The path to the array field to split. + attr_reader :field + + # A Datadog search query used to determine which logs this array split operation targets. + attr_reader :include + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'field' => :'field', + :'include' => :'include' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'field' => :'String', + :'include' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessorArrayConfig` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'field') + self.field = attributes[:'field'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @field.nil? + return false if @include.nil? + true + end + + # Custom attribute writer method with validation + # @param field [Object] Object to be assigned + # @!visibility private + def field=(field) + if field.nil? + fail ArgumentError, 'invalid value for "field", field cannot be nil.' 
+ end + @field = field + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + field == o.field && + include == o.include && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [field, include, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_type.rb new file mode 100644 index 000000000000..fcb416f78acb --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
require 'date'
require 'time'

module DatadogAPIClient::V2
  # The processor type. The value should always be `split_array`.
  class ObservabilityPipelineSplitArrayProcessorType
    include BaseEnumModel

    # Sole accepted value for the `split_array` processor's `type` field.
    SPLIT_ARRAY = 'split_array'.freeze
  end
end
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineSplunkHecSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb index 66d3633cd1b2..2f91c150b701 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb @@ -19,6 +19,8 @@ module DatadogAPIClient::V2 # The `splunk_tcp` source receives logs from a Splunk Universal Forwarder over TCP. # TLS is supported for secure transmission. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSplunkTcpSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb index 6193fb57f020..7a3c31742c5f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sumo_logic` destination forwards logs to Sumo Logic. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSumoLogicDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb index e4b3bcf3c833..5840844f2764 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sumo_logic` source receives logs from Sumo Logic collectors. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineSumoLogicSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb index 045dcfe90403..132ffd70a899 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSyslogNgDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb index 85f107e721cd..2666604e126a 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `syslog_ng` source listens for logs over TCP or UDP from a `syslog-ng` server using the syslog protocol. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSyslogNgSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb index 3d19ef6446cf..74750aec7197 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `throttle` processor limits the number of events that pass through over a given time window. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineThrottleProcessor include BaseGenericModel