From dfb2bbe0b0c73e8cef186e05f7afde4fcae7f299 Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Mon, 22 Dec 2025 10:39:46 +0000 Subject: [PATCH] Regenerate client from commit 4fdb1f5 of spec repo --- .generator/schemas/v2/openapi.yaml | 676 +++++++++++++++--- features/v2/observability_pipelines.feature | 4 +- lib/datadog_api_client/inflector.rb | 14 + .../v2/models/azure_storage_destination.rb | 2 + .../models/microsoft_sentinel_destination.rb | 2 + ...ability_pipeline_add_env_vars_processor.rb | 2 + ...rvability_pipeline_add_fields_processor.rb | 2 + ...ty_pipeline_amazon_data_firehose_source.rb | 2 + ...pipeline_amazon_open_search_destination.rb | 2 + ...vability_pipeline_amazon_s3_destination.rb | 2 + ...observability_pipeline_amazon_s3_source.rb | 2 + ...peline_amazon_security_lake_destination.rb | 2 + .../models/observability_pipeline_config.rb | 12 +- ...bility_pipeline_config_destination_item.rb | 28 +- ...rvability_pipeline_config_pipeline_type.rb | 27 + ...vability_pipeline_config_processor_item.rb | 21 +- ...servability_pipeline_config_source_item.rb | 20 +- ..._crowd_strike_next_gen_siem_destination.rb | 2 + ...observability_pipeline_custom_processor.rb | 2 + ...rvability_pipeline_datadog_agent_source.rb | 4 +- ...ility_pipeline_datadog_logs_destination.rb | 2 + ...ty_pipeline_datadog_metrics_destination.rb | 169 +++++ ...peline_datadog_metrics_destination_type.rb | 26 + ...ability_pipeline_datadog_tags_processor.rb | 2 + ...observability_pipeline_dedupe_processor.rb | 2 + ...lity_pipeline_elasticsearch_destination.rb | 2 + ...ity_pipeline_enrichment_table_processor.rb | 2 + ...observability_pipeline_filter_processor.rb | 6 +- ...bservability_pipeline_fluent_bit_source.rb | 2 + .../observability_pipeline_fluentd_source.rb | 2 + ...ity_pipeline_generate_metrics_processor.rb | 2 + ...y_pipeline_google_chronicle_destination.rb | 2 + ...peline_google_cloud_storage_destination.rb | 2 + ...ity_pipeline_google_pub_sub_destination.rb | 2 + ...vability_pipeline_google_pub_sub_source.rb | 2 + ...bility_pipeline_http_client_destination.rb | 220 ++++++ ...e_http_client_destination_auth_strategy.rb | 27 + ...ine_http_client_destination_compression.rb | 123 ++++ ...lient_destination_compression_algorithm.rb | 26 + ...peline_http_client_destination_encoding.rb | 26 + ...y_pipeline_http_client_destination_type.rb | 26 + ...servability_pipeline_http_client_source.rb | 2 + ...servability_pipeline_http_server_source.rb | 2 + .../observability_pipeline_kafka_source.rb | 2 + .../observability_pipeline_logstash_source.rb | 2 + ...vability_pipeline_metric_tags_processor.rb | 219 ++++++ ...ity_pipeline_metric_tags_processor_rule.rb | 167 +++++ ...eline_metric_tags_processor_rule_action.rb | 27 + ...ipeline_metric_tags_processor_rule_mode.rb | 26 + ...ity_pipeline_metric_tags_processor_type.rb | 26 + ...vability_pipeline_new_relic_destination.rb | 2 + ...vability_pipeline_ocsf_mapper_processor.rb | 2 + ...bility_pipeline_open_search_destination.rb | 2 + ...rvability_pipeline_parse_grok_processor.rb | 2 + ...rvability_pipeline_parse_json_processor.rb | 2 + .../observability_pipeline_quota_processor.rb | 4 +- ...observability_pipeline_reduce_processor.rb | 2 + ...bility_pipeline_remove_fields_processor.rb | 2 + ...bility_pipeline_rename_fields_processor.rb | 2 + ...ervability_pipeline_rsyslog_destination.rb | 2 + .../observability_pipeline_rsyslog_source.rb | 2 + ...observability_pipeline_sample_processor.rb | 2 + ...peline_sensitive_data_scanner_processor.rb | 2 + 
...ility_pipeline_sentinel_one_destination.rb | 2 + ...servability_pipeline_socket_destination.rb | 2 + .../observability_pipeline_socket_source.rb | 2 + ...ability_pipeline_splunk_hec_destination.rb | 2 + ...bservability_pipeline_splunk_hec_source.rb | 2 + ...bservability_pipeline_splunk_tcp_source.rb | 2 + ...ability_pipeline_sumo_logic_destination.rb | 2 + ...bservability_pipeline_sumo_logic_source.rb | 2 + ...vability_pipeline_syslog_ng_destination.rb | 2 + ...observability_pipeline_syslog_ng_source.rb | 2 + ...servability_pipeline_throttle_processor.rb | 2 + 74 files changed, 1890 insertions(+), 134 deletions(-) create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index cfbcb7286ac..203e7aa0447 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -6570,8 +6570,11 @@ components: x-enum-varnames: - AZURE_SCAN_OPTIONS AzureStorageDestination: - description: The `azure_storage` destination forwards logs to an Azure Blob + description: 'The `azure_storage` destination forwards logs to an Azure Blob Storage container. + + + **Supported pipeline types:** logs' properties: blob_prefix: description: Optional prefix for blobs written to the container. @@ -6602,6 +6605,8 @@ components: - inputs - container_name type: object + x-pipeline-types: + - logs AzureStorageDestinationType: default: azure_storage description: The destination type. The value should always be `azure_storage`. @@ -33395,8 +33400,11 @@ components: - query type: object MicrosoftSentinelDestination: - description: The `microsoft_sentinel` destination forwards logs to Microsoft + description: 'The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. + + + **Supported pipeline types:** logs' properties: client_id: description: Azure AD client ID used for authentication. 
@@ -33437,6 +33445,8 @@ components: - dcr_immutable_id - table type: object + x-pipeline-types: + - logs MicrosoftSentinelDestinationType: default: microsoft_sentinel description: The destination type. The value should always be `microsoft_sentinel`. @@ -35141,8 +35151,11 @@ components: - data type: object ObservabilityPipelineAddEnvVarsProcessor: - description: The `add_env_vars` processor adds environment variable values to - log events. + description: 'The `add_env_vars` processor adds environment variable values + to log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35174,6 +35187,8 @@ components: - variables - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddEnvVarsProcessorType: default: add_env_vars description: The processor type. The value should always be `add_env_vars`. @@ -35199,7 +35214,10 @@ components: - name type: object ObservabilityPipelineAddFieldsProcessor: - description: The `add_fields` processor adds static key-value fields to logs. + description: 'The `add_fields` processor adds static key-value fields to logs. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35233,6 +35251,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddFieldsProcessorType: default: add_fields description: The processor type. The value should always be `add_fields`. @@ -35243,7 +35263,10 @@ components: x-enum-varnames: - ADD_FIELDS ObservabilityPipelineAmazonDataFirehoseSource: - description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + description: 'The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35261,6 +35284,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonDataFirehoseSourceType: default: amazon_data_firehose description: The source type. The value should always be `amazon_data_firehose`. @@ -35271,7 +35296,10 @@ components: x-enum-varnames: - AMAZON_DATA_FIREHOSE ObservabilityPipelineAmazonOpenSearchDestination: - description: The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + description: 'The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' @@ -35299,6 +35327,8 @@ components: - inputs - auth type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonOpenSearchDestinationAuth: description: 'Authentication settings for the Amazon OpenSearch destination. @@ -35342,8 +35372,11 @@ components: x-enum-varnames: - AMAZON_OPENSEARCH ObservabilityPipelineAmazonS3Destination: - description: The `amazon_s3` destination sends your logs in Datadog-rehydratable + description: 'The `amazon_s3` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35384,6 +35417,8 @@ components: - region - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3DestinationStorageClass: description: S3 storage class. 
enum: @@ -35420,7 +35455,10 @@ components: ObservabilityPipelineAmazonS3Source: description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket. - It supports AWS authentication and TLS encryption.' + It supports AWS authentication and TLS encryption. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35443,6 +35481,8 @@ components: - type - region type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3SourceType: default: amazon_s3 description: The source type. Always `amazon_s3`. @@ -35453,8 +35493,11 @@ components: x-enum-varnames: - AMAZON_S3 ObservabilityPipelineAmazonSecurityLakeDestination: - description: The `amazon_security_lake` destination sends your logs to Amazon + description: 'The `amazon_security_lake` destination sends your logs to Amazon Security Lake. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35494,6 +35537,8 @@ components: - region - custom_source_name type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonSecurityLakeDestinationType: default: amazon_security_lake description: The destination type. Always `amazon_security_lake`. @@ -35537,6 +35582,8 @@ components: items: $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem' type: array + pipeline_type: + $ref: '#/components/schemas/ObservabilityPipelineConfigPipelineType' processors: description: A list of processor groups that transform or enrich log data. example: @@ -35573,25 +35620,38 @@ components: ObservabilityPipelineConfigDestinationItem: description: A destination for the pipeline. oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' - - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - $ref: '#/components/schemas/AzureStorageDestination' - - $ref: '#/components/schemas/MicrosoftSentinelDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/MicrosoftSentinelDestination' - $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' + - $ref: 
'#/components/schemas/ObservabilityPipelineRsyslogDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineSocketDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' - - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestination' + ObservabilityPipelineConfigPipelineType: + default: logs + description: The type of data being ingested. Defaults to `logs` if not specified. + enum: + - logs + - metrics + example: logs + type: string + x-enum-varnames: + - LOGS + - METRICS ObservabilityPipelineConfigProcessorGroup: description: A group of processors. example: @@ -35665,45 +35725,49 @@ components: description: A processor for the pipeline. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. 
oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Source' - - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineFluentBitSource' - - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' + - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSource' - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - $ref: '#/components/schemas/ObservabilityPipelineSocketSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' ObservabilityPipelineCrowdStrikeNextGenSiemDestination: - description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike + description: 'The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. + + + **Supported pipeline types:** logs' properties: compression: $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression' @@ -35731,6 +35795,8 @@ components: - inputs - encoding type: object + x-pipeline-types: + - logs ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression: description: Compression configuration for log events. properties: @@ -35774,9 +35840,12 @@ components: x-enum-varnames: - CROWDSTRIKE_NEXT_GEN_SIEM ObservabilityPipelineCustomProcessor: - description: The `custom_processor` processor transforms events using [Vector + description: 'The `custom_processor` processor transforms events using [Vector Remap Language (VRL)](https://vector.dev/docs/reference/vrl/) scripts with advanced filtering capabilities. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35810,6 +35879,8 @@ components: - remaps - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineCustomProcessorRemap: description: Defines a single VRL remap rule with its own filtering and transformation logic. @@ -35885,7 +35956,11 @@ components: - config type: object ObservabilityPipelineDatadogAgentSource: - description: The `datadog_agent` source collects logs from the Datadog Agent. + description: 'The `datadog_agent` source collects logs/metrics from the Datadog + Agent. + + + **Supported pipeline types:** logs, metrics' properties: id: description: The unique identifier for this component. 
Used to reference @@ -35901,6 +35976,9 @@ components: - id - type type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineDatadogAgentSourceType: default: datadog_agent description: The source type. The value should always be `datadog_agent`. @@ -35911,7 +35989,10 @@ components: x-enum-varnames: - DATADOG_AGENT ObservabilityPipelineDatadogLogsDestination: - description: The `datadog_logs` destination forwards logs to Datadog Log Management. + description: 'The `datadog_logs` destination forwards logs to Datadog Log Management. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -35932,6 +36013,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogLogsDestinationType: default: datadog_logs description: The destination type. The value should always be `datadog_logs`. @@ -35941,9 +36024,48 @@ components: type: string x-enum-varnames: - DATADOG_LOGS + ObservabilityPipelineDatadogMetricsDestination: + description: 'The `datadog_metrics` destination forwards metrics to Datadog. + + + **Supported pipeline types:** metrics' + properties: + id: + description: The unique identifier for this component. + example: datadog-metrics-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - metric-tags-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineDatadogMetricsDestinationType: + default: datadog_metrics + description: The destination type. The value should always be `datadog_metrics`. + enum: + - datadog_metrics + example: datadog_metrics + type: string + x-enum-varnames: + - DATADOG_METRICS ObservabilityPipelineDatadogTagsProcessor: - description: The `datadog_tags` processor includes or excludes specific Datadog + description: 'The `datadog_tags` processor includes or excludes specific Datadog tags in your logs. + + + **Supported pipeline types:** logs' properties: action: $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessorAction' @@ -35986,6 +36108,8 @@ components: - keys - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogTagsProcessorAction: description: The action to take on tags with matching keys. enum: @@ -36028,7 +36152,10 @@ components: - DECODE_JSON - DECODE_SYSLOG ObservabilityPipelineDedupeProcessor: - description: The `dedupe` processor removes duplicate fields in log events. + description: 'The `dedupe` processor removes duplicate fields in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36065,6 +36192,8 @@ components: - mode - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDedupeProcessorMode: description: The deduplication mode to apply to the fields. enum: @@ -36085,8 +36214,11 @@ components: x-enum-varnames: - DEDUPE ObservabilityPipelineElasticsearchDestination: - description: The `elasticsearch` destination writes logs to an Elasticsearch + description: 'The `elasticsearch` destination writes logs to an Elasticsearch cluster. 
+ + + **Supported pipeline types:** logs' properties: api_version: $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' @@ -36113,6 +36245,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineElasticsearchDestinationApiVersion: description: The Elasticsearch API version to use. Set to `auto` to auto-detect. enum: @@ -36266,8 +36400,11 @@ components: - path type: object ObservabilityPipelineEnrichmentTableProcessor: - description: The `enrichment_table` processor enriches logs using a static CSV - file or GeoIP database. + description: 'The `enrichment_table` processor enriches logs using a static + CSV file or GeoIP database. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36301,6 +36438,8 @@ components: - target - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineEnrichmentTableProcessorType: default: enrichment_table description: The processor type. The value should always be `enrichment_table`. @@ -36326,9 +36465,12 @@ components: - value type: object ObservabilityPipelineFilterProcessor: - description: The `filter` processor allows conditional processing of logs based - on a Datadog search query. Logs that match the `include` query are passed - through; others are discarded. + description: 'The `filter` processor allows conditional processing of logs/metrics + based on a Datadog search query. Logs/metrics that match the `include` query + are passed through; others are discarded. + + + **Supported pipeline types:** logs, metrics' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36343,9 +36485,9 @@ components: example: filter-processor type: string include: - description: A Datadog search query used to determine which logs should - pass through the filter. Logs that match this query continue to downstream - components; others are dropped. + description: A Datadog search query used to determine which logs/metrics + should pass through the filter. Logs/metrics that match this query continue + to downstream components; others are dropped. example: service:my-service type: string type: @@ -36356,6 +36498,9 @@ components: - include - enabled type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineFilterProcessorType: default: filter description: The processor type. The value should always be `filter`. @@ -36366,7 +36511,10 @@ components: x-enum-varnames: - FILTER ObservabilityPipelineFluentBitSource: - description: The `fluent_bit` source ingests logs from Fluent Bit. + description: 'The `fluent_bit` source ingests logs from Fluent Bit. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -36382,6 +36530,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentBitSourceType: default: fluent_bit description: The source type. The value should always be `fluent_bit`. @@ -36392,7 +36542,10 @@ components: x-enum-varnames: - FLUENT_BIT ObservabilityPipelineFluentdSource: - description: The `fluentd` source ingests logs from a Fluentd-compatible service. + description: 'The `fluentd` source ingests logs from a Fluentd-compatible service. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. 
Used to reference @@ -36408,6 +36561,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentdSourceType: default: fluentd description: The source type. The value should always be `fluentd. @@ -36432,7 +36587,10 @@ components: from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by - log fields.' + log fields. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36462,6 +36620,8 @@ components: - type - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineGenerateMetricsProcessorType: default: generate_datadog_metrics description: The processor type. Always `generate_datadog_metrics`. @@ -36556,7 +36716,10 @@ components: - GAUGE - DISTRIBUTION ObservabilityPipelineGoogleChronicleDestination: - description: The `google_chronicle` destination sends logs to Google Chronicle. + description: 'The `google_chronicle` destination sends logs to Google Chronicle. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36590,6 +36753,8 @@ components: - inputs - customer_id type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleChronicleDestinationEncoding: description: The encoding format for the logs sent to Chronicle. enum: @@ -36613,7 +36778,10 @@ components: description: 'The `google_cloud_storage` destination stores logs in a Google Cloud Storage (GCS) bucket. - It requires a bucket name, GCP authentication, and metadata fields.' + It requires a bucket name, GCP authentication, and metadata fields. + + + **Supported pipeline types:** logs' properties: acl: $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationAcl' @@ -36655,6 +36823,8 @@ components: - bucket - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleCloudStorageDestinationAcl: description: Access control list setting for objects written to the bucket. enum: @@ -36697,8 +36867,11 @@ components: x-enum-varnames: - GOOGLE_CLOUD_STORAGE ObservabilityPipelineGooglePubSubDestination: - description: The `google_pubsub` destination publishes logs to a Google Cloud + description: 'The `google_pubsub` destination publishes logs to a Google Cloud Pub/Sub topic. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36736,6 +36909,8 @@ components: - project - topic type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubDestinationEncoding: description: Encoding format for log events. enum: @@ -36756,8 +36931,11 @@ components: x-enum-varnames: - GOOGLE_PUBSUB ObservabilityPipelineGooglePubSubSource: - description: The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub + description: 'The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub subscription. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36788,6 +36966,8 @@ components: - project - subscription type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubSourceType: default: google_pubsub description: The source type. The value should always be `google_pubsub`. 
@@ -36797,9 +36977,92 @@ components: type: string x-enum-varnames: - GOOGLE_PUBSUB + ObservabilityPipelineHttpClientDestination: + description: 'The `http_client` destination sends data to an HTTP endpoint. + + + **Supported pipeline types:** logs, metrics' + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationAuthStrategy' + compression: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationEncoding' + id: + description: The unique identifier for this component. + example: http-client-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - filter-processor + items: + type: string + type: array + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationType' + required: + - id + - type + - inputs + - encoding + type: object + x-pipeline-types: + - logs + - metrics + ObservabilityPipelineHttpClientDestinationAuthStrategy: + description: HTTP authentication strategy. + enum: + - basic + - bearer + example: basic + type: string + x-enum-varnames: + - BASIC + - BEARER + ObservabilityPipelineHttpClientDestinationCompression: + description: Compression configuration for HTTP requests. + properties: + algorithm: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm' + required: + - algorithm + type: object + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm: + description: Compression algorithm. + enum: + - gzip + example: gzip + type: string + x-enum-varnames: + - GZIP + ObservabilityPipelineHttpClientDestinationEncoding: + description: Encoding format for log events. + enum: + - json + example: json + type: string + x-enum-varnames: + - JSON + ObservabilityPipelineHttpClientDestinationType: + default: http_client + description: The destination type. The value should always be `http_client`. + enum: + - http_client + example: http_client + type: string + x-enum-varnames: + - HTTP_CLIENT ObservabilityPipelineHttpClientSource: - description: The `http_client` source scrapes logs from HTTP endpoints at regular + description: 'The `http_client` source scrapes logs from HTTP endpoints at regular intervals. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceAuthStrategy' @@ -36830,6 +37093,8 @@ components: - type - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpClientSourceAuthStrategy: description: Optional authentication strategy for HTTP requests. enum: @@ -36850,8 +37115,11 @@ components: x-enum-varnames: - HTTP_CLIENT ObservabilityPipelineHttpServerSource: - description: The `http_server` source collects logs over HTTP POST from external + description: 'The `http_server` source collects logs over HTTP POST from external services. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceAuthStrategy' @@ -36871,6 +37139,8 @@ components: - auth_strategy - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpServerSourceAuthStrategy: description: HTTP authentication method. 
enum: @@ -36891,7 +37161,10 @@ components: x-enum-varnames: - HTTP_SERVER ObservabilityPipelineKafkaSource: - description: The `kafka` source ingests data from Apache Kafka topics. + description: 'The `kafka` source ingests data from Apache Kafka topics. + + + **Supported pipeline types:** logs' properties: group_id: description: Consumer group ID used by the Kafka client. @@ -36930,6 +37203,8 @@ components: - group_id - topics type: object + x-pipeline-types: + - logs ObservabilityPipelineKafkaSourceLibrdkafkaOption: description: Represents a key-value pair used to configure low-level `librdkafka` client options for Kafka sources, such as timeouts, buffer sizes, and security @@ -36964,7 +37239,10 @@ components: x-enum-varnames: - KAFKA ObservabilityPipelineLogstashSource: - description: The `logstash` source ingests logs from a Logstash forwarder. + description: 'The `logstash` source ingests logs from a Logstash forwarder. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -36980,6 +37258,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineLogstashSourceType: default: logstash description: The source type. The value should always be `logstash`. @@ -37004,13 +37284,104 @@ components: - name - value type: object + ObservabilityPipelineMetricTagsProcessor: + description: 'The `metric_tags` processor filters metrics based on their tags + using Datadog tag key patterns. + + + **Supported pipeline types:** metrics' + properties: + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: metric-tags-processor + type: string + include: + description: A Datadog search query used to determine which metrics this + processor targets. + example: '*' + type: string + rules: + description: A list of rules for filtering metric tags. + items: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRule' + maxItems: 100 + minItems: 1 + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorType' + required: + - id + - type + - include + - rules + - enabled + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineMetricTagsProcessorRule: + description: Defines a rule for filtering metric tags based on key patterns. + properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleAction' + keys: + description: A list of tag keys to include or exclude. + example: + - env + - service + - version + items: + type: string + type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleMode' + required: + - mode + - action + - keys + type: object + ObservabilityPipelineMetricTagsProcessorRuleAction: + description: The action to take on tags with matching keys. + enum: + - include + - exclude + example: include + type: string + x-enum-varnames: + - INCLUDE + - EXCLUDE + ObservabilityPipelineMetricTagsProcessorRuleMode: + description: The processing mode for tag filtering. + enum: + - filter + example: filter + type: string + x-enum-varnames: + - FILTER + ObservabilityPipelineMetricTagsProcessorType: + default: metric_tags + description: The processor type. The value should always be `metric_tags`. 
+ enum: + - metric_tags + example: metric_tags + type: string + x-enum-varnames: + - METRIC_TAGS ObservabilityPipelineMetricValue: description: Specifies how the value of the generated metric is computed. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOne' - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByField' ObservabilityPipelineNewRelicDestination: - description: The `new_relic` destination sends logs to the New Relic platform. + description: 'The `new_relic` destination sends logs to the New Relic platform. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -37034,6 +37405,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineNewRelicDestinationRegion: description: The New Relic region. enum: @@ -37054,8 +37427,11 @@ components: x-enum-varnames: - NEW_RELIC ObservabilityPipelineOcsfMapperProcessor: - description: The `ocsf_mapper` processor transforms logs into the OCSF schema + description: 'The `ocsf_mapper` processor transforms logs into the OCSF schema using a predefined mapping configuration. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37087,6 +37463,8 @@ components: - mappings - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineOcsfMapperProcessorMapping: description: Defines how specific events are transformed to OCSF using a mapping configuration. @@ -37146,7 +37524,10 @@ components: - OKTA_SYSTEM_LOG_AUTHENTICATION - PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC ObservabilityPipelineOpenSearchDestination: - description: The `opensearch` destination writes logs to an OpenSearch cluster. + description: 'The `opensearch` destination writes logs to an OpenSearch cluster. + + + **Supported pipeline types:** logs' properties: bulk_index: description: The index to write logs to. @@ -37171,6 +37552,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineOpenSearchDestinationType: default: opensearch description: The destination type. The value should always be `opensearch`. @@ -37181,8 +37564,11 @@ components: x-enum-varnames: - OPENSEARCH ObservabilityPipelineParseGrokProcessor: - description: The `parse_grok` processor extracts structured fields from unstructured + description: 'The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. + + + **Supported pipeline types:** logs' properties: disable_library_rules: default: false @@ -37221,6 +37607,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseGrokProcessorRule: description: 'A Grok parsing rule used in the `parse_grok` processor. Each rule defines how to extract structured fields @@ -37300,9 +37688,12 @@ components: x-enum-varnames: - PARSE_GROK ObservabilityPipelineParseJSONProcessor: - description: The `parse_json` processor extracts JSON from a specified field + description: 'The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. 
+ + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37334,6 +37725,8 @@ components: - field - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseJSONProcessorType: default: parse_json description: The processor type. The value should always be `parse_json`. @@ -37355,9 +37748,12 @@ components: - SCRAMNOT_SHANOT_256 - SCRAMNOT_SHANOT_512 ObservabilityPipelineQuotaProcessor: - description: The Quota Processor measures logging traffic for logs that match + description: 'The `quota` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37418,6 +37814,8 @@ components: - limit - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineQuotaProcessorLimit: description: The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events. @@ -37489,8 +37887,11 @@ components: x-enum-varnames: - QUOTA ObservabilityPipelineReduceProcessor: - description: The `reduce` processor aggregates and merges logs based on matching + description: 'The `reduce` processor aggregates and merges logs based on matching keys and merge strategies. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37531,6 +37932,8 @@ components: - merge_strategies - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineReduceProcessorMergeStrategy: description: Defines how a specific field should be merged across grouped events. properties: @@ -37584,7 +37987,10 @@ components: x-enum-varnames: - REDUCE ObservabilityPipelineRemoveFieldsProcessor: - description: The `remove_fields` processor deletes specified fields from logs. + description: 'The `remove_fields` processor deletes specified fields from logs. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37620,6 +38026,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRemoveFieldsProcessorType: default: remove_fields description: The processor type. The value should always be `remove_fields`. @@ -37630,7 +38038,10 @@ components: x-enum-varnames: - REMOVE_FIELDS ObservabilityPipelineRenameFieldsProcessor: - description: The `rename_fields` processor changes field names. + description: 'The `rename_fields` processor changes field names. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37665,6 +38076,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRenameFieldsProcessorField: description: Defines how to rename a field in log events. properties: @@ -37696,8 +38109,11 @@ components: x-enum-varnames: - RENAME_FIELDS ObservabilityPipelineRsyslogDestination: - description: The `rsyslog` destination forwards logs to an external `rsyslog` + description: 'The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. 
@@ -37726,6 +38142,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogDestinationType: default: rsyslog description: The destination type. The value should always be `rsyslog`. @@ -37736,8 +38154,11 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineRsyslogSource: - description: The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` - server using the syslog protocol. + description: 'The `rsyslog` source listens for logs over TCP or UDP from an + `rsyslog` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -37756,6 +38177,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogSourceType: default: rsyslog description: The source type. The value should always be `rsyslog`. @@ -37766,8 +38189,11 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineSampleProcessor: - description: The `sample` processor allows probabilistic sampling of logs at + description: 'The `sample` processor allows probabilistic sampling of logs at a fixed rate. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37805,6 +38231,8 @@ components: - include - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSampleProcessorType: default: sample description: The processor type. The value should always be `sample`. @@ -37815,8 +38243,11 @@ components: x-enum-varnames: - SAMPLE ObservabilityPipelineSensitiveDataScannerProcessor: - description: The `sensitive_data_scanner` processor detects and optionally redacts - sensitive data in log events. + description: 'The `sensitive_data_scanner` processor detects and optionally + redacts sensitive data in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37850,6 +38281,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSensitiveDataScannerProcessorAction: description: Defines what action to take when sensitive data is matched. oneOf: @@ -38167,7 +38600,10 @@ components: x-enum-varnames: - SENSITIVE_DATA_SCANNER ObservabilityPipelineSentinelOneDestination: - description: The `sentinel_one` destination sends logs to SentinelOne. + description: 'The `sentinel_one` destination sends logs to SentinelOne. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38191,6 +38627,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineSentinelOneDestinationRegion: description: The SentinelOne region to send logs to. enum: @@ -38215,8 +38653,11 @@ components: x-enum-varnames: - SENTINEL_ONE ObservabilityPipelineSocketDestination: - description: The `socket` destination sends logs over TCP or UDP to a remote + description: 'The `socket` destination sends logs over TCP or UDP to a remote server. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSocketDestinationEncoding' @@ -38249,6 +38690,8 @@ components: - framing - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketDestinationEncoding: description: Encoding format for log events. 
enum: @@ -38343,7 +38786,10 @@ components: x-enum-varnames: - SOCKET ObservabilityPipelineSocketSource: - description: The `socket` source ingests logs over TCP or UDP. + description: 'The `socket` source ingests logs over TCP or UDP. + + + **Supported pipeline types:** logs' properties: framing: $ref: '#/components/schemas/ObservabilityPipelineSocketSourceFraming' @@ -38366,6 +38812,8 @@ components: - mode - framing type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketSourceFraming: description: Framing method configuration for the socket source. oneOf: @@ -38507,8 +38955,11 @@ components: - attributes type: object ObservabilityPipelineSplunkHecDestination: - description: The `splunk_hec` destination forwards logs to Splunk using the + description: 'The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). + + + **Supported pipeline types:** logs' properties: auto_extract_timestamp: description: 'If `true`, Splunk tries to extract timestamps from incoming @@ -38548,6 +38999,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecDestinationEncoding: description: Encoding format for log events. enum: @@ -38568,8 +39021,11 @@ components: x-enum-varnames: - SPLUNK_HEC ObservabilityPipelineSplunkHecSource: - description: The `splunk_hec` source implements the Splunk HTTP Event Collector + description: 'The `splunk_hec` source implements the Splunk HTTP Event Collector (HEC) API. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38585,6 +39041,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecSourceType: default: splunk_hec description: The source type. Always `splunk_hec`. @@ -38598,7 +39056,10 @@ components: description: 'The `splunk_tcp` source receives logs from a Splunk Universal Forwarder over TCP. - TLS is supported for secure transmission.' + TLS is supported for secure transmission. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38614,6 +39075,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkTcpSourceType: default: splunk_tcp description: The source type. Always `splunk_tcp`. @@ -38624,7 +39087,10 @@ components: x-enum-varnames: - SPLUNK_TCP ObservabilityPipelineSumoLogicDestination: - description: The `sumo_logic` destination forwards logs to Sumo Logic. + description: 'The `sumo_logic` destination forwards logs to Sumo Logic. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' @@ -38665,6 +39131,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicDestinationEncoding: description: The output encoding format. enum: @@ -38702,7 +39170,10 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSumoLogicSource: - description: The `sumo_logic` source receives logs from Sumo Logic collectors. + description: 'The `sumo_logic` source receives logs from Sumo Logic collectors. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. 
Used to reference @@ -38716,6 +39187,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicSourceType: default: sumo_logic description: The source type. The value should always be `sumo_logic`. @@ -38726,8 +39199,11 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSyslogNgDestination: - description: The `syslog_ng` destination forwards logs to an external `syslog-ng` + description: 'The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38756,6 +39232,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgDestinationType: default: syslog_ng description: The destination type. The value should always be `syslog_ng`. @@ -38766,8 +39244,11 @@ components: x-enum-varnames: - SYSLOG_NG ObservabilityPipelineSyslogNgSource: - description: The `syslog_ng` source listens for logs over TCP or UDP from a + description: 'The `syslog_ng` source listens for logs over TCP or UDP from a `syslog-ng` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38786,6 +39267,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgSourceType: default: syslog_ng description: The source type. The value should always be `syslog_ng`. @@ -38806,8 +39289,11 @@ components: - TCP - UDP ObservabilityPipelineThrottleProcessor: - description: The `throttle` processor limits the number of events that pass + description: 'The `throttle` processor limits the number of events that pass through over a given time window. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -38853,6 +39339,8 @@ components: - window - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineThrottleProcessorType: default: throttle description: The processor type. The value should always be `throttle`. 
diff --git a/features/v2/observability_pipelines.feature b/features/v2/observability_pipelines.feature index c43fa8b3b76..a9b17ec7fdf 100644 --- a/features/v2/observability_pipelines.feature +++ b/features/v2/observability_pipelines.feature @@ -20,7 +20,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Conflict" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "pipeline_type": "logs", "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -115,7 +115,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "REPLACE.ME" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "pipeline_type": "logs", "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict diff --git a/lib/datadog_api_client/inflector.rb b/lib/datadog_api_client/inflector.rb index 
3e6e421122a..efd1a0f8834 100644 --- a/lib/datadog_api_client/inflector.rb +++ b/lib/datadog_api_client/inflector.rb @@ -3155,6 +3155,7 @@ def overrides "v2.observability_pipeline_aws_auth" => "ObservabilityPipelineAwsAuth", "v2.observability_pipeline_config" => "ObservabilityPipelineConfig", "v2.observability_pipeline_config_destination_item" => "ObservabilityPipelineConfigDestinationItem", + "v2.observability_pipeline_config_pipeline_type" => "ObservabilityPipelineConfigPipelineType", "v2.observability_pipeline_config_processor_group" => "ObservabilityPipelineConfigProcessorGroup", "v2.observability_pipeline_config_processor_item" => "ObservabilityPipelineConfigProcessorItem", "v2.observability_pipeline_config_source_item" => "ObservabilityPipelineConfigSourceItem", @@ -3172,6 +3173,8 @@ def overrides "v2.observability_pipeline_datadog_agent_source_type" => "ObservabilityPipelineDatadogAgentSourceType", "v2.observability_pipeline_datadog_logs_destination" => "ObservabilityPipelineDatadogLogsDestination", "v2.observability_pipeline_datadog_logs_destination_type" => "ObservabilityPipelineDatadogLogsDestinationType", + "v2.observability_pipeline_datadog_metrics_destination" => "ObservabilityPipelineDatadogMetricsDestination", + "v2.observability_pipeline_datadog_metrics_destination_type" => "ObservabilityPipelineDatadogMetricsDestinationType", "v2.observability_pipeline_datadog_tags_processor" => "ObservabilityPipelineDatadogTagsProcessor", "v2.observability_pipeline_datadog_tags_processor_action" => "ObservabilityPipelineDatadogTagsProcessorAction", "v2.observability_pipeline_datadog_tags_processor_mode" => "ObservabilityPipelineDatadogTagsProcessorMode", @@ -3221,6 +3224,12 @@ def overrides "v2.observability_pipeline_google_pub_sub_destination_type" => "ObservabilityPipelineGooglePubSubDestinationType", "v2.observability_pipeline_google_pub_sub_source" => "ObservabilityPipelineGooglePubSubSource", "v2.observability_pipeline_google_pub_sub_source_type" => "ObservabilityPipelineGooglePubSubSourceType", + "v2.observability_pipeline_http_client_destination" => "ObservabilityPipelineHttpClientDestination", + "v2.observability_pipeline_http_client_destination_auth_strategy" => "ObservabilityPipelineHttpClientDestinationAuthStrategy", + "v2.observability_pipeline_http_client_destination_compression" => "ObservabilityPipelineHttpClientDestinationCompression", + "v2.observability_pipeline_http_client_destination_compression_algorithm" => "ObservabilityPipelineHttpClientDestinationCompressionAlgorithm", + "v2.observability_pipeline_http_client_destination_encoding" => "ObservabilityPipelineHttpClientDestinationEncoding", + "v2.observability_pipeline_http_client_destination_type" => "ObservabilityPipelineHttpClientDestinationType", "v2.observability_pipeline_http_client_source" => "ObservabilityPipelineHttpClientSource", "v2.observability_pipeline_http_client_source_auth_strategy" => "ObservabilityPipelineHttpClientSourceAuthStrategy", "v2.observability_pipeline_http_client_source_type" => "ObservabilityPipelineHttpClientSourceType", @@ -3234,6 +3243,11 @@ def overrides "v2.observability_pipeline_logstash_source" => "ObservabilityPipelineLogstashSource", "v2.observability_pipeline_logstash_source_type" => "ObservabilityPipelineLogstashSourceType", "v2.observability_pipeline_metadata_entry" => "ObservabilityPipelineMetadataEntry", + "v2.observability_pipeline_metric_tags_processor" => "ObservabilityPipelineMetricTagsProcessor", + "v2.observability_pipeline_metric_tags_processor_rule" => 
"ObservabilityPipelineMetricTagsProcessorRule", + "v2.observability_pipeline_metric_tags_processor_rule_action" => "ObservabilityPipelineMetricTagsProcessorRuleAction", + "v2.observability_pipeline_metric_tags_processor_rule_mode" => "ObservabilityPipelineMetricTagsProcessorRuleMode", + "v2.observability_pipeline_metric_tags_processor_type" => "ObservabilityPipelineMetricTagsProcessorType", "v2.observability_pipeline_metric_value" => "ObservabilityPipelineMetricValue", "v2.observability_pipeline_new_relic_destination" => "ObservabilityPipelineNewRelicDestination", "v2.observability_pipeline_new_relic_destination_region" => "ObservabilityPipelineNewRelicDestinationRegion", diff --git a/lib/datadog_api_client/v2/models/azure_storage_destination.rb b/lib/datadog_api_client/v2/models/azure_storage_destination.rb index ef29c2cade8..b17e732af9c 100644 --- a/lib/datadog_api_client/v2/models/azure_storage_destination.rb +++ b/lib/datadog_api_client/v2/models/azure_storage_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `azure_storage` destination forwards logs to an Azure Blob Storage container. + # + # **Supported pipeline types:** logs class AzureStorageDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb b/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb index 03266b73340..4948de659f0 100644 --- a/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb +++ b/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. + # + # **Supported pipeline types:** logs class MicrosoftSentinelDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb index 03fce60c6e9..fd17f878e32 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `add_env_vars` processor adds environment variable values to log events. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAddEnvVarsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb index ee063e97ea9..cfc3ff081a7 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `add_fields` processor adds static key-value fields to logs. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineAddFieldsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb index bd174706a44..ce0598e59e6 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonDataFirehoseSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb index ab8721363e0..44f1c6db5b6 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonOpenSearchDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb index 6e74bafcd33..aec7f0bbcbf 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_s3` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonS3Destination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb index a764ab83045..70f13572719 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb @@ -19,6 +19,8 @@ module DatadogAPIClient::V2 # The `amazon_s3` source ingests logs from an Amazon S3 bucket. # It supports AWS authentication and TLS encryption. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonS3Source include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb index 6904637a972..6bc480ae997 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_security_lake` destination sends your logs to Amazon Security Lake. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonSecurityLakeDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config.rb index 04005274e02..1d3511feca5 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config.rb @@ -24,6 +24,9 @@ class ObservabilityPipelineConfig # A list of destination components where processed logs are sent. attr_reader :destinations + # The type of data being ingested. Defaults to `logs` if not specified. + attr_accessor :pipeline_type + # A list of processor groups that transform or enrich log data. attr_accessor :processors @@ -37,6 +40,7 @@ class ObservabilityPipelineConfig def self.attribute_map { :'destinations' => :'destinations', + :'pipeline_type' => :'pipeline_type', :'processors' => :'processors', :'sources' => :'sources' } @@ -47,6 +51,7 @@ def self.attribute_map def self.openapi_types { :'destinations' => :'Array', + :'pipeline_type' => :'ObservabilityPipelineConfigPipelineType', :'processors' => :'Array', :'sources' => :'Array' } @@ -76,6 +81,10 @@ def initialize(attributes = {}) end end + if attributes.key?(:'pipeline_type') + self.pipeline_type = attributes[:'pipeline_type'] + end + if attributes.key?(:'processors') if (value = attributes[:'processors']).is_a?(Array) self.processors = value @@ -145,6 +154,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && destinations == o.destinations && + pipeline_type == o.pipeline_type && processors == o.processors && sources == o.sources && additional_properties == o.additional_properties @@ -154,7 +164,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [destinations, processors, sources, additional_properties].hash + [destinations, pipeline_type, processors, sources, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb index 00386a1aa19..4a5b6d21413 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb @@ -26,25 +26,27 @@ class << self # List of class defined in oneOf (OpenAPI v3) def openapi_one_of [ - :'ObservabilityPipelineDatadogLogsDestination', + :'ObservabilityPipelineHttpClientDestination', + :'ObservabilityPipelineAmazonOpenSearchDestination', :'ObservabilityPipelineAmazonS3Destination', - :'ObservabilityPipelineGoogleCloudStorageDestination', - :'ObservabilityPipelineSplunkHecDestination', - :'ObservabilityPipelineSumoLogicDestination', - :'ObservabilityPipelineElasticsearchDestination', - :'ObservabilityPipelineRsyslogDestination', - :'ObservabilityPipelineSyslogNgDestination', + :'ObservabilityPipelineAmazonSecurityLakeDestination', :'AzureStorageDestination', - :'MicrosoftSentinelDestination', + :'ObservabilityPipelineCrowdStrikeNextGenSiemDestination', + :'ObservabilityPipelineDatadogLogsDestination', + :'ObservabilityPipelineElasticsearchDestination', :'ObservabilityPipelineGoogleChronicleDestination', + :'ObservabilityPipelineGoogleCloudStorageDestination', + :'ObservabilityPipelineGooglePubSubDestination', + :'MicrosoftSentinelDestination', :'ObservabilityPipelineNewRelicDestination', - 
:'ObservabilityPipelineSentinelOneDestination', :'ObservabilityPipelineOpenSearchDestination', - :'ObservabilityPipelineAmazonOpenSearchDestination', + :'ObservabilityPipelineRsyslogDestination', + :'ObservabilityPipelineSentinelOneDestination', :'ObservabilityPipelineSocketDestination', - :'ObservabilityPipelineAmazonSecurityLakeDestination', - :'ObservabilityPipelineCrowdStrikeNextGenSiemDestination', - :'ObservabilityPipelineGooglePubSubDestination' + :'ObservabilityPipelineSplunkHecDestination', + :'ObservabilityPipelineSumoLogicDestination', + :'ObservabilityPipelineSyslogNgDestination', + :'ObservabilityPipelineDatadogMetricsDestination' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb new file mode 100644 index 00000000000..ef043dc88c5 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The type of data being ingested. Defaults to `logs` if not specified. + class ObservabilityPipelineConfigPipelineType + include BaseEnumModel + + LOGS = "logs".freeze + METRICS = "metrics".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb index 334f6e14d8a..e23e3ce61ef 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb @@ -27,23 +27,24 @@ class << self def openapi_one_of [ :'ObservabilityPipelineFilterProcessor', + :'ObservabilityPipelineAddEnvVarsProcessor', + :'ObservabilityPipelineAddFieldsProcessor', + :'ObservabilityPipelineCustomProcessor', + :'ObservabilityPipelineDatadogTagsProcessor', + :'ObservabilityPipelineDedupeProcessor', + :'ObservabilityPipelineEnrichmentTableProcessor', + :'ObservabilityPipelineGenerateMetricsProcessor', + :'ObservabilityPipelineOcsfMapperProcessor', + :'ObservabilityPipelineParseGrokProcessor', :'ObservabilityPipelineParseJSONProcessor', :'ObservabilityPipelineQuotaProcessor', - :'ObservabilityPipelineAddFieldsProcessor', + :'ObservabilityPipelineReduceProcessor', :'ObservabilityPipelineRemoveFieldsProcessor', :'ObservabilityPipelineRenameFieldsProcessor', - :'ObservabilityPipelineGenerateMetricsProcessor', :'ObservabilityPipelineSampleProcessor', - :'ObservabilityPipelineParseGrokProcessor', :'ObservabilityPipelineSensitiveDataScannerProcessor', - :'ObservabilityPipelineOcsfMapperProcessor', - :'ObservabilityPipelineAddEnvVarsProcessor', - :'ObservabilityPipelineDedupeProcessor', - :'ObservabilityPipelineEnrichmentTableProcessor', - :'ObservabilityPipelineReduceProcessor', :'ObservabilityPipelineThrottleProcessor', - :'ObservabilityPipelineCustomProcessor', - :'ObservabilityPipelineDatadogTagsProcessor' + 
:'ObservabilityPipelineMetricTagsProcessor' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb index 6c5e58312de..e951059660a 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb @@ -26,22 +26,22 @@ class << self # List of class defined in oneOf (OpenAPI v3) def openapi_one_of [ - :'ObservabilityPipelineKafkaSource', :'ObservabilityPipelineDatadogAgentSource', - :'ObservabilityPipelineSplunkTcpSource', - :'ObservabilityPipelineSplunkHecSource', + :'ObservabilityPipelineAmazonDataFirehoseSource', :'ObservabilityPipelineAmazonS3Source', - :'ObservabilityPipelineFluentdSource', :'ObservabilityPipelineFluentBitSource', - :'ObservabilityPipelineHttpServerSource', - :'ObservabilityPipelineSumoLogicSource', - :'ObservabilityPipelineRsyslogSource', - :'ObservabilityPipelineSyslogNgSource', - :'ObservabilityPipelineAmazonDataFirehoseSource', + :'ObservabilityPipelineFluentdSource', :'ObservabilityPipelineGooglePubSubSource', :'ObservabilityPipelineHttpClientSource', + :'ObservabilityPipelineHttpServerSource', + :'ObservabilityPipelineKafkaSource', :'ObservabilityPipelineLogstashSource', - :'ObservabilityPipelineSocketSource' + :'ObservabilityPipelineRsyslogSource', + :'ObservabilityPipelineSocketSource', + :'ObservabilityPipelineSplunkHecSource', + :'ObservabilityPipelineSplunkTcpSource', + :'ObservabilityPipelineSumoLogicSource', + :'ObservabilityPipelineSyslogNgSource' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb index 827d8cc8c39..3a24137f6c7 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. + # + # **Supported pipeline types:** logs class ObservabilityPipelineCrowdStrikeNextGenSiemDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb index 5b554a17df5..4e9800ce0e5 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `custom_processor` processor transforms events using [Vector Remap Language (VRL)](https://vector.dev/docs/reference/vrl/) scripts with advanced filtering capabilities. 
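# A minimal sketch of the new `pipeline_type` attribute on ObservabilityPipelineConfig;
# the METRICS constant comes from the new ObservabilityPipelineConfigPipelineType enum.
# Other config attributes are omitted for brevity.
require "datadog_api_client"

config = DatadogAPIClient::V2::ObservabilityPipelineConfig.new(
  pipeline_type: DatadogAPIClient::V2::ObservabilityPipelineConfigPipelineType::METRICS
)
config.pipeline_type # => "metrics"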
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineCustomProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb index 27b72437329..4147195c688 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb @@ -17,7 +17,9 @@ require 'time' module DatadogAPIClient::V2 - # The `datadog_agent` source collects logs from the Datadog Agent. + # The `datadog_agent` source collects logs/metrics from the Datadog Agent. + # + # **Supported pipeline types:** logs, metrics class ObservabilityPipelineDatadogAgentSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb index 3b08d1fd9c2..ea784246329 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `datadog_logs` destination forwards logs to Datadog Log Management. + # + # **Supported pipeline types:** logs class ObservabilityPipelineDatadogLogsDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb new file mode 100644 index 00000000000..e94fadc3fa3 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb @@ -0,0 +1,169 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `datadog_metrics` destination forwards metrics to Datadog. + # + # **Supported pipeline types:** metrics + class ObservabilityPipelineDatadogMetricsDestination + include BaseGenericModel + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the input for this component. + attr_reader :inputs + + # The destination type. The value should always be `datadog_metrics`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'id' => :'id', + :'inputs' => :'inputs', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'id' => :'String', + :'inputs' => :'Array', + :'type' => :'ObservabilityPipelineDatadogMetricsDestinationType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineDatadogMetricsDestination` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? + return false if @inputs.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + id == o.id && + inputs == o.inputs && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [id, inputs, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb new file mode 100644 index 00000000000..c32b6952246 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `datadog_metrics`. + class ObservabilityPipelineDatadogMetricsDestinationType + include BaseEnumModel + + DATADOG_METRICS = "datadog_metrics".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb index 5eeb0091f9d..f406e6b5a8e 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `datadog_tags` processor includes or excludes specific Datadog tags in your logs. + # + # **Supported pipeline types:** logs class ObservabilityPipelineDatadogTagsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb index 4420f11b474..3b73f9a136f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `dedupe` processor removes duplicate fields in log events. + # + # **Supported pipeline types:** logs class ObservabilityPipelineDedupeProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb index f49df443a04..f04cc5a9e7a 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `elasticsearch` destination writes logs to an Elasticsearch cluster. 
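# A minimal sketch of the new datadog_metrics destination defined above; the id and
# the input reference are illustrative. id, inputs, and type are all required.
require "datadog_api_client"

metrics_destination = DatadogAPIClient::V2::ObservabilityPipelineDatadogMetricsDestination.new(
  id: "datadog-metrics-destination",
  inputs: ["filter-processor"],
  type: DatadogAPIClient::V2::ObservabilityPipelineDatadogMetricsDestinationType::DATADOG_METRICS
)
metrics_destination.valid? # => true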
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineElasticsearchDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb index ea73dec4ede..f3353e3817d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `enrichment_table` processor enriches logs using a static CSV file or GeoIP database. + # + # **Supported pipeline types:** logs class ObservabilityPipelineEnrichmentTableProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb index c7339f0900e..f15e183edf7 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb @@ -17,7 +17,9 @@ require 'time' module DatadogAPIClient::V2 - # The `filter` processor allows conditional processing of logs based on a Datadog search query. Logs that match the `include` query are passed through; others are discarded. + # The `filter` processor allows conditional processing of logs/metrics based on a Datadog search query. Logs/metrics that match the `include` query are passed through; others are discarded. + # + # **Supported pipeline types:** logs, metrics class ObservabilityPipelineFilterProcessor include BaseGenericModel @@ -30,7 +32,7 @@ class ObservabilityPipelineFilterProcessor # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). attr_reader :id - # A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped. + # A Datadog search query used to determine which logs/metrics should pass through the filter. Logs/metrics that match this query continue to downstream components; others are dropped. attr_reader :include # The processor type. The value should always be `filter`. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb index bbcc1a10505..275c21c9a67 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `fluent_bit` source ingests logs from Fluent Bit. + # + # **Supported pipeline types:** logs class ObservabilityPipelineFluentBitSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb index 1ab9185baaa..86005282ab6 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `fluentd` source ingests logs from a Fluentd-compatible service. 
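# A minimal sketch of the filter processor, which now applies to both logs and metrics
# pipelines; the include query and id are illustrative, and the `enabled` flag is
# assumed to be part of the model as in the other processors.
require "datadog_api_client"

filter = DatadogAPIClient::V2::ObservabilityPipelineFilterProcessor.new(
  id: "filter-processor",
  include: "status:error",
  enabled: true,
  type: "filter"
)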
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineFluentdSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb index 55d8cf77969..113af4981ae 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb @@ -19,6 +19,8 @@ module DatadogAPIClient::V2 # The `generate_datadog_metrics` processor creates custom metrics from logs and sends them to Datadog. # Metrics can be counters, gauges, or distributions and optionally grouped by log fields. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGenerateMetricsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb index 73b6a5d47df..93eed57d636 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `google_chronicle` destination sends logs to Google Chronicle. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGoogleChronicleDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb index ae95f0729d9..e1572e3804a 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb @@ -19,6 +19,8 @@ module DatadogAPIClient::V2 # The `google_cloud_storage` destination stores logs in a Google Cloud Storage (GCS) bucket. # It requires a bucket name, GCP authentication, and metadata fields. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGoogleCloudStorageDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb index fe396691e12..f85845c5aea 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `google_pubsub` destination publishes logs to a Google Cloud Pub/Sub topic. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGooglePubSubDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb index 73fff4e5042..9bd5196b5d8 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub subscription. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineGooglePubSubSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb new file mode 100644 index 00000000000..b113b66cef9 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb @@ -0,0 +1,220 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `http_client` destination sends data to an HTTP endpoint. + # + # **Supported pipeline types:** logs, metrics + class ObservabilityPipelineHttpClientDestination + include BaseGenericModel + + # HTTP authentication strategy. + attr_accessor :auth_strategy + + # Compression configuration for HTTP requests. + attr_accessor :compression + + # Encoding format for log events. + attr_reader :encoding + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the input for this component. + attr_reader :inputs + + # Configuration for enabling TLS encryption between the pipeline component and external services. + attr_accessor :tls + + # The destination type. The value should always be `http_client`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'auth_strategy' => :'auth_strategy', + :'compression' => :'compression', + :'encoding' => :'encoding', + :'id' => :'id', + :'inputs' => :'inputs', + :'tls' => :'tls', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'auth_strategy' => :'ObservabilityPipelineHttpClientDestinationAuthStrategy', + :'compression' => :'ObservabilityPipelineHttpClientDestinationCompression', + :'encoding' => :'ObservabilityPipelineHttpClientDestinationEncoding', + :'id' => :'String', + :'inputs' => :'Array', + :'tls' => :'ObservabilityPipelineTls', + :'type' => :'ObservabilityPipelineHttpClientDestinationType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestination` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'auth_strategy') + self.auth_strategy = attributes[:'auth_strategy'] + end + + if attributes.key?(:'compression') + self.compression = attributes[:'compression'] + end + + if attributes.key?(:'encoding') + self.encoding = attributes[:'encoding'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'tls') + self.tls = attributes[:'tls'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @encoding.nil? + return false if @id.nil? + return false if @inputs.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param encoding [Object] Object to be assigned + # @!visibility private + def encoding=(encoding) + if encoding.nil? + fail ArgumentError, 'invalid value for "encoding", encoding cannot be nil.' + end + @encoding = encoding + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + auth_strategy == o.auth_strategy && + compression == o.compression && + encoding == o.encoding && + id == o.id && + inputs == o.inputs && + tls == o.tls && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [auth_strategy, compression, encoding, id, inputs, tls, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb new file mode 100644 index 00000000000..90d7724e54a --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # HTTP authentication strategy. + class ObservabilityPipelineHttpClientDestinationAuthStrategy + include BaseEnumModel + + BASIC = "basic".freeze + BEARER = "bearer".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb new file mode 100644 index 00000000000..705e0d42f63 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb @@ -0,0 +1,123 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Compression configuration for HTTP requests. + class ObservabilityPipelineHttpClientDestinationCompression + include BaseGenericModel + + # Compression algorithm. + attr_reader :algorithm + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'algorithm' => :'algorithm' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'algorithm' => :'ObservabilityPipelineHttpClientDestinationCompressionAlgorithm' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestinationCompression` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'algorithm') + self.algorithm = attributes[:'algorithm'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @algorithm.nil? + true + end + + # Custom attribute writer method with validation + # @param algorithm [Object] Object to be assigned + # @!visibility private + def algorithm=(algorithm) + if algorithm.nil? + fail ArgumentError, 'invalid value for "algorithm", algorithm cannot be nil.' + end + @algorithm = algorithm + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + algorithm == o.algorithm && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [algorithm, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb new file mode 100644 index 00000000000..747413a6726 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Compression algorithm. 
+ class ObservabilityPipelineHttpClientDestinationCompressionAlgorithm + include BaseEnumModel + + GZIP = "gzip".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb new file mode 100644 index 00000000000..fd8b88c0335 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Encoding format for log events. + class ObservabilityPipelineHttpClientDestinationEncoding + include BaseEnumModel + + JSON = "json".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb new file mode 100644 index 00000000000..dd91c9255ea --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `http_client`. + class ObservabilityPipelineHttpClientDestinationType + include BaseEnumModel + + HTTP_CLIENT = "http_client".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb index 7a11460ae03..5d71ac3e24a 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `http_client` source scrapes logs from HTTP endpoints at regular intervals. + # + # **Supported pipeline types:** logs class ObservabilityPipelineHttpClientSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb index 80c08b3a58d..271315644f1 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `http_server` source collects logs over HTTP POST from external services. 
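# A minimal sketch of the new http_client destination; the id and input reference are
# illustrative. encoding, id, inputs, and type are required; auth_strategy, compression,
# and tls are optional.
require "datadog_api_client"

http_destination = DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestination.new(
  id: "http-client-destination",
  inputs: ["filter-processor"],
  encoding: DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestinationEncoding::JSON,
  auth_strategy: DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestinationAuthStrategy::BEARER,
  compression: DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestinationCompression.new(
    algorithm: DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestinationCompressionAlgorithm::GZIP
  ),
  type: DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestinationType::HTTP_CLIENT
)
http_destination.valid? # => true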
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineHttpServerSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb index 33c4b1649b1..794a6d6e26d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `kafka` source ingests data from Apache Kafka topics. + # + # **Supported pipeline types:** logs class ObservabilityPipelineKafkaSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb index 3a9550b0bb4..ce22b2fe350 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `logstash` source ingests logs from a Logstash forwarder. + # + # **Supported pipeline types:** logs class ObservabilityPipelineLogstashSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb new file mode 100644 index 00000000000..f4f5cfa3aa7 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb @@ -0,0 +1,219 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `metric_tags` processor filters metrics based on their tags using Datadog tag key patterns. + # + # **Supported pipeline types:** metrics + class ObservabilityPipelineMetricTagsProcessor + include BaseGenericModel + + # Whether this processor is enabled. + attr_reader :enabled + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which metrics this processor targets. + attr_reader :include + + # A list of rules for filtering metric tags. + attr_reader :rules + + # The processor type. The value should always be `metric_tags`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'enabled' => :'enabled', + :'id' => :'id', + :'include' => :'include', + :'rules' => :'rules', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'enabled' => :'Boolean', + :'id' => :'String', + :'include' => :'String', + :'rules' => :'Array', + :'type' => :'ObservabilityPipelineMetricTagsProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'enabled') + self.enabled = attributes[:'enabled'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'rules') + if (value = attributes[:'rules']).is_a?(Array) + self.rules = value + end + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @enabled.nil? + return false if @id.nil? + return false if @include.nil? + return false if @rules.nil? + return false if @rules.length > 100 + return false if @rules.length < 1 + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param enabled [Object] Object to be assigned + # @!visibility private + def enabled=(enabled) + if enabled.nil? + fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.' + end + @enabled = enabled + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param rules [Object] Object to be assigned + # @!visibility private + def rules=(rules) + if rules.nil? + fail ArgumentError, 'invalid value for "rules", rules cannot be nil.' + end + if rules.length > 100 + fail ArgumentError, 'invalid value for "rules", number of items must be less than or equal to 100.' + end + if rules.length < 1 + fail ArgumentError, 'invalid value for "rules", number of items must be greater than or equal to 1.' + end + @rules = rules + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + enabled == o.enabled && + id == o.id && + include == o.include && + rules == o.rules && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [enabled, id, include, rules, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb new file mode 100644 index 00000000000..fb083ab51a1 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb @@ -0,0 +1,167 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Defines a rule for filtering metric tags based on key patterns. + class ObservabilityPipelineMetricTagsProcessorRule + include BaseGenericModel + + # The action to take on tags with matching keys. + attr_reader :action + + # A list of tag keys to include or exclude. + attr_reader :keys + + # The processing mode for tag filtering. + attr_reader :mode + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'action' => :'action', + :'keys' => :'keys', + :'mode' => :'mode' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'action' => :'ObservabilityPipelineMetricTagsProcessorRuleAction', + :'keys' => :'Array', + :'mode' => :'ObservabilityPipelineMetricTagsProcessorRuleMode' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessorRule` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'action') + self.action = attributes[:'action'] + end + + if attributes.key?(:'keys') + if (value = attributes[:'keys']).is_a?(Array) + self.keys = value + end + end + + if attributes.key?(:'mode') + self.mode = attributes[:'mode'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @action.nil? + return false if @keys.nil? + return false if @mode.nil? + true + end + + # Custom attribute writer method with validation + # @param action [Object] Object to be assigned + # @!visibility private + def action=(action) + if action.nil? + fail ArgumentError, 'invalid value for "action", action cannot be nil.' + end + @action = action + end + + # Custom attribute writer method with validation + # @param keys [Object] Object to be assigned + # @!visibility private + def keys=(keys) + if keys.nil? + fail ArgumentError, 'invalid value for "keys", keys cannot be nil.' + end + @keys = keys + end + + # Custom attribute writer method with validation + # @param mode [Object] Object to be assigned + # @!visibility private + def mode=(mode) + if mode.nil? + fail ArgumentError, 'invalid value for "mode", mode cannot be nil.' + end + @mode = mode + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + action == o.action && + keys == o.keys && + mode == o.mode && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [action, keys, mode, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb new file mode 100644 index 00000000000..253bdb73f64 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The action to take on tags with matching keys. + class ObservabilityPipelineMetricTagsProcessorRuleAction + include BaseEnumModel + + INCLUDE = "include".freeze + EXCLUDE = "exclude".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb new file mode 100644 index 00000000000..019343fe1a3 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processing mode for tag filtering. + class ObservabilityPipelineMetricTagsProcessorRuleMode + include BaseEnumModel + + FILTER = "filter".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb new file mode 100644 index 00000000000..0ddd08d1d7c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `metric_tags`. 
+ class ObservabilityPipelineMetricTagsProcessorType + include BaseEnumModel + + METRIC_TAGS = "metric_tags".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb index d8705a24d96..79118217199 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `new_relic` destination sends logs to the New Relic platform. + # + # **Supported pipeline types:** logs class ObservabilityPipelineNewRelicDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb index a483ca5bbde..c76613e22bc 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `ocsf_mapper` processor transforms logs into the OCSF schema using a predefined mapping configuration. + # + # **Supported pipeline types:** logs class ObservabilityPipelineOcsfMapperProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb index 2120c2eef85..09331c31ee2 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `opensearch` destination writes logs to an OpenSearch cluster. + # + # **Supported pipeline types:** logs class ObservabilityPipelineOpenSearchDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb index 3fa75b46f8a..d93b1dc06f7 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. + # + # **Supported pipeline types:** logs class ObservabilityPipelineParseGrokProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb index ef5839c3709..c692004ed65 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineParseJSONProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb index 48b8ef8622a..86332ff3af0 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb @@ -17,7 +17,9 @@ require 'time' module DatadogAPIClient::V2 - # The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + # The `quota` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + # + # **Supported pipeline types:** logs class ObservabilityPipelineQuotaProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb index 1e5b4c0c8db..6a638e3ef62 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `reduce` processor aggregates and merges logs based on matching keys and merge strategies. + # + # **Supported pipeline types:** logs class ObservabilityPipelineReduceProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb index a6c12fffa56..cb54c09855b 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `remove_fields` processor deletes specified fields from logs. + # + # **Supported pipeline types:** logs class ObservabilityPipelineRemoveFieldsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb index a0ab10e93ae..32130653789 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `rename_fields` processor changes field names. + # + # **Supported pipeline types:** logs class ObservabilityPipelineRenameFieldsProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb index 4c15ba87ef6..c3151af7153 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineRsyslogDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb index 51a55a2daf1..1f343d8e4fe 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` server using the syslog protocol. + # + # **Supported pipeline types:** logs class ObservabilityPipelineRsyslogSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb index e8e7e7af2b7..377d28f8449 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sample` processor allows probabilistic sampling of logs at a fixed rate. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSampleProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb index 898739512aa..ae85fbb9e62 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sensitive_data_scanner` processor detects and optionally redacts sensitive data in log events. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSensitiveDataScannerProcessor include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb index 9d0af0150f2..4b3548624a7 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sentinel_one` destination sends logs to SentinelOne. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSentinelOneDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb index bbe02582079..5a8c561616c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `socket` destination sends logs over TCP or UDP to a remote server. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineSocketDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb index 17356b2960a..b1c85ce0dab 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `socket` source ingests logs over TCP or UDP. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSocketSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb index dd77cfec925..a3a8137492a 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). + # + # **Supported pipeline types:** logs class ObservabilityPipelineSplunkHecDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_source.rb index f3402f6b6db..9cf5eb56711 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `splunk_hec` source implements the Splunk HTTP Event Collector (HEC) API. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSplunkHecSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb index 66d3633cd1b..2f91c150b70 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb @@ -19,6 +19,8 @@ module DatadogAPIClient::V2 # The `splunk_tcp` source receives logs from a Splunk Universal Forwarder over TCP. # TLS is supported for secure transmission. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSplunkTcpSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb index 6193fb57f02..7a3c31742c5 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sumo_logic` destination forwards logs to Sumo Logic. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineSumoLogicDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb index e4b3bcf3c83..5840844f276 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sumo_logic` source receives logs from Sumo Logic collectors. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSumoLogicSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb index 045dcfe9040..132ffd70a89 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSyslogNgDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb index 85f107e721c..2666604e126 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `syslog_ng` source listens for logs over TCP or UDP from a `syslog-ng` server using the syslog protocol. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSyslogNgSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb index 3d19ef6446c..74750aec719 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `throttle` processor limits the number of events that pass through over a given time window. + # + # **Supported pipeline types:** logs class ObservabilityPipelineThrottleProcessor include BaseGenericModel
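For reference, below is a minimal sketch of how the new `metric_tags` processor models introduced by this patch might be instantiated from Ruby, assuming the generated client's usual hash-based constructors. The component `id`, the `include` search query, and the tag keys are illustrative placeholders, not values taken from the spec.

require "datadog_api_client"

# Hypothetical rule: drop the `pod_name` tag from matching metrics.
rule = DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessorRule.new(
  action: DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessorRuleAction::EXCLUDE, # or INCLUDE
  keys: ["pod_name"],                                                                        # illustrative tag keys
  mode: DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessorRuleMode::FILTER
)

processor = DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessor.new(
  enabled: true,
  id: "filter-metric-tags",   # illustrative component id, referenced as `input` by downstream components
  include: "service:web",     # illustrative Datadog search query selecting the targeted metrics
  rules: [rule],              # model validation accepts between 1 and 100 rules
  type: DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessorType::METRIC_TAGS
)

processor.valid? # => true once all required fields are set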