Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions .generator/schemas/v2/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -36030,6 +36030,8 @@ components:
description: The index to write logs to in Elasticsearch.
example: logs-index
type: string
data_stream:
$ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationDataStream'
id:
description: The unique identifier for this component.
example: elasticsearch-destination
Expand Down Expand Up @@ -36063,6 +36065,23 @@ components:
- V6
- V7
- V8
ObservabilityPipelineElasticsearchDestinationDataStream:
description: Configuration options for writing to Elasticsearch Data Streams
instead of a fixed index.
properties:
dataset:
description: The data stream dataset for your logs. This groups logs by
their source or application.
type: string
dtype:
description: The data stream type for your logs. This determines how logs
are categorized within the data stream.
type: string
namespace:
description: The data stream namespace for your logs. This separates logs
into different environments or domains.
type: string
type: object
ObservabilityPipelineElasticsearchDestinationType:
default: elasticsearch
description: The destination type. The value should always be `elasticsearch`.
Expand Down
1 change: 1 addition & 0 deletions lib/datadog_api_client/inflector.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3182,6 +3182,7 @@ def overrides
"v2.observability_pipeline_dedupe_processor_type" => "ObservabilityPipelineDedupeProcessorType",
"v2.observability_pipeline_elasticsearch_destination" => "ObservabilityPipelineElasticsearchDestination",
"v2.observability_pipeline_elasticsearch_destination_api_version" => "ObservabilityPipelineElasticsearchDestinationApiVersion",
"v2.observability_pipeline_elasticsearch_destination_data_stream" => "ObservabilityPipelineElasticsearchDestinationDataStream",
"v2.observability_pipeline_elasticsearch_destination_type" => "ObservabilityPipelineElasticsearchDestinationType",
"v2.observability_pipeline_enrichment_table_file" => "ObservabilityPipelineEnrichmentTableFile",
"v2.observability_pipeline_enrichment_table_file_encoding" => "ObservabilityPipelineEnrichmentTableFileEncoding",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@ class ObservabilityPipelineElasticsearchDestination
# The index to write logs to in Elasticsearch.
attr_accessor :bulk_index

# Configuration options for writing to Elasticsearch Data Streams instead of a fixed index.
attr_accessor :data_stream

# The unique identifier for this component.
attr_reader :id

Expand All @@ -44,6 +47,7 @@ def self.attribute_map
{
:'api_version' => :'api_version',
:'bulk_index' => :'bulk_index',
:'data_stream' => :'data_stream',
:'id' => :'id',
:'inputs' => :'inputs',
:'type' => :'type'
Expand All @@ -56,6 +60,7 @@ def self.openapi_types
{
:'api_version' => :'ObservabilityPipelineElasticsearchDestinationApiVersion',
:'bulk_index' => :'String',
:'data_stream' => :'ObservabilityPipelineElasticsearchDestinationDataStream',
:'id' => :'String',
:'inputs' => :'Array<String>',
:'type' => :'ObservabilityPipelineElasticsearchDestinationType'
Expand Down Expand Up @@ -88,6 +93,10 @@ def initialize(attributes = {})
self.bulk_index = attributes[:'bulk_index']
end

if attributes.key?(:'data_stream')
self.data_stream = attributes[:'data_stream']
end

if attributes.key?(:'id')
self.id = attributes[:'id']
end
Expand Down Expand Up @@ -171,6 +180,7 @@ def ==(o)
self.class == o.class &&
api_version == o.api_version &&
bulk_index == o.bulk_index &&
data_stream == o.data_stream &&
id == o.id &&
inputs == o.inputs &&
type == o.type &&
Expand All @@ -181,7 +191,7 @@ def ==(o)
# @return [Integer] Hash code
# @!visibility private
def hash
[api_version, bulk_index, id, inputs, type, additional_properties].hash
[api_version, bulk_index, data_stream, id, inputs, type, additional_properties].hash
end
end
end
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
=begin
#Datadog API V2 Collection

#Collection of all Datadog Public endpoints.

The version of the OpenAPI document: 1.0
Contact: support@datadoghq.com
Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator

Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.

=end

require 'date'
require 'time'

module DatadogAPIClient::V2
  # Configuration options for writing to Elasticsearch Data Streams instead of a fixed index.
  class ObservabilityPipelineElasticsearchDestinationDataStream
    include BaseGenericModel

    # The data stream dataset for your logs. This groups logs by their source or application.
    attr_accessor :dataset

    # The data stream type for your logs. This determines how logs are categorized within the data stream.
    attr_accessor :dtype

    # The data stream namespace for your logs. This separates logs into different environments or domains.
    attr_accessor :namespace

    # Holds any JSON keys received that are not declared in the attribute map.
    attr_accessor :additional_properties

    # Attribute mapping from ruby-style variable name to JSON key.
    # @!visibility private
    def self.attribute_map
      {
        dataset: :dataset,
        dtype: :dtype,
        namespace: :namespace
      }
    end

    # Attribute type mapping.
    # @!visibility private
    def self.openapi_types
      {
        dataset: :String,
        dtype: :String,
        namespace: :String
      }
    end

    # Initializes the object
    # @param attributes [Hash] Model attributes in the form of hash
    # @!visibility private
    def initialize(attributes = {})
      unless attributes.is_a?(Hash)
        fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineElasticsearchDestinationDataStream` initialize method"
      end

      self.additional_properties = {}
      known_keys = self.class.attribute_map

      # Normalize string keys to symbols; route undeclared keys into additional_properties.
      recognized = {}
      attributes.each do |key, value|
        sym = key.to_sym
        if known_keys.key?(sym)
          recognized[sym] = value
        else
          self.additional_properties[sym] = value
        end
      end

      self.dataset = recognized[:dataset] if recognized.key?(:dataset)
      self.dtype = recognized[:dtype] if recognized.key?(:dtype)
      self.namespace = recognized[:namespace] if recognized.key?(:namespace)
    end

    # Returns the object in the form of hash, with additionalProperties support.
    # @return [Hash] Returns the object in the form of hash
    # @!visibility private
    def to_hash
      result = {}
      self.class.attribute_map.each_pair do |attr, json_key|
        value = send(attr)
        if value.nil?
          nullable = self.class.openapi_nullable.include?(attr)
          # Skip nil values unless the field is nullable AND was explicitly set.
          next if !nullable || (nullable && !instance_variable_defined?(:"@#{attr}"))
        end

        result[json_key] = _to_hash(value)
      end
      additional_properties.each_pair do |json_key, value|
        result[json_key] = value
      end
      result
    end

    # Checks equality by comparing each attribute.
    # @param o [Object] Object to be compared
    # @!visibility private
    def ==(o)
      return true if equal?(o)

      self.class == o.class &&
        dataset == o.dataset &&
        dtype == o.dtype &&
        namespace == o.namespace &&
        additional_properties == o.additional_properties
    end

    # Calculates hash code according to all attributes.
    # @return [Integer] Hash code
    # @!visibility private
    def hash
      [dataset, dtype, namespace, additional_properties].hash
    end
  end
end
Loading