From 2a2ff3bb6aaa18a25ea7a0c829e70f2495904c22 Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Fri, 2 Jan 2026 14:41:35 +0000 Subject: [PATCH] Regenerate client from commit 0db975e of spec repo --- .generator/schemas/v2/openapi.yaml | 720 ++++++++-- .../CreatePipeline.java | 2 +- .../UpdatePipeline.java | 2 +- .../ValidatePipeline.java | 2 +- .../com/datadog/api/client/ApiClient.java | 12 +- .../v2/api/ObservabilityPipelinesApi.java | 12 +- ...rvabilityPipelineAddHostnameProcessor.java | 268 ++++ ...ilityPipelineAddHostnameProcessorType.java | 63 + ...rvabilityPipelineCloudPremDestination.java | 219 +++ ...ilityPipelineCloudPremDestinationType.java | 62 + .../v2/model/ObservabilityPipelineConfig.java | 44 +- ...vabilityPipelineConfigDestinationItem.java | 197 ++- ...ervabilityPipelineConfigProcessorItem.java | 1250 ++++++++++------- ...ObservabilityPipelineConfigSourceItem.java | 85 +- ...ilityPipelineElasticsearchDestination.java | 31 +- ...ineElasticsearchDestinationDataStream.java | 203 +++ ...ilityPipelineEnrichmentTableProcessor.java | 46 +- ...PipelineEnrichmentTableReferenceTable.java | 219 +++ ...ObservabilityPipelineKafkaDestination.java | 595 ++++++++ ...tyPipelineKafkaDestinationCompression.java | 71 + ...ilityPipelineKafkaDestinationEncoding.java | 65 + ...rvabilityPipelineKafkaDestinationType.java | 62 + ...abilityPipelineKafkaLibrdkafkaOption.java} | 35 +- ...va => ObservabilityPipelineKafkaSasl.java} | 30 +- ...servabilityPipelineKafkaSaslMechanism.java | 67 + .../ObservabilityPipelineKafkaSource.java | 20 +- ...ervabilityPipelineOpentelemetrySource.java | 274 ++++ ...bilityPipelineOpentelemetrySourceType.java | 63 + ...bservabilityPipelineParseXMLProcessor.java | 503 +++++++ ...vabilityPipelineParseXMLProcessorType.java | 62 + ...elinePipelineKafkaSourceSaslMechanism.java | 67 - .../ObservabilityPipelineQuotaProcessor.java | 52 +- ...yPipelineQuotaProcessorOverflowAction.java | 6 +- .../ObservabilityPipelineSampleProcessor.java | 71 +- ...aScannerProcessorCustomPatternOptions.java | 32 +- ...ScannerProcessorLibraryPatternOptions.java | 32 +- ...ervabilityPipelineSplitArrayProcessor.java | 314 +++++ ...ipelineSplitArrayProcessorArrayConfig.java | 180 +++ ...bilityPipelineSplitArrayProcessorType.java | 63 + .../com/datadog/api/client/v2/api/given.json | 24 +- .../v2/api/observability_pipelines.feature | 18 +- .../com/datadog/api/client/v2/api/undo.json | 50 +- 42 files changed, 5267 insertions(+), 926 deletions(-) create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationDataStream.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableReferenceTable.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java create mode 100644 
src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java rename src/main/java/com/datadog/api/client/v2/model/{ObservabilityPipelineKafkaSourceLibrdkafkaOption.java => ObservabilityPipelineKafkaLibrdkafkaOption.java} (78%) rename src/main/java/com/datadog/api/client/v2/model/{ObservabilityPipelineKafkaSourceSasl.java => ObservabilityPipelineKafkaSasl.java} (78%) create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySource.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySourceType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java delete mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index d2192b74841..dbe6ebb4cdc 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -35178,6 +35178,43 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAddHostnameProcessor: + description: The `add_hostname` processor adds the hostname to log events. + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: add-hostname-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessorType' + required: + - id + - type + - include + - enabled + type: object + ObservabilityPipelineAddHostnameProcessorType: + default: add_hostname + description: The processor type. The value should always be `add_hostname`. + enum: + - add_hostname + example: add_hostname + type: string + x-enum-varnames: + - ADD_HOSTNAME ObservabilityPipelineAmazonDataFirehoseSource: description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. properties: @@ -35455,6 +35492,37 @@ components: role session. type: string type: object + ObservabilityPipelineCloudPremDestination: + description: The `cloud_prem` destination sends logs to Datadog CloudPrem. + properties: + id: + description: The unique identifier for this component. + example: cloud-prem-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. 
+ example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineCloudPremDestinationType: + default: cloud_prem + description: The destination type. The value should always be `cloud_prem`. + enum: + - cloud_prem + example: cloud_prem + type: string + x-enum-varnames: + - CLOUD_PREM ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35473,7 +35541,7 @@ components: items: $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem' type: array - processors: + processor_groups: description: A list of processor groups that transform or enrich log data. example: - enabled: true @@ -35510,6 +35578,7 @@ components: description: A destination for the pipeline. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestination' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' @@ -35528,6 +35597,7 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaDestination' ObservabilityPipelineConfigProcessorGroup: description: A group of processors. example: @@ -35600,24 +35670,27 @@ components: ObservabilityPipelineConfigProcessorItem: description: A processor for the pipeline. 
oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. oneOf: @@ -35637,6 +35710,7 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' - $ref: '#/components/schemas/ObservabilityPipelineSocketSource' + - $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySource' ObservabilityPipelineCrowdStrikeNextGenSiemDestination: description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. @@ -36030,6 +36104,8 @@ components: description: The index to write logs to in Elasticsearch. example: logs-index type: string + data_stream: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationDataStream' id: description: The unique identifier for this component. example: elasticsearch-destination @@ -36063,6 +36139,23 @@ components: - V6 - V7 - V8 + ObservabilityPipelineElasticsearchDestinationDataStream: + description: Configuration options for writing to Elasticsearch Data Streams + instead of a fixed index. + properties: + dataset: + description: The data stream dataset for your logs. This groups logs by + their source or application. 
+ type: string + dtype: + description: The data stream type for your logs. This determines how logs + are categorized within the data stream. + type: string + namespace: + description: The data stream namespace for your logs. This separates logs + into different environments or domains. + type: string + type: object ObservabilityPipelineElasticsearchDestinationType: default: elasticsearch description: The destination type. The value should always be `elasticsearch`. @@ -36203,7 +36296,8 @@ components: type: object ObservabilityPipelineEnrichmentTableProcessor: description: The `enrichment_table` processor enriches logs using a static CSV - file or GeoIP database. + file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, + or `reference_table` must be configured. properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36224,6 +36318,8 @@ components: targets. example: source:my-source type: string + reference_table: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableReferenceTable' target: description: Path where enrichment results should be stored in the log. example: enriched.geoip @@ -36246,6 +36342,28 @@ components: type: string x-enum-varnames: - ENRICHMENT_TABLE + ObservabilityPipelineEnrichmentTableReferenceTable: + description: Uses a Datadog reference table to enrich logs. + properties: + columns: + description: List of column names to include from the reference table. If + not provided, all columns are included. + items: + type: string + type: array + key_field: + description: Path to the field in the log event to match against the reference + table. + example: log.user.id + type: string + table_id: + description: The unique identifier of the reference table. + example: 550e8400-e29b-41d4-a716-446655440000 + type: string + required: + - key_field + - table_id + type: object ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. properties: @@ -36826,6 +36944,151 @@ components: type: string x-enum-varnames: - HTTP_SERVER + ObservabilityPipelineKafkaDestination: + description: The `kafka` destination sends logs to Apache Kafka topics. + properties: + compression: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationEncoding' + headers_key: + description: The field name to use for Kafka message headers. + example: headers + type: string + id: + description: The unique identifier for this component. + example: kafka-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + key_field: + description: The field name to use as the Kafka message key. + example: message_id + type: string + librdkafka_options: + description: Optional list of advanced Kafka producer configuration options, + defined as key-value pairs. + items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' + type: array + message_timeout_ms: + description: Maximum time in milliseconds to wait for message delivery confirmation. + example: 300000 + format: int64 + minimum: 1 + type: integer + rate_limit_duration_secs: + description: Duration in seconds for the rate limit window. 
+ example: 1 + format: int64 + minimum: 1 + type: integer + rate_limit_num: + description: Maximum number of messages allowed per rate limit duration. + example: 1000 + format: int64 + minimum: 1 + type: integer + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + socket_timeout_ms: + description: Socket timeout in milliseconds for network requests. + example: 60000 + format: int64 + maximum: 300000 + minimum: 10 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topic: + description: The Kafka topic name to publish logs to. + example: logs-topic + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationType' + required: + - id + - type + - inputs + - topic + - encoding + type: object + ObservabilityPipelineKafkaDestinationCompression: + description: Compression codec for Kafka messages. + enum: + - none + - gzip + - snappy + - lz4 + - zstd + example: gzip + type: string + x-enum-varnames: + - NONE + - GZIP + - SNAPPY + - LZ4 + - ZSTD + ObservabilityPipelineKafkaDestinationEncoding: + description: Encoding format for log events. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineKafkaDestinationType: + default: kafka + description: The destination type. The value should always be `kafka`. + enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineKafkaLibrdkafkaOption: + description: Represents a key-value pair used to configure low-level `librdkafka` + client options for Kafka source and destination, such as timeouts, buffer + sizes, and security settings. + properties: + name: + description: The name of the `librdkafka` configuration option to set. + example: fetch.message.max.bytes + type: string + value: + description: The value assigned to the specified `librdkafka` configuration + option. + example: '1048576' + type: string + required: + - name + - value + type: object + ObservabilityPipelineKafkaSasl: + description: Specifies the SASL mechanism for authenticating with a Kafka cluster. + properties: + mechanism: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSaslMechanism' + type: object + ObservabilityPipelineKafkaSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 ObservabilityPipelineKafkaSource: description: The `kafka` source ingests data from Apache Kafka topics. properties: @@ -36843,10 +37106,10 @@ components: description: Optional list of advanced Kafka client configuration options, defined as key-value pairs. items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' type: array sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' tls: $ref: '#/components/schemas/ObservabilityPipelineTls' topics: @@ -36866,30 +37129,6 @@ components: - group_id - topics type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: - description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. - properties: - name: - description: The name of the `librdkafka` configuration option to set. 
- example: fetch.message.max.bytes - type: string - value: - description: The value assigned to the specified `librdkafka` configuration - option. - example: '1048576' - type: string - required: - - name - - value - type: object - ObservabilityPipelineKafkaSourceSasl: - description: Specifies the SASL mechanism for authenticating with a Kafka cluster. - properties: - mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' - type: object ObservabilityPipelineKafkaSourceType: default: kafka description: The source type. The value should always be `kafka`. @@ -37116,6 +37355,45 @@ components: type: string x-enum-varnames: - OPENSEARCH + ObservabilityPipelineOpentelemetrySource: + description: The `opentelemetry` source receives telemetry data using the OpenTelemetry + Protocol (OTLP) over gRPC and HTTP. + properties: + grpc_address_key: + description: Environment variable name containing the gRPC server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_GRPC_ADDRESS + type: string + http_address_key: + description: Environment variable name containing the HTTP server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_HTTP_ADDRESS + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: opentelemetry-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySourceType' + required: + - id + - type + type: object + ObservabilityPipelineOpentelemetrySourceType: + default: opentelemetry + description: The source type. The value should always be `opentelemetry`. + enum: + - opentelemetry + example: opentelemetry + type: string + x-enum-varnames: + - OPENTELEMETRY ObservabilityPipelineParseGrokProcessor: description: The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. @@ -37279,17 +37557,72 @@ components: type: string x-enum-varnames: - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. + ObservabilityPipelineParseXMLProcessor: + description: The `parse_xml` processor parses XML from a specified field and + extracts it into the event. + properties: + always_use_text_key: + description: Whether to always use a text key for element content. + type: boolean + attr_prefix: + description: The prefix to use for XML attributes in the parsed output. + type: string + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + field: + description: The name of the log field that contains an XML string. + example: message + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: parse-xml-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + include_attr: + description: Whether to include XML attributes in the parsed output. 
+ type: boolean + parse_bool: + description: Whether to parse boolean values from strings. + type: boolean + parse_null: + description: Whether to parse null values. + type: boolean + parse_number: + description: Whether to parse numeric values from strings. + type: boolean + text_key: + description: The key name to use for text content within XML elements. Must + be at least 1 character if specified. + minLength: 1 + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessorType' + required: + - id + - type + - include + - field + - enabled + type: object + ObservabilityPipelineParseXMLProcessorType: + default: parse_xml + description: The processor type. The value should always be `parse_xml`. enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 + - parse_xml + example: parse_xml type: string x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 + - PARSE_XML ObservabilityPipelineQuotaProcessor: description: The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor @@ -37298,9 +37631,10 @@ components: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' example: false type: boolean enabled: @@ -37344,6 +37678,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -37381,7 +37717,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. @@ -37711,6 +38048,16 @@ components: description: Whether this processor is enabled. example: true type: boolean + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. + example: + - service + - host + items: + type: string + minItems: 1 + type: array id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` @@ -37727,18 +38074,13 @@ components: example: 10.0 format: double type: number - rate: - description: Number of events to sample (1 in N). - example: 10 - format: int64 - minimum: 1 - type: integer type: $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' required: - id - type - include + - percentage - enabled type: object ObservabilityPipelineSampleProcessorType: @@ -37907,6 +38249,11 @@ components: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions: description: Options for defining a custom regex pattern. 
properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule. + example: "Custom regex for internal API keys" + type: string rule: description: A regular expression used to detect sensitive values. Must be a valid regex. @@ -37962,6 +38309,11 @@ components: description: Options for selecting a predefined library pattern and enabling keyword support. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule. + example: Credit card pattern + type: string id: description: Identifier for a predefined pattern from the sensitive data scanner pattern library. @@ -38442,6 +38794,68 @@ components: - type - attributes type: object + ObservabilityPipelineSplitArrayProcessor: + description: The `split_array` processor splits array fields into separate events + based on configured rules. + properties: + arrays: + description: A list of array split configurations. + items: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorArrayConfig' + maxItems: 15 + minItems: 1 + type: array + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: split-array-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. For split_array, this should typically be `*`. + example: '*' + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorType' + required: + - id + - type + - include + - arrays + - enabled + type: object + ObservabilityPipelineSplitArrayProcessorArrayConfig: + description: Configuration for a single array split operation. + properties: + field: + description: The path to the array field to split. + example: tags + type: string + include: + description: A Datadog search query used to determine which logs this array + split operation targets. + example: '*' + type: string + required: + - include + - field + type: object + ObservabilityPipelineSplitArrayProcessorType: + default: split_array + description: The processor type. The value should always be `split_array`. + enum: + - split_array + example: split_array + type: string + x-enum-varnames: + - SPLIT_ARRAY ObservabilityPipelineSplunkHecDestination: description: The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). @@ -75879,6 +76293,103 @@ paths: summary: Get all aggregated DNS traffic tags: - Cloud Network Monitoring + /api/v2/obs-pipelines/pipelines: + get: + description: Retrieve a list of pipelines. + operationId: ListPipelines + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListPipelinesResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: List pipelines + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. 
Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + post: + description: Create a new pipeline. + operationId: CreatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/validate: + post: + description: 'Validates a pipeline configuration without creating or updating + any resources. + + Returns a list of validation errors, if any.' + operationId: ValidatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ValidationResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Validate an observability pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' /api/v2/on-call/escalation-policies: post: description: Create a new On-Call escalation policy @@ -79313,103 +79824,6 @@ paths: tags: - CSM Threats x-codegen-request-body-name: body - /api/v2/remote_config/products/obs_pipelines/pipelines: - get: - description: Retrieve a list of pipelines. - operationId: ListPipelines - parameters: - - $ref: '#/components/parameters/PageSize' - - $ref: '#/components/parameters/PageNumber' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ListPipelinesResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: List pipelines - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - post: - description: Create a new pipeline. 
- operationId: CreatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Create a new pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/validate: - post: - description: 'Validates a pipeline configuration without creating or updating - any resources. - - Returns a list of validation errors, if any.' - operationId: ValidatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ValidationResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Validate an observability pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: delete: description: Delete a pipeline. 
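For reference, a minimal sketch of constructing the new ObservabilityPipelineKafkaDestination model introduced by the spec changes above. This snippet is not part of the patch: the fluent setter names and enum constants are assumed to follow the same generated-model conventions visible for the other classes in this diff, and the values mirror the schema examples.

import com.datadog.api.client.v2.model.ObservabilityPipelineKafkaDestination;
import com.datadog.api.client.v2.model.ObservabilityPipelineKafkaDestinationCompression;
import com.datadog.api.client.v2.model.ObservabilityPipelineKafkaDestinationEncoding;
import java.util.Collections;

public class KafkaDestinationSketch {
  public static void main(String[] args) {
    // Required fields per the schema: id, type, inputs, topic, encoding.
    // `type` defaults to KAFKA in the generated model, so it is not set here.
    ObservabilityPipelineKafkaDestination destination =
        new ObservabilityPipelineKafkaDestination()
            .id("kafka-destination")
            .inputs(Collections.singletonList("filter-processor"))
            .topic("logs-topic")
            .encoding(ObservabilityPipelineKafkaDestinationEncoding.JSON)
            // Optional: compress messages with gzip.
            .compression(ObservabilityPipelineKafkaDestinationCompression.GZIP);
    System.out.println(destination);
  }
}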
diff --git a/examples/v2/observability-pipelines/CreatePipeline.java b/examples/v2/observability-pipelines/CreatePipeline.java index dfa818395c3..1d8a262de7a 100644 --- a/examples/v2/observability-pipelines/CreatePipeline.java +++ b/examples/v2/observability-pipelines/CreatePipeline.java @@ -45,7 +45,7 @@ public static void main(String[] args) { .type( ObservabilityPipelineDatadogLogsDestinationType .DATADOG_LOGS)))) - .processors( + .processorGroups( Collections.singletonList( new ObservabilityPipelineConfigProcessorGroup() .enabled(true) diff --git a/examples/v2/observability-pipelines/UpdatePipeline.java b/examples/v2/observability-pipelines/UpdatePipeline.java index a62545d57d4..309f8652c78 100644 --- a/examples/v2/observability-pipelines/UpdatePipeline.java +++ b/examples/v2/observability-pipelines/UpdatePipeline.java @@ -47,7 +47,7 @@ public static void main(String[] args) { .type( ObservabilityPipelineDatadogLogsDestinationType .DATADOG_LOGS)))) - .processors( + .processorGroups( Collections.singletonList( new ObservabilityPipelineConfigProcessorGroup() .enabled(true) diff --git a/examples/v2/observability-pipelines/ValidatePipeline.java b/examples/v2/observability-pipelines/ValidatePipeline.java index 667de7423d7..7a62f7ca31c 100644 --- a/examples/v2/observability-pipelines/ValidatePipeline.java +++ b/examples/v2/observability-pipelines/ValidatePipeline.java @@ -45,7 +45,7 @@ public static void main(String[] args) { .type( ObservabilityPipelineDatadogLogsDestinationType .DATADOG_LOGS)))) - .processors( + .processorGroups( Collections.singletonList( new ObservabilityPipelineConfigProcessorGroup() .enabled(true) diff --git a/src/main/java/com/datadog/api/client/ApiClient.java b/src/main/java/com/datadog/api/client/ApiClient.java index d9c3f82eb2b..165ee8d58f6 100644 --- a/src/main/java/com/datadog/api/client/ApiClient.java +++ b/src/main/java/com/datadog/api/client/ApiClient.java @@ -800,6 +800,12 @@ public class ApiClient { put("v2.updateMonitorUserTemplate", false); put("v2.validateExistingMonitorUserTemplate", false); put("v2.validateMonitorUserTemplate", false); + put("v2.createPipeline", false); + put("v2.deletePipeline", false); + put("v2.getPipeline", false); + put("v2.listPipelines", false); + put("v2.updatePipeline", false); + put("v2.validatePipeline", false); put("v2.listRoleTemplates", false); put("v2.createConnection", false); put("v2.deleteConnection", false); @@ -811,12 +817,6 @@ public class ApiClient { put("v2.queryEventFilteredUsers", false); put("v2.queryUsers", false); put("v2.updateConnection", false); - put("v2.createPipeline", false); - put("v2.deletePipeline", false); - put("v2.getPipeline", false); - put("v2.listPipelines", false); - put("v2.updatePipeline", false); - put("v2.validatePipeline", false); put("v2.createScorecardOutcomesBatch", false); put("v2.createScorecardRule", false); put("v2.deleteScorecardRule", false); diff --git a/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java b/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java index fc9fc737859..c67f32860ad 100644 --- a/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java +++ b/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java @@ -111,7 +111,7 @@ public ApiResponse createPipelineWithHttpInfo( 400, "Missing the required parameter 'body' when calling createPipeline"); } // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + String localVarPath = 
"/api/v2/obs-pipelines/pipelines"; Map localVarHeaderParams = new HashMap(); @@ -166,7 +166,7 @@ public CompletableFuture> createPipelineWithH return result; } // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + String localVarPath = "/api/v2/obs-pipelines/pipelines"; Map localVarHeaderParams = new HashMap(); @@ -618,7 +618,7 @@ public ApiResponse listPipelinesWithHttpInfo( Long pageSize = parameters.pageSize; Long pageNumber = parameters.pageNumber; // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + String localVarPath = "/api/v2/obs-pipelines/pipelines"; List localVarQueryParams = new ArrayList(); Map localVarHeaderParams = new HashMap(); @@ -670,7 +670,7 @@ public CompletableFuture> listPipelinesWithHt Long pageSize = parameters.pageSize; Long pageNumber = parameters.pageNumber; // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + String localVarPath = "/api/v2/obs-pipelines/pipelines"; List localVarQueryParams = new ArrayList(); Map localVarHeaderParams = new HashMap(); @@ -946,7 +946,7 @@ public ApiResponse validatePipelineWithHttpInfo( 400, "Missing the required parameter 'body' when calling validatePipeline"); } // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines/validate"; + String localVarPath = "/api/v2/obs-pipelines/pipelines/validate"; Map localVarHeaderParams = new HashMap(); @@ -1001,7 +1001,7 @@ public CompletableFuture> validatePipelineWithHt return result; } // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines/validate"; + String localVarPath = "/api/v2/obs-pipelines/pipelines/validate"; Map localVarHeaderParams = new HashMap(); diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java new file mode 100644 index 00000000000..f6538cf3781 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java @@ -0,0 +1,268 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The add_hostname processor adds the hostname to log events. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_DISPLAY_NAME, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAddHostnameProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DISPLAY_NAME = "display_name"; + private String displayName; + + public static final String JSON_PROPERTY_ENABLED = "enabled"; + private Boolean enabled; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineAddHostnameProcessorType type = + ObservabilityPipelineAddHostnameProcessorType.ADD_HOSTNAME; + + public ObservabilityPipelineAddHostnameProcessor() {} + + @JsonCreator + public ObservabilityPipelineAddHostnameProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineAddHostnameProcessorType type) { + this.enabled = enabled; + this.id = id; + this.include = include; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineAddHostnameProcessor displayName(String displayName) { + this.displayName = displayName; + return this; + } + + /** + * The display name for a component. + * + * @return displayName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public ObservabilityPipelineAddHostnameProcessor enabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + /** + * Whether this processor is enabled. + * + * @return enabled + */ + @JsonProperty(JSON_PROPERTY_ENABLED) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public ObservabilityPipelineAddHostnameProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineAddHostnameProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. 
+ * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineAddHostnameProcessor type( + ObservabilityPipelineAddHostnameProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be add_hostname. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAddHostnameProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineAddHostnameProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAddHostnameProcessor + */ + @JsonAnySetter + public ObservabilityPipelineAddHostnameProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAddHostnameProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAddHostnameProcessor observabilityPipelineAddHostnameProcessor = + (ObservabilityPipelineAddHostnameProcessor) o; + return Objects.equals(this.displayName, observabilityPipelineAddHostnameProcessor.displayName) + && Objects.equals(this.enabled, observabilityPipelineAddHostnameProcessor.enabled) + && Objects.equals(this.id, observabilityPipelineAddHostnameProcessor.id) + && Objects.equals(this.include, observabilityPipelineAddHostnameProcessor.include) + && Objects.equals(this.type, observabilityPipelineAddHostnameProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineAddHostnameProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, enabled, id, include, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAddHostnameProcessor {\n"); + sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); + sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java new file mode 100644 index 00000000000..c635c6c8a00 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be add_hostname. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineAddHostnameProcessorType + .ObservabilityPipelineAddHostnameProcessorTypeSerializer.class) +public class ObservabilityPipelineAddHostnameProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("add_hostname")); + + public static final ObservabilityPipelineAddHostnameProcessorType ADD_HOSTNAME = + new ObservabilityPipelineAddHostnameProcessorType("add_hostname"); + + ObservabilityPipelineAddHostnameProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAddHostnameProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineAddHostnameProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAddHostnameProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAddHostnameProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAddHostnameProcessorType fromValue(String value) { + return new ObservabilityPipelineAddHostnameProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java new file mode 100644 index 00000000000..fb9d1626d06 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java @@ -0,0 +1,219 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The cloud_prem destination sends logs to Datadog CloudPrem. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineCloudPremDestination.JSON_PROPERTY_ID, + ObservabilityPipelineCloudPremDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineCloudPremDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineCloudPremDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineCloudPremDestinationType type = + ObservabilityPipelineCloudPremDestinationType.CLOUD_PREM; + + public ObservabilityPipelineCloudPremDestination() {} + + @JsonCreator + public ObservabilityPipelineCloudPremDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineCloudPremDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineCloudPremDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineCloudPremDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineCloudPremDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineCloudPremDestination type( + ObservabilityPipelineCloudPremDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be cloud_prem. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineCloudPremDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineCloudPremDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineCloudPremDestination + */ + @JsonAnySetter + public ObservabilityPipelineCloudPremDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineCloudPremDestination object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineCloudPremDestination observabilityPipelineCloudPremDestination = + (ObservabilityPipelineCloudPremDestination) o; + return Objects.equals(this.id, observabilityPipelineCloudPremDestination.id) + && Objects.equals(this.inputs, observabilityPipelineCloudPremDestination.inputs) + && Objects.equals(this.type, observabilityPipelineCloudPremDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineCloudPremDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineCloudPremDestination {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java new file mode 100644 index 00000000000..e656648144e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java
new file mode 100644
index 00000000000..e656648144e
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java
@@ -0,0 +1,62 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The destination type. The value should always be <code>cloud_prem</code>. */
+@JsonSerialize(
+    using =
+        ObservabilityPipelineCloudPremDestinationType
+            .ObservabilityPipelineCloudPremDestinationTypeSerializer.class)
+public class ObservabilityPipelineCloudPremDestinationType extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues = new HashSet<String>(Arrays.asList("cloud_prem"));
+
+  public static final ObservabilityPipelineCloudPremDestinationType CLOUD_PREM =
+      new ObservabilityPipelineCloudPremDestinationType("cloud_prem");
+
+  ObservabilityPipelineCloudPremDestinationType(String value) {
+    super(value, allowedValues);
+  }
+
+  public static class ObservabilityPipelineCloudPremDestinationTypeSerializer
+      extends StdSerializer<ObservabilityPipelineCloudPremDestinationType> {
+    public ObservabilityPipelineCloudPremDestinationTypeSerializer(
+        Class<ObservabilityPipelineCloudPremDestinationType> t) {
+      super(t);
+    }
+
+    public ObservabilityPipelineCloudPremDestinationTypeSerializer() {
+      this(null);
+    }
+
+    @Override
+    public void serialize(
+        ObservabilityPipelineCloudPremDestinationType value,
+        JsonGenerator jgen,
+        SerializerProvider provider)
+        throws IOException, JsonProcessingException {
+      jgen.writeObject(value.value);
+    }
+  }
+
+  @JsonCreator
+  public static ObservabilityPipelineCloudPremDestinationType fromValue(String value) {
+    return new ObservabilityPipelineCloudPremDestinationType(value);
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
index 4acddfa3a70..d08246da28c 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
@@ -22,7 +22,7 @@
 /** Specifies the pipeline's configuration, including its sources, processors, and destinations. */
 @JsonPropertyOrder({
   ObservabilityPipelineConfig.JSON_PROPERTY_DESTINATIONS,
-  ObservabilityPipelineConfig.JSON_PROPERTY_PROCESSORS,
+  ObservabilityPipelineConfig.JSON_PROPERTY_PROCESSOR_GROUPS,
   ObservabilityPipelineConfig.JSON_PROPERTY_SOURCES
 })
 @jakarta.annotation.Generated(
@@ -32,8 +32,8 @@ public class ObservabilityPipelineConfig {
   public static final String JSON_PROPERTY_DESTINATIONS = "destinations";
   private List<ObservabilityPipelineConfigDestinationItem> destinations = new ArrayList<>();
 
-  public static final String JSON_PROPERTY_PROCESSORS = "processors";
-  private List<ObservabilityPipelineConfigProcessorGroup> processors = null;
+  public static final String JSON_PROPERTY_PROCESSOR_GROUPS = "processor_groups";
+  private List<ObservabilityPipelineConfigProcessorGroup> processorGroups = null;
 
   public static final String JSON_PROPERTY_SOURCES = "sources";
   private List<ObservabilityPipelineConfigSourceItem> sources = new ArrayList<>();
@@ -81,39 +81,39 @@ public void setDestinations(List<ObservabilityPipelineConfigDestinationItem> des
     this.destinations = destinations;
   }
 
-  public ObservabilityPipelineConfig processors(
-      List<ObservabilityPipelineConfigProcessorGroup> processors) {
-    this.processors = processors;
-    for (ObservabilityPipelineConfigProcessorGroup item : processors) {
+  public ObservabilityPipelineConfig processorGroups(
+      List<ObservabilityPipelineConfigProcessorGroup> processorGroups) {
+    this.processorGroups = processorGroups;
+    for (ObservabilityPipelineConfigProcessorGroup item : processorGroups) {
       this.unparsed |= item.unparsed;
     }
     return this;
   }
 
-  public ObservabilityPipelineConfig addProcessorsItem(
-      ObservabilityPipelineConfigProcessorGroup processorsItem) {
-    if (this.processors == null) {
-      this.processors = new ArrayList<>();
+  public ObservabilityPipelineConfig addProcessorGroupsItem(
+      ObservabilityPipelineConfigProcessorGroup processorGroupsItem) {
+    if (this.processorGroups == null) {
+      this.processorGroups = new ArrayList<>();
     }
-    this.processors.add(processorsItem);
-    this.unparsed |= processorsItem.unparsed;
+    this.processorGroups.add(processorGroupsItem);
+    this.unparsed |= processorGroupsItem.unparsed;
     return this;
   }
 
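Call sites that previously used addProcessorsItem(...) move to addProcessorGroupsItem(...); the list stays nullable and now serializes as "processor_groups". A short sketch of the new call shape (the no-argument ObservabilityPipelineConfigProcessorGroup constructor is assumed here; building a real group is outside this excerpt):

    import com.datadog.api.client.v2.model.ObservabilityPipelineConfig;
    import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorGroup;

    public class ProcessorGroupsExample {
      public static void main(String[] args) {
        // An empty group stands in for a fully configured one.
        ObservabilityPipelineConfigProcessorGroup group =
            new ObservabilityPipelineConfigProcessorGroup();
        ObservabilityPipelineConfig config =
            new ObservabilityPipelineConfig().addProcessorGroupsItem(group);
        System.out.println(config.getProcessorGroups());
      }
    }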
   /**
    * A list of processor groups that transform or enrich log data.
    *
-   * @return processors
+   * @return processorGroups
    */
   @jakarta.annotation.Nullable
-  @JsonProperty(JSON_PROPERTY_PROCESSORS)
+  @JsonProperty(JSON_PROPERTY_PROCESSOR_GROUPS)
   @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
-  public List<ObservabilityPipelineConfigProcessorGroup> getProcessors() {
-    return processors;
+  public List<ObservabilityPipelineConfigProcessorGroup> getProcessorGroups() {
+    return processorGroups;
   }
 
-  public void setProcessors(List<ObservabilityPipelineConfigProcessorGroup> processors) {
-    this.processors = processors;
+  public void setProcessorGroups(List<ObservabilityPipelineConfigProcessorGroup> processorGroups) {
+    this.processorGroups = processorGroups;
   }
 
   public ObservabilityPipelineConfig sources(List<ObservabilityPipelineConfigSourceItem> sources) {
@@ -203,7 +203,7 @@ public boolean equals(Object o) {
     }
     ObservabilityPipelineConfig observabilityPipelineConfig = (ObservabilityPipelineConfig) o;
     return Objects.equals(this.destinations, observabilityPipelineConfig.destinations)
-        && Objects.equals(this.processors, observabilityPipelineConfig.processors)
+        && Objects.equals(this.processorGroups, observabilityPipelineConfig.processorGroups)
         && Objects.equals(this.sources, observabilityPipelineConfig.sources)
         && Objects.equals(
             this.additionalProperties, observabilityPipelineConfig.additionalProperties);
@@ -211,7 +211,7 @@ public boolean equals(Object o) {
 
   @Override
   public int hashCode() {
-    return Objects.hash(destinations, processors, sources, additionalProperties);
+    return Objects.hash(destinations, processorGroups, sources, additionalProperties);
   }
 
   @Override
@@ -219,7 +219,7 @@ public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append("class ObservabilityPipelineConfig {\n");
     sb.append("    destinations: ").append(toIndentedString(destinations)).append("\n");
-    sb.append("    processors: ").append(toIndentedString(processors)).append("\n");
+    sb.append("    processorGroups: ").append(toIndentedString(processorGroups)).append("\n");
     sb.append("    sources: ").append(toIndentedString(sources)).append("\n");
     sb.append("    additionalProperties: ")
         .append(toIndentedString(additionalProperties))
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
index 2f4e68bdef4..8e45b1020e8 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
@@ -142,6 +142,57 @@ public ObservabilityPipelineConfigDestinationItem deserialize(
           e);
       }
 
+      // deserialize ObservabilityPipelineCloudPremDestination
+      try {
+        boolean attemptParsing = true;
+        // ensure that we respect type coercion as set on the client ObjectMapper
+        if (ObservabilityPipelineCloudPremDestination.class.equals(Integer.class)
+            || ObservabilityPipelineCloudPremDestination.class.equals(Long.class)
+            || ObservabilityPipelineCloudPremDestination.class.equals(Float.class)
+            || ObservabilityPipelineCloudPremDestination.class.equals(Double.class)
+            || ObservabilityPipelineCloudPremDestination.class.equals(Boolean.class)
+            || ObservabilityPipelineCloudPremDestination.class.equals(String.class)) {
+          attemptParsing = typeCoercion;
+          if (!attemptParsing) {
+            attemptParsing |=
+                ((ObservabilityPipelineCloudPremDestination.class.equals(Integer.class)
+                        || ObservabilityPipelineCloudPremDestination.class.equals(Long.class))
+                    && token == JsonToken.VALUE_NUMBER_INT);
+            attemptParsing |=
+                ((ObservabilityPipelineCloudPremDestination.class.equals(Float.class)
+                        || ObservabilityPipelineCloudPremDestination.class.equals(Double.class))
+                    && 
(token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineCloudPremDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineCloudPremDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineCloudPremDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineCloudPremDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineCloudPremDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineCloudPremDestination'", + e); + } + // deserialize ObservabilityPipelineAmazonS3Destination try { boolean attemptParsing = true; @@ -1067,6 +1118,55 @@ public ObservabilityPipelineConfigDestinationItem deserialize( e); } + // deserialize ObservabilityPipelineKafkaDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineKafkaDestination.class.equals(Integer.class) + || ObservabilityPipelineKafkaDestination.class.equals(Long.class) + || ObservabilityPipelineKafkaDestination.class.equals(Float.class) + || ObservabilityPipelineKafkaDestination.class.equals(Double.class) + || ObservabilityPipelineKafkaDestination.class.equals(Boolean.class) + || ObservabilityPipelineKafkaDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineKafkaDestination.class.equals(Integer.class) + || ObservabilityPipelineKafkaDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineKafkaDestination.class.equals(Float.class) + || ObservabilityPipelineKafkaDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineKafkaDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineKafkaDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineKafkaDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
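Each destination below repeats this same try-one-schema block. Distilled, the strategy is: attempt every candidate, count clean parses, and only commit when exactly one schema matched. A simplified standalone sketch of that idea (not the generated code itself, which additionally keeps an UnparsedObject fallback):

    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.List;

    public class OneOfMatchSketch {
      private static final ObjectMapper MAPPER =
          new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true);

      // Mirrors the `match` counter above: keep the result only when exactly
      // one candidate schema accepts the payload.
      public static Object deserializeOneOf(JsonNode node, List<Class<?>> candidates) {
        Object deserialized = null;
        int match = 0;
        for (Class<?> candidate : candidates) {
          try {
            deserialized = MAPPER.treeToValue(node, candidate);
            match++;
          } catch (Exception e) {
            // candidate rejected; try the next schema
          }
        }
        // The generated code wraps ambiguous or unmatched input as "unparsed";
        // null keeps this sketch short.
        return match == 1 ? deserialized : null;
      }
    }

As the TODO comments note, nothing validates enum or pattern constraints at this stage, so `match` can overcount; the per-candidate `unparsed` check is what filters most false positives.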
+        if (!((ObservabilityPipelineKafkaDestination) tmp).unparsed) {
+          deserialized = tmp;
+          match++;
+        }
+        log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineKafkaDestination'");
+      }
+    } catch (Exception e) {
+      // deserialization failed, continue
+      log.log(
+          Level.FINER,
+          "Input data does not match schema 'ObservabilityPipelineKafkaDestination'",
+          e);
+    }
+
     ObservabilityPipelineConfigDestinationItem ret =
         new ObservabilityPipelineConfigDestinationItem();
     if (match == 1) {
@@ -1103,6 +1203,11 @@ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineDatadogLo
     setActualInstance(o);
   }
 
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineCloudPremDestination o) {
+    super("oneOf", Boolean.FALSE);
+    setActualInstance(o);
+  }
+
   public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineAmazonS3Destination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
@@ -1200,10 +1305,18 @@ public ObservabilityPipelineConfigDestinationItem(
     setActualInstance(o);
   }
 
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineKafkaDestination o) {
+    super("oneOf", Boolean.FALSE);
+    setActualInstance(o);
+  }
+
   static {
     schemas.put(
         "ObservabilityPipelineDatadogLogsDestination",
         new GenericType<ObservabilityPipelineDatadogLogsDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineCloudPremDestination",
+        new GenericType<ObservabilityPipelineCloudPremDestination>() {});
     schemas.put(
         "ObservabilityPipelineAmazonS3Destination",
         new GenericType<ObservabilityPipelineAmazonS3Destination>() {});
@@ -1254,6 +1367,9 @@ public ObservabilityPipelineConfigDestinationItem(
     schemas.put(
         "ObservabilityPipelineGooglePubSubDestination",
         new GenericType<ObservabilityPipelineGooglePubSubDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineKafkaDestination",
+        new GenericType<ObservabilityPipelineKafkaDestination>() {});
     JSON.registerDescendants(
         ObservabilityPipelineConfigDestinationItem.class, Collections.unmodifiableMap(schemas));
   }
@@ -1266,16 +1382,17 @@ public Map<String, GenericType> getSchemas() {
   /**
    * Set the instance that matches the oneOf child schema, check the instance parameter is valid
    * against the oneOf child schemas: ObservabilityPipelineDatadogLogsDestination,
-   * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination,
-   * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
-   * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination,
-   * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination,
-   * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination,
-   * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination,
-   * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineAmazonOpenSearchDestination,
-   * ObservabilityPipelineSocketDestination, ObservabilityPipelineAmazonSecurityLakeDestination,
+   * ObservabilityPipelineCloudPremDestination, ObservabilityPipelineAmazonS3Destination,
+   * ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination,
+   * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineElasticsearchDestination,
+   * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination,
+   * AzureStorageDestination, MicrosoftSentinelDestination,
+   * ObservabilityPipelineGoogleChronicleDestination, ObservabilityPipelineNewRelicDestination,
+   * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination,
+   * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination,
+   * ObservabilityPipelineAmazonSecurityLakeDestination,
    * 
ObservabilityPipelineCrowdStrikeNextGenSiemDestination, - * ObservabilityPipelineGooglePubSubDestination + * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination * *
<p>
It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a
   * composed schema (allOf, anyOf, oneOf).
@@ -1287,6 +1404,11 @@ public void setActualInstance(Object instance) {
       super.setActualInstance(instance);
       return;
     }
+    if (JSON.isInstanceOf(
+        ObservabilityPipelineCloudPremDestination.class, instance, new HashSet<Class<?>>())) {
+      super.setActualInstance(instance);
+      return;
+    }
     if (JSON.isInstanceOf(
         ObservabilityPipelineAmazonS3Destination.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
@@ -1383,6 +1505,11 @@ public void setActualInstance(Object instance) {
       super.setActualInstance(instance);
       return;
     }
+    if (JSON.isInstanceOf(
+        ObservabilityPipelineKafkaDestination.class, instance, new HashSet<Class<?>>())) {
+      super.setActualInstance(instance);
+      return;
+    }
 
     if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
@@ -1390,6 +1517,7 @@ public void setActualInstance(Object instance) {
     }
     throw new RuntimeException(
         "Invalid instance type. Must be ObservabilityPipelineDatadogLogsDestination,"
+            + " ObservabilityPipelineCloudPremDestination,"
             + " ObservabilityPipelineAmazonS3Destination,"
             + " ObservabilityPipelineGoogleCloudStorageDestination,"
             + " ObservabilityPipelineSplunkHecDestination,"
@@ -1405,25 +1533,26 @@ public void setActualInstance(Object instance) {
             + " ObservabilityPipelineSocketDestination,"
             + " ObservabilityPipelineAmazonSecurityLakeDestination,"
            + " ObservabilityPipelineCrowdStrikeNextGenSiemDestination,"
-            + " ObservabilityPipelineGooglePubSubDestination");
+            + " ObservabilityPipelineGooglePubSubDestination,"
+            + " ObservabilityPipelineKafkaDestination");
   }
 
   /**
    * Get the actual instance, which can be the following:
-   * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineAmazonS3Destination,
-   * ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination,
-   * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineElasticsearchDestination,
-   * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination,
-   * AzureStorageDestination, MicrosoftSentinelDestination,
-   * ObservabilityPipelineGoogleChronicleDestination, ObservabilityPipelineNewRelicDestination,
-   * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination,
-   * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination,
-   * ObservabilityPipelineAmazonSecurityLakeDestination,
+   * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineCloudPremDestination,
+   * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination,
+   * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
+   * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination,
+   * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination,
+   * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination,
+   * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination,
+   * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineAmazonOpenSearchDestination,
+   * ObservabilityPipelineSocketDestination, ObservabilityPipelineAmazonSecurityLakeDestination,
    * ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
-   * ObservabilityPipelineGooglePubSubDestination
+   * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination
    *
    * @return The actual 
instance (ObservabilityPipelineDatadogLogsDestination, - * ObservabilityPipelineAmazonS3Destination, + * ObservabilityPipelineCloudPremDestination, ObservabilityPipelineAmazonS3Destination, * ObservabilityPipelineGoogleCloudStorageDestination, * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination, * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination, @@ -1434,7 +1563,7 @@ public void setActualInstance(Object instance) { * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination, * ObservabilityPipelineAmazonSecurityLakeDestination, * ObservabilityPipelineCrowdStrikeNextGenSiemDestination, - * ObservabilityPipelineGooglePubSubDestination) + * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination) */ @Override public Object getActualInstance() { @@ -1454,6 +1583,18 @@ public Object getActualInstance() { return (ObservabilityPipelineDatadogLogsDestination) super.getActualInstance(); } + /** + * Get the actual instance of `ObservabilityPipelineCloudPremDestination`. If the actual instance + * is not `ObservabilityPipelineCloudPremDestination`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineCloudPremDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineCloudPremDestination` + */ + public ObservabilityPipelineCloudPremDestination getObservabilityPipelineCloudPremDestination() + throws ClassCastException { + return (ObservabilityPipelineCloudPremDestination) super.getActualInstance(); + } + /** * Get the actual instance of `ObservabilityPipelineAmazonS3Destination`. If the actual instance * is not `ObservabilityPipelineAmazonS3Destination`, the ClassCastException will be thrown. @@ -1682,4 +1823,16 @@ public ObservabilityPipelineSocketDestination getObservabilityPipelineSocketDest getObservabilityPipelineGooglePubSubDestination() throws ClassCastException { return (ObservabilityPipelineGooglePubSubDestination) super.getActualInstance(); } + + /** + * Get the actual instance of `ObservabilityPipelineKafkaDestination`. If the actual instance is + * not `ObservabilityPipelineKafkaDestination`, the ClassCastException will be thrown. 
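Callers typically branch on getActualInstance() before using a typed getter, since the getter casts unconditionally and throws ClassCastException on a mismatch. A small sketch with hypothetical IDs:

    import com.datadog.api.client.v2.model.ObservabilityPipelineCloudPremDestination;
    import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem;

    public class DestinationItemAccessExample {
      public static void main(String[] args) {
        ObservabilityPipelineConfigDestinationItem item =
            new ObservabilityPipelineConfigDestinationItem(
                new ObservabilityPipelineCloudPremDestination()
                    .id("cloud-prem-destination")
                    .addInputsItem("filter-processor"));

        // Check the runtime type first; the typed getter throws on a mismatch.
        if (item.getActualInstance() instanceof ObservabilityPipelineCloudPremDestination) {
          ObservabilityPipelineCloudPremDestination dest =
              item.getObservabilityPipelineCloudPremDestination();
          System.out.println(dest.getId());
        }
      }
    }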
+ * + * @return The actual instance of `ObservabilityPipelineKafkaDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineKafkaDestination` + */ + public ObservabilityPipelineKafkaDestination getObservabilityPipelineKafkaDestination() + throws ClassCastException { + return (ObservabilityPipelineKafkaDestination) super.getActualInstance(); + } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java index 3ec77e02e20..0c4d9771729 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java @@ -90,715 +90,710 @@ public ObservabilityPipelineConfigProcessorItem deserialize( boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); int match = 0; JsonToken token = tree.traverse(jp.getCodec()).nextToken(); - // deserialize ObservabilityPipelineFilterProcessor + // deserialize ObservabilityPipelineAddEnvVarsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineFilterProcessor.class.equals(Integer.class) - || ObservabilityPipelineFilterProcessor.class.equals(Long.class) - || ObservabilityPipelineFilterProcessor.class.equals(Float.class) - || ObservabilityPipelineFilterProcessor.class.equals(Double.class) - || ObservabilityPipelineFilterProcessor.class.equals(Boolean.class) - || ObservabilityPipelineFilterProcessor.class.equals(String.class)) { + if (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineFilterProcessor.class.equals(Integer.class) - || ObservabilityPipelineFilterProcessor.class.equals(Long.class)) + ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineFilterProcessor.class.equals(Float.class) - || ObservabilityPipelineFilterProcessor.class.equals(Double.class)) + ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineFilterProcessor.class.equals(Boolean.class) + (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineFilterProcessor.class.equals(String.class) + (ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFilterProcessor.class); + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAddEnvVarsProcessor.class); // 
TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineFilterProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineAddEnvVarsProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFilterProcessor'"); + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineAddEnvVarsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineFilterProcessor'", + "Input data does not match schema 'ObservabilityPipelineAddEnvVarsProcessor'", e); } - // deserialize ObservabilityPipelineParseJSONProcessor + // deserialize ObservabilityPipelineAddFieldsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(String.class)) { + if (ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class)) + ((ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class)) + ((ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) + (ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineParseJSONProcessor.class.equals(String.class) + (ObservabilityPipelineAddFieldsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineParseJSONProcessor.class); + .readValueAs(ObservabilityPipelineAddFieldsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
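Every one of these blocks opens with the same scalar-coercion guard: a scalar candidate type is only attempted when the client ObjectMapper allows scalar coercion, or when the JSON token already has the matching shape. The processor model classes are never scalars, so the guard is inert here, but the Jackson feature it consults behaves as follows (a standalone illustration, not client code):

    import com.fasterxml.jackson.databind.MapperFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.json.JsonMapper;

    public class ScalarCoercionDemo {
      public static void main(String[] args) throws Exception {
        ObjectMapper lenient =
            JsonMapper.builder().enable(MapperFeature.ALLOW_COERCION_OF_SCALARS).build();
        ObjectMapper strict =
            JsonMapper.builder().disable(MapperFeature.ALLOW_COERCION_OF_SCALARS).build();

        System.out.println(lenient.readValue("\"42\"", Integer.class)); // coerced to 42
        try {
          strict.readValue("\"42\"", Integer.class); // string token into an int target
        } catch (Exception e) {
          System.out.println("rejected without coercion");
        }
      }
    }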
- if (!((ObservabilityPipelineParseJSONProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineAddFieldsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineParseJSONProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineAddFieldsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineParseJSONProcessor'", + "Input data does not match schema 'ObservabilityPipelineAddFieldsProcessor'", e); } - // deserialize ObservabilityPipelineQuotaProcessor + // deserialize ObservabilityPipelineAddHostnameProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Long.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Float.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Double.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) - || ObservabilityPipelineQuotaProcessor.class.equals(String.class)) { + if (ObservabilityPipelineAddHostnameProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Long.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Float.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Double.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Boolean.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Long.class)) + ((ObservabilityPipelineAddHostnameProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineQuotaProcessor.class.equals(Float.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Double.class)) + ((ObservabilityPipelineAddHostnameProcessor.class.equals(Float.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) + (ObservabilityPipelineAddHostnameProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineQuotaProcessor.class.equals(String.class) + (ObservabilityPipelineAddHostnameProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineQuotaProcessor.class); + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAddHostnameProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineQuotaProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineAddHostnameProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineQuotaProcessor'"); + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineAddHostnameProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineQuotaProcessor'", + "Input data does not match schema 'ObservabilityPipelineAddHostnameProcessor'", e); } - // deserialize ObservabilityPipelineAddFieldsProcessor + // deserialize ObservabilityPipelineCustomProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineCustomProcessor.class.equals(Integer.class) + || ObservabilityPipelineCustomProcessor.class.equals(Long.class) + || ObservabilityPipelineCustomProcessor.class.equals(Float.class) + || ObservabilityPipelineCustomProcessor.class.equals(Double.class) + || ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) + || ObservabilityPipelineCustomProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineCustomProcessor.class.equals(Integer.class) + || ObservabilityPipelineCustomProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineCustomProcessor.class.equals(Float.class) + || ObservabilityPipelineCustomProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineAddFieldsProcessor.class.equals(String.class) + (ObservabilityPipelineCustomProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineAddFieldsProcessor.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineCustomProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineAddFieldsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineCustomProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineAddFieldsProcessor'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineCustomProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineAddFieldsProcessor'", + "Input data does not match schema 'ObservabilityPipelineCustomProcessor'", e); } - // deserialize ObservabilityPipelineRemoveFieldsProcessor + // deserialize ObservabilityPipelineDatadogTagsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class) + (ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineRemoveFieldsProcessor.class); + .readValueAs(ObservabilityPipelineDatadogTagsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineRemoveFieldsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineDatadogTagsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineRemoveFieldsProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineDatadogTagsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineRemoveFieldsProcessor'", + "Input data does not match schema 'ObservabilityPipelineDatadogTagsProcessor'", e); } - // deserialize ObservabilityPipelineRenameFieldsProcessor + // deserialize ObservabilityPipelineDedupeProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Long.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Float.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Double.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) + || ObservabilityPipelineDedupeProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineDedupeProcessor.class.equals(Float.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class) + (ObservabilityPipelineDedupeProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineRenameFieldsProcessor.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineDedupeProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineRenameFieldsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineDedupeProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineRenameFieldsProcessor'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineDedupeProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineRenameFieldsProcessor'", + "Input data does not match schema 'ObservabilityPipelineDedupeProcessor'", e); } - // deserialize ObservabilityPipelineGenerateMetricsProcessor + // deserialize ObservabilityPipelineEnrichmentTableProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class) + (ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineGenerateMetricsProcessor.class); + .readValueAs(ObservabilityPipelineEnrichmentTableProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineGenerateMetricsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineEnrichmentTableProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( Level.FINER, - "Input data matches schema 'ObservabilityPipelineGenerateMetricsProcessor'"); + "Input data matches schema 'ObservabilityPipelineEnrichmentTableProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineGenerateMetricsProcessor'", + "Input data does not match schema 'ObservabilityPipelineEnrichmentTableProcessor'", e); } - // deserialize ObservabilityPipelineSampleProcessor + // deserialize ObservabilityPipelineFilterProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSampleProcessor.class.equals(Integer.class) - || ObservabilityPipelineSampleProcessor.class.equals(Long.class) - || ObservabilityPipelineSampleProcessor.class.equals(Float.class) - || ObservabilityPipelineSampleProcessor.class.equals(Double.class) - || ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) - || ObservabilityPipelineSampleProcessor.class.equals(String.class)) { + if (ObservabilityPipelineFilterProcessor.class.equals(Integer.class) + || ObservabilityPipelineFilterProcessor.class.equals(Long.class) + || ObservabilityPipelineFilterProcessor.class.equals(Float.class) + || ObservabilityPipelineFilterProcessor.class.equals(Double.class) + || ObservabilityPipelineFilterProcessor.class.equals(Boolean.class) + || ObservabilityPipelineFilterProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSampleProcessor.class.equals(Integer.class) - || ObservabilityPipelineSampleProcessor.class.equals(Long.class)) + ((ObservabilityPipelineFilterProcessor.class.equals(Integer.class) + || ObservabilityPipelineFilterProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSampleProcessor.class.equals(Float.class) - || ObservabilityPipelineSampleProcessor.class.equals(Double.class)) + ((ObservabilityPipelineFilterProcessor.class.equals(Float.class) + || ObservabilityPipelineFilterProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) + (ObservabilityPipelineFilterProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSampleProcessor.class.equals(String.class) + (ObservabilityPipelineFilterProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSampleProcessor.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFilterProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineSampleProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineFilterProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSampleProcessor'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFilterProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSampleProcessor'", + "Input data does not match schema 'ObservabilityPipelineFilterProcessor'", e); } - // deserialize ObservabilityPipelineParseGrokProcessor + // deserialize ObservabilityPipelineGenerateMetricsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(String.class)) { + if (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class)) + ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class)) + ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) + (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineParseGrokProcessor.class.equals(String.class) + (ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineParseGrokProcessor.class); + .readValueAs(ObservabilityPipelineGenerateMetricsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineParseGrokProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineGenerateMetricsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineParseGrokProcessor'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGenerateMetricsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineParseGrokProcessor'", + "Input data does not match schema 'ObservabilityPipelineGenerateMetricsProcessor'", e); } - // deserialize ObservabilityPipelineSensitiveDataScannerProcessor + // deserialize ObservabilityPipelineOcsfMapperProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Long.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Double.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)) { + if (ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( - Long.class)) + ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( - Double.class)) + ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + (ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class) + (ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessor.class); + .readValueAs(ObservabilityPipelineOcsfMapperProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineSensitiveDataScannerProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineOcsfMapperProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineSensitiveDataScannerProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineOcsfMapperProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSensitiveDataScannerProcessor'", + "Input data does not match schema 'ObservabilityPipelineOcsfMapperProcessor'", e); } - // deserialize ObservabilityPipelineOcsfMapperProcessor + // deserialize ObservabilityPipelineParseGrokProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class)) { + if (ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class)) + ((ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class) - || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class)) + ((ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class) + (ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class) + (ObservabilityPipelineParseGrokProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineOcsfMapperProcessor.class); + .readValueAs(ObservabilityPipelineParseGrokProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineOcsfMapperProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineParseGrokProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineOcsfMapperProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseGrokProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineOcsfMapperProcessor'", + "Input data does not match schema 'ObservabilityPipelineParseGrokProcessor'", e); } - // deserialize ObservabilityPipelineAddEnvVarsProcessor + // deserialize ObservabilityPipelineParseJSONProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class) + (ObservabilityPipelineParseJSONProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineAddEnvVarsProcessor.class); + .readValueAs(ObservabilityPipelineParseJSONProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineAddEnvVarsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineParseJSONProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineAddEnvVarsProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseJSONProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineAddEnvVarsProcessor'", + "Input data does not match schema 'ObservabilityPipelineParseJSONProcessor'", e); } - // deserialize ObservabilityPipelineDedupeProcessor + // deserialize ObservabilityPipelineParseXMLProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Long.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Float.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Double.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) - || ObservabilityPipelineDedupeProcessor.class.equals(String.class)) { + if (ObservabilityPipelineParseXMLProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Long.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Float.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Double.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Long.class)) + ((ObservabilityPipelineParseXMLProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineDedupeProcessor.class.equals(Float.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Double.class)) + ((ObservabilityPipelineParseXMLProcessor.class.equals(Float.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) + (ObservabilityPipelineParseXMLProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineDedupeProcessor.class.equals(String.class) + (ObservabilityPipelineParseXMLProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineDedupeProcessor.class); + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineParseXMLProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineDedupeProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineParseXMLProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineDedupeProcessor'"); + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseXMLProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineDedupeProcessor'", + "Input data does not match schema 'ObservabilityPipelineParseXMLProcessor'", e); } - // deserialize ObservabilityPipelineEnrichmentTableProcessor + // deserialize ObservabilityPipelineQuotaProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)) { + if (ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Long.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Float.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Double.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) + || ObservabilityPipelineQuotaProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class)) + ((ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class)) + ((ObservabilityPipelineQuotaProcessor.class.equals(Float.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) + (ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class) + (ObservabilityPipelineQuotaProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineEnrichmentTableProcessor.class); + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineQuotaProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineEnrichmentTableProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineQuotaProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineEnrichmentTableProcessor'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineQuotaProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineEnrichmentTableProcessor'", + "Input data does not match schema 'ObservabilityPipelineQuotaProcessor'", e); } @@ -851,154 +846,312 @@ public ObservabilityPipelineConfigProcessorItem deserialize( e); } - // deserialize ObservabilityPipelineThrottleProcessor + // deserialize ObservabilityPipelineRemoveFieldsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Long.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Float.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Double.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) - || ObservabilityPipelineThrottleProcessor.class.equals(String.class)) { + if (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Long.class)) + ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineThrottleProcessor.class.equals(Float.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Double.class)) + ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineThrottleProcessor.class.equals(String.class) + (ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineThrottleProcessor.class); + .readValueAs(ObservabilityPipelineRemoveFieldsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
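              // Note on the selection protocol above: every oneOf candidate is parsed
              // speculatively, and only a parse that leaves no unknown properties behind
              // (unparsed == false) counts as a match. A minimal sketch of the loop this
              // generated code unrolls (oneOfCandidates and isUnparsed() are hypothetical
              // stand-ins; the handling of ambiguous payloads lives outside this hunk):
              //
              //   int match = 0;
              //   Object deserialized = null;
              //   for (Class<?> candidate : oneOfCandidates) {
              //     try {
              //       Object tmp = tree.traverse(jp.getCodec()).readValueAs(candidate);
              //       if (!isUnparsed(tmp)) { deserialized = tmp; match++; }
              //     } catch (Exception e) {
              //       // candidate rejected; try the next schema
              //     }
              //   }
              //   // as the TODO comments note, overlapping schemas can drive match above 1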
- if (!((ObservabilityPipelineThrottleProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineRemoveFieldsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineThrottleProcessor'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineRemoveFieldsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineThrottleProcessor'", + "Input data does not match schema 'ObservabilityPipelineRemoveFieldsProcessor'", e); } - // deserialize ObservabilityPipelineCustomProcessor + // deserialize ObservabilityPipelineRenameFieldsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineCustomProcessor.class.equals(Integer.class) - || ObservabilityPipelineCustomProcessor.class.equals(Long.class) - || ObservabilityPipelineCustomProcessor.class.equals(Float.class) - || ObservabilityPipelineCustomProcessor.class.equals(Double.class) - || ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) - || ObservabilityPipelineCustomProcessor.class.equals(String.class)) { + if (ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineCustomProcessor.class.equals(Integer.class) - || ObservabilityPipelineCustomProcessor.class.equals(Long.class)) + ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineCustomProcessor.class.equals(Float.class) - || ObservabilityPipelineCustomProcessor.class.equals(Double.class)) + ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) + (ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineCustomProcessor.class.equals(String.class) + (ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineCustomProcessor.class); + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineRenameFieldsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineCustomProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineRenameFieldsProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineCustomProcessor'"); + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineRenameFieldsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineCustomProcessor'", + "Input data does not match schema 'ObservabilityPipelineRenameFieldsProcessor'", e); } - // deserialize ObservabilityPipelineDatadogTagsProcessor + // deserialize ObservabilityPipelineSampleProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineSampleProcessor.class.equals(Integer.class) + || ObservabilityPipelineSampleProcessor.class.equals(Long.class) + || ObservabilityPipelineSampleProcessor.class.equals(Float.class) + || ObservabilityPipelineSampleProcessor.class.equals(Double.class) + || ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSampleProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineSampleProcessor.class.equals(Integer.class) + || ObservabilityPipelineSampleProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineSampleProcessor.class.equals(Float.class) + || ObservabilityPipelineSampleProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class) + (ObservabilityPipelineSampleProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSampleProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSampleProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSampleProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSampleProcessor'", + e); + } + + // deserialize ObservabilityPipelineSensitiveDataScannerProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( + Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( + Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineDatadogTagsProcessor.class); + .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineDatadogTagsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineSensitiveDataScannerProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineDatadogTagsProcessor'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineSensitiveDataScannerProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineDatadogTagsProcessor'", + "Input data does not match schema 'ObservabilityPipelineSensitiveDataScannerProcessor'", + e); + } + + // deserialize ObservabilityPipelineSplitArrayProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSplitArrayProcessor.class.equals(Integer.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Long.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Float.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Double.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSplitArrayProcessor.class.equals(Integer.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSplitArrayProcessor.class.equals(Float.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSplitArrayProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSplitArrayProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSplitArrayProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSplitArrayProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineSplitArrayProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSplitArrayProcessor'", + e); + } + + // deserialize ObservabilityPipelineThrottleProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Long.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Float.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Double.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + || ObservabilityPipelineThrottleProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineThrottleProcessor.class.equals(Float.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineThrottleProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineThrottleProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
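  // Construction side, for reference: the block that follows gives the oneOf container one
  // constructor per member schema, each delegating to setActualInstance() for validation.
  // A minimal usage sketch (field population omitted; variable names are illustrative only):
  //
  //   ObservabilityPipelineParseXMLProcessor parseXml = new ObservabilityPipelineParseXMLProcessor();
  //   ObservabilityPipelineConfigProcessorItem item =
  //       new ObservabilityPipelineConfigProcessorItem(parseXml);
  //   // any type outside the registered schemas makes setActualInstance() throw
  //   // RuntimeException("Invalid instance type. ...")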
+ if (!((ObservabilityPipelineThrottleProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineThrottleProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineThrottleProcessor'", e); } @@ -1032,32 +1185,42 @@ public ObservabilityPipelineConfigProcessorItem() { super("oneOf", Boolean.FALSE); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineFilterProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddEnvVarsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseJSONProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddFieldsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineQuotaProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddHostnameProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddFieldsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineCustomProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRemoveFieldsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDatadogTagsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameFieldsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDedupeProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineEnrichmentTableProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineFilterProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } @@ -1067,7 +1230,7 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineGenerateMet setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSampleProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineOcsfMapperProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } @@ -1077,107 +1240,121 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseGrokPr setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem( - ObservabilityPipelineSensitiveDataScannerProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseJSONProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineOcsfMapperProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseXMLProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddEnvVarsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineQuotaProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public 
ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDedupeProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineReduceProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineEnrichmentTableProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRemoveFieldsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineReduceProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameFieldsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineThrottleProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSampleProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineCustomProcessor o) { + public ObservabilityPipelineConfigProcessorItem( + ObservabilityPipelineSensitiveDataScannerProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDatadogTagsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSplitArrayProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineThrottleProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } static { + schemas.put( + "ObservabilityPipelineAddEnvVarsProcessor", + new GenericType<ObservabilityPipelineAddEnvVarsProcessor>() {}); + schemas.put( + "ObservabilityPipelineAddFieldsProcessor", + new GenericType<ObservabilityPipelineAddFieldsProcessor>() {}); + schemas.put( + "ObservabilityPipelineAddHostnameProcessor", + new GenericType<ObservabilityPipelineAddHostnameProcessor>() {}); + schemas.put( + "ObservabilityPipelineCustomProcessor", + new GenericType<ObservabilityPipelineCustomProcessor>() {}); + schemas.put( + "ObservabilityPipelineDatadogTagsProcessor", + new GenericType<ObservabilityPipelineDatadogTagsProcessor>() {}); + schemas.put( + "ObservabilityPipelineDedupeProcessor", + new GenericType<ObservabilityPipelineDedupeProcessor>() {}); + schemas.put( + "ObservabilityPipelineEnrichmentTableProcessor", + new GenericType<ObservabilityPipelineEnrichmentTableProcessor>() {}); schemas.put( "ObservabilityPipelineFilterProcessor", new GenericType<ObservabilityPipelineFilterProcessor>() {}); + schemas.put( + "ObservabilityPipelineGenerateMetricsProcessor", + new GenericType<ObservabilityPipelineGenerateMetricsProcessor>() {}); + schemas.put( + "ObservabilityPipelineOcsfMapperProcessor", + new GenericType<ObservabilityPipelineOcsfMapperProcessor>() {}); + schemas.put( + "ObservabilityPipelineParseGrokProcessor", + new GenericType<ObservabilityPipelineParseGrokProcessor>() {}); schemas.put( "ObservabilityPipelineParseJSONProcessor", new GenericType<ObservabilityPipelineParseJSONProcessor>() {}); + schemas.put( + "ObservabilityPipelineParseXMLProcessor", + new GenericType<ObservabilityPipelineParseXMLProcessor>() {}); schemas.put( "ObservabilityPipelineQuotaProcessor", new GenericType<ObservabilityPipelineQuotaProcessor>() {}); schemas.put( - "ObservabilityPipelineAddFieldsProcessor", - new GenericType<ObservabilityPipelineAddFieldsProcessor>() {}); + "ObservabilityPipelineReduceProcessor", + new GenericType<ObservabilityPipelineReduceProcessor>() {}); schemas.put( "ObservabilityPipelineRemoveFieldsProcessor", new GenericType<ObservabilityPipelineRemoveFieldsProcessor>() {}); schemas.put( "ObservabilityPipelineRenameFieldsProcessor", new GenericType<ObservabilityPipelineRenameFieldsProcessor>() {}); - schemas.put( - "ObservabilityPipelineGenerateMetricsProcessor", - new GenericType<ObservabilityPipelineGenerateMetricsProcessor>() {}); schemas.put( "ObservabilityPipelineSampleProcessor", new GenericType<ObservabilityPipelineSampleProcessor>() {}); - schemas.put( - "ObservabilityPipelineParseGrokProcessor", - new GenericType<ObservabilityPipelineParseGrokProcessor>() {}); schemas.put( "ObservabilityPipelineSensitiveDataScannerProcessor", new GenericType<ObservabilityPipelineSensitiveDataScannerProcessor>() {}); schemas.put( - "ObservabilityPipelineOcsfMapperProcessor", - new GenericType<ObservabilityPipelineOcsfMapperProcessor>()
{}); - schemas.put( - "ObservabilityPipelineAddEnvVarsProcessor", - new GenericType<ObservabilityPipelineAddEnvVarsProcessor>() {}); - schemas.put( - "ObservabilityPipelineDedupeProcessor", - new GenericType<ObservabilityPipelineDedupeProcessor>() {}); - schemas.put( - "ObservabilityPipelineEnrichmentTableProcessor", - new GenericType<ObservabilityPipelineEnrichmentTableProcessor>() {}); - schemas.put( - "ObservabilityPipelineReduceProcessor", - new GenericType<ObservabilityPipelineReduceProcessor>() {}); + "ObservabilityPipelineSplitArrayProcessor", + new GenericType<ObservabilityPipelineSplitArrayProcessor>() {}); schemas.put( "ObservabilityPipelineThrottleProcessor", new GenericType<ObservabilityPipelineThrottleProcessor>() {}); - schemas.put( - "ObservabilityPipelineCustomProcessor", - new GenericType<ObservabilityPipelineCustomProcessor>() {}); - schemas.put( - "ObservabilityPipelineDatadogTagsProcessor", - new GenericType<ObservabilityPipelineDatadogTagsProcessor>() {}); JSON.registerDescendants( ObservabilityPipelineConfigProcessorItem.class, Collections.unmodifiableMap(schemas)); } @@ -1189,16 +1366,17 @@ public Map<String, GenericType> getSchemas() { /** * Set the instance that matches the oneOf child schema, check the instance parameter is valid - * against the oneOf child schemas: ObservabilityPipelineFilterProcessor, - * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, - * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, - * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, - * ObservabilityPipelineSensitiveDataScannerProcessor, ObservabilityPipelineOcsfMapperProcessor, - * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineDedupeProcessor, - * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineReduceProcessor, - * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineCustomProcessor, - * ObservabilityPipelineDatadogTagsProcessor + * against the oneOf child schemas: ObservabilityPipelineAddEnvVarsProcessor, + * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineAddHostnameProcessor, + * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor, + * ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor, + * ObservabilityPipelineFilterProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor, + * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor, + * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, + * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, + * ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor * *

<p>It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -1206,94 +1384,109 @@ public Map<String, GenericType> getSchemas() { @Override public void setActualInstance(Object instance) { if (JSON.isInstanceOf( - ObservabilityPipelineFilterProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineAddEnvVarsProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineParseJSONProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineAddFieldsProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineQuotaProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineAddHostnameProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineAddFieldsProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineCustomProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineRemoveFieldsProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineDatadogTagsProcessor.class, instance, new HashSet<Class<?>>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineDedupeProcessor.class, instance, new HashSet<Class<?>>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineEnrichmentTableProcessor.class, instance, new HashSet<Class<?>>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineFilterProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineRenameFieldsProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineGenerateMetricsProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineGenerateMetricsProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineOcsfMapperProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineSampleProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineParseGrokProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineParseGrokProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineParseJSONProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineSensitiveDataScannerProcessor.class, - instance, - new HashSet<Class<?>>())) { + ObservabilityPipelineParseXMLProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineOcsfMapperProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineQuotaProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineAddEnvVarsProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineReduceProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineDedupeProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineRemoveFieldsProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineEnrichmentTableProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineRenameFieldsProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineReduceProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineSampleProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineThrottleProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineSensitiveDataScannerProcessor.class, + instance, + new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineCustomProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineSplitArrayProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineDatadogTagsProcessor.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineThrottleProcessor.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } @@ -1303,43 +1496,46 @@ public void setActualInstance(Object instance) { return; } throw new RuntimeException( - "Invalid instance type. Must be ObservabilityPipelineFilterProcessor," - + " ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor," - + " ObservabilityPipelineAddFieldsProcessor," - + " ObservabilityPipelineRemoveFieldsProcessor," - + " ObservabilityPipelineRenameFieldsProcessor," - + " ObservabilityPipelineGenerateMetricsProcessor," - + " ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor," - + " ObservabilityPipelineSensitiveDataScannerProcessor," - + " ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineAddEnvVarsProcessor," + "Invalid instance type. 
Must be ObservabilityPipelineAddEnvVarsProcessor," + + " ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineAddHostnameProcessor," + + " ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor," + " ObservabilityPipelineDedupeProcessor," + " ObservabilityPipelineEnrichmentTableProcessor," - + " ObservabilityPipelineReduceProcessor, ObservabilityPipelineThrottleProcessor," - + " ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor"); + + " ObservabilityPipelineFilterProcessor," + + " ObservabilityPipelineGenerateMetricsProcessor," + + " ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor," + + " ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor," + + " ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor," + + " ObservabilityPipelineRemoveFieldsProcessor," + + " ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineSampleProcessor," + + " ObservabilityPipelineSensitiveDataScannerProcessor," + + " ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor"); } /** - * Get the actual instance, which can be the following: ObservabilityPipelineFilterProcessor, - * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, - * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, - * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, - * ObservabilityPipelineSensitiveDataScannerProcessor, ObservabilityPipelineOcsfMapperProcessor, - * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineDedupeProcessor, - * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineReduceProcessor, - * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineCustomProcessor, - * ObservabilityPipelineDatadogTagsProcessor + * Get the actual instance, which can be the following: ObservabilityPipelineAddEnvVarsProcessor, + * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineAddHostnameProcessor, + * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor, + * ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor, + * ObservabilityPipelineFilterProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor, + * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor, + * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, + * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, + * ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor * - * @return The actual instance (ObservabilityPipelineFilterProcessor, - * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, - * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, - * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, - * ObservabilityPipelineSensitiveDataScannerProcessor, - * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineAddEnvVarsProcessor, + * @return The 
actual instance (ObservabilityPipelineAddEnvVarsProcessor, + * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineAddHostnameProcessor, + * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor, * ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor, - * ObservabilityPipelineReduceProcessor, ObservabilityPipelineThrottleProcessor, - * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor) + * ObservabilityPipelineFilterProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor, + * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor, + * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, + * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, + * ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor) */ @Override public Object getActualInstance() { @@ -1347,75 +1543,101 @@ public Object getActualInstance() { } /** - * Get the actual instance of `ObservabilityPipelineFilterProcessor`. If the actual instance is - * not `ObservabilityPipelineFilterProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineAddEnvVarsProcessor`. If the actual instance + * is not `ObservabilityPipelineAddEnvVarsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineFilterProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineFilterProcessor` + * @return The actual instance of `ObservabilityPipelineAddEnvVarsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAddEnvVarsProcessor` */ - public ObservabilityPipelineFilterProcessor getObservabilityPipelineFilterProcessor() + public ObservabilityPipelineAddEnvVarsProcessor getObservabilityPipelineAddEnvVarsProcessor() throws ClassCastException { - return (ObservabilityPipelineFilterProcessor) super.getActualInstance(); + return (ObservabilityPipelineAddEnvVarsProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineParseJSONProcessor`. If the actual instance is - * not `ObservabilityPipelineParseJSONProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineAddFieldsProcessor`. If the actual instance is + * not `ObservabilityPipelineAddFieldsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineParseJSONProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineParseJSONProcessor` + * @return The actual instance of `ObservabilityPipelineAddFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAddFieldsProcessor` */ - public ObservabilityPipelineParseJSONProcessor getObservabilityPipelineParseJSONProcessor() + public ObservabilityPipelineAddFieldsProcessor getObservabilityPipelineAddFieldsProcessor() throws ClassCastException { - return (ObservabilityPipelineParseJSONProcessor) super.getActualInstance(); + return (ObservabilityPipelineAddFieldsProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineQuotaProcessor`. 
If the actual instance is not - * `ObservabilityPipelineQuotaProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineAddHostnameProcessor`. If the actual instance + * is not `ObservabilityPipelineAddHostnameProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineQuotaProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineQuotaProcessor` + * @return The actual instance of `ObservabilityPipelineAddHostnameProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAddHostnameProcessor` */ - public ObservabilityPipelineQuotaProcessor getObservabilityPipelineQuotaProcessor() + public ObservabilityPipelineAddHostnameProcessor getObservabilityPipelineAddHostnameProcessor() throws ClassCastException { - return (ObservabilityPipelineQuotaProcessor) super.getActualInstance(); + return (ObservabilityPipelineAddHostnameProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineAddFieldsProcessor`. If the actual instance is - * not `ObservabilityPipelineAddFieldsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineCustomProcessor`. If the actual instance is + * not `ObservabilityPipelineCustomProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineAddFieldsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineAddFieldsProcessor` + * @return The actual instance of `ObservabilityPipelineCustomProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineCustomProcessor` */ - public ObservabilityPipelineAddFieldsProcessor getObservabilityPipelineAddFieldsProcessor() + public ObservabilityPipelineCustomProcessor getObservabilityPipelineCustomProcessor() throws ClassCastException { - return (ObservabilityPipelineAddFieldsProcessor) super.getActualInstance(); + return (ObservabilityPipelineCustomProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineRemoveFieldsProcessor`. If the actual instance - * is not `ObservabilityPipelineRemoveFieldsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineDatadogTagsProcessor`. If the actual instance + * is not `ObservabilityPipelineDatadogTagsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineRemoveFieldsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineRemoveFieldsProcessor` + * @return The actual instance of `ObservabilityPipelineDatadogTagsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogTagsProcessor` */ - public ObservabilityPipelineRemoveFieldsProcessor getObservabilityPipelineRemoveFieldsProcessor() + public ObservabilityPipelineDatadogTagsProcessor getObservabilityPipelineDatadogTagsProcessor() throws ClassCastException { - return (ObservabilityPipelineRemoveFieldsProcessor) super.getActualInstance(); + return (ObservabilityPipelineDatadogTagsProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineRenameFieldsProcessor`. If the actual instance - * is not `ObservabilityPipelineRenameFieldsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineDedupeProcessor`. 
If the actual instance is + * not `ObservabilityPipelineDedupeProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineRenameFieldsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineRenameFieldsProcessor` + * @return The actual instance of `ObservabilityPipelineDedupeProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineDedupeProcessor` */ - public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRenameFieldsProcessor() + public ObservabilityPipelineDedupeProcessor getObservabilityPipelineDedupeProcessor() throws ClassCastException { - return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance(); + return (ObservabilityPipelineDedupeProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineEnrichmentTableProcessor`. If the actual + * instance is not `ObservabilityPipelineEnrichmentTableProcessor`, the ClassCastException will be + * thrown. + * + * @return The actual instance of `ObservabilityPipelineEnrichmentTableProcessor` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineEnrichmentTableProcessor` + */ + public ObservabilityPipelineEnrichmentTableProcessor + getObservabilityPipelineEnrichmentTableProcessor() throws ClassCastException { + return (ObservabilityPipelineEnrichmentTableProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineFilterProcessor`. If the actual instance is + * not `ObservabilityPipelineFilterProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineFilterProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineFilterProcessor` + */ + public ObservabilityPipelineFilterProcessor getObservabilityPipelineFilterProcessor() + throws ClassCastException { + return (ObservabilityPipelineFilterProcessor) super.getActualInstance(); } /** @@ -1433,15 +1655,15 @@ public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRename } /** - * Get the actual instance of `ObservabilityPipelineSampleProcessor`. If the actual instance is - * not `ObservabilityPipelineSampleProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineOcsfMapperProcessor`. If the actual instance + * is not `ObservabilityPipelineOcsfMapperProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineSampleProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineSampleProcessor` + * @return The actual instance of `ObservabilityPipelineOcsfMapperProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineOcsfMapperProcessor` */ - public ObservabilityPipelineSampleProcessor getObservabilityPipelineSampleProcessor() + public ObservabilityPipelineOcsfMapperProcessor getObservabilityPipelineOcsfMapperProcessor() throws ClassCastException { - return (ObservabilityPipelineSampleProcessor) super.getActualInstance(); + return (ObservabilityPipelineOcsfMapperProcessor) super.getActualInstance(); } /** @@ -1457,114 +1679,124 @@ public ObservabilityPipelineParseGrokProcessor getObservabilityPipelineParseGrok } /** - * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`. 
If the actual - * instance is not `ObservabilityPipelineSensitiveDataScannerProcessor`, the ClassCastException - * will be thrown. + * Get the actual instance of `ObservabilityPipelineParseJSONProcessor`. If the actual instance is + * not `ObservabilityPipelineParseJSONProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor` - * @throws ClassCastException if the instance is not - * `ObservabilityPipelineSensitiveDataScannerProcessor` + * @return The actual instance of `ObservabilityPipelineParseJSONProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineParseJSONProcessor` */ - public ObservabilityPipelineSensitiveDataScannerProcessor - getObservabilityPipelineSensitiveDataScannerProcessor() throws ClassCastException { - return (ObservabilityPipelineSensitiveDataScannerProcessor) super.getActualInstance(); + public ObservabilityPipelineParseJSONProcessor getObservabilityPipelineParseJSONProcessor() + throws ClassCastException { + return (ObservabilityPipelineParseJSONProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineOcsfMapperProcessor`. If the actual instance - * is not `ObservabilityPipelineOcsfMapperProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineParseXMLProcessor`. If the actual instance is + * not `ObservabilityPipelineParseXMLProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineOcsfMapperProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineOcsfMapperProcessor` + * @return The actual instance of `ObservabilityPipelineParseXMLProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineParseXMLProcessor` */ - public ObservabilityPipelineOcsfMapperProcessor getObservabilityPipelineOcsfMapperProcessor() + public ObservabilityPipelineParseXMLProcessor getObservabilityPipelineParseXMLProcessor() throws ClassCastException { - return (ObservabilityPipelineOcsfMapperProcessor) super.getActualInstance(); + return (ObservabilityPipelineParseXMLProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineAddEnvVarsProcessor`. If the actual instance - * is not `ObservabilityPipelineAddEnvVarsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineQuotaProcessor`. If the actual instance is not + * `ObservabilityPipelineQuotaProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineAddEnvVarsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineAddEnvVarsProcessor` + * @return The actual instance of `ObservabilityPipelineQuotaProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineQuotaProcessor` */ - public ObservabilityPipelineAddEnvVarsProcessor getObservabilityPipelineAddEnvVarsProcessor() + public ObservabilityPipelineQuotaProcessor getObservabilityPipelineQuotaProcessor() throws ClassCastException { - return (ObservabilityPipelineAddEnvVarsProcessor) super.getActualInstance(); + return (ObservabilityPipelineQuotaProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineDedupeProcessor`. If the actual instance is - * not `ObservabilityPipelineDedupeProcessor`, the ClassCastException will be thrown. 
+ * Get the actual instance of `ObservabilityPipelineReduceProcessor`. If the actual instance is + * not `ObservabilityPipelineReduceProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineDedupeProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineDedupeProcessor` + * @return The actual instance of `ObservabilityPipelineReduceProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineReduceProcessor` */ - public ObservabilityPipelineDedupeProcessor getObservabilityPipelineDedupeProcessor() + public ObservabilityPipelineReduceProcessor getObservabilityPipelineReduceProcessor() throws ClassCastException { - return (ObservabilityPipelineDedupeProcessor) super.getActualInstance(); + return (ObservabilityPipelineReduceProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineEnrichmentTableProcessor`. If the actual - * instance is not `ObservabilityPipelineEnrichmentTableProcessor`, the ClassCastException will be - * thrown. + * Get the actual instance of `ObservabilityPipelineRemoveFieldsProcessor`. If the actual instance + * is not `ObservabilityPipelineRemoveFieldsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineEnrichmentTableProcessor` - * @throws ClassCastException if the instance is not - * `ObservabilityPipelineEnrichmentTableProcessor` + * @return The actual instance of `ObservabilityPipelineRemoveFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineRemoveFieldsProcessor` */ - public ObservabilityPipelineEnrichmentTableProcessor - getObservabilityPipelineEnrichmentTableProcessor() throws ClassCastException { - return (ObservabilityPipelineEnrichmentTableProcessor) super.getActualInstance(); + public ObservabilityPipelineRemoveFieldsProcessor getObservabilityPipelineRemoveFieldsProcessor() + throws ClassCastException { + return (ObservabilityPipelineRemoveFieldsProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineReduceProcessor`. If the actual instance is - * not `ObservabilityPipelineReduceProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineRenameFieldsProcessor`. If the actual instance + * is not `ObservabilityPipelineRenameFieldsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineReduceProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineReduceProcessor` + * @return The actual instance of `ObservabilityPipelineRenameFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineRenameFieldsProcessor` */ - public ObservabilityPipelineReduceProcessor getObservabilityPipelineReduceProcessor() + public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRenameFieldsProcessor() throws ClassCastException { - return (ObservabilityPipelineReduceProcessor) super.getActualInstance(); + return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineThrottleProcessor`. If the actual instance is - * not `ObservabilityPipelineThrottleProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineSampleProcessor`. 
If the actual instance is + * not `ObservabilityPipelineSampleProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineThrottleProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineThrottleProcessor` + * @return The actual instance of `ObservabilityPipelineSampleProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSampleProcessor` */ - public ObservabilityPipelineThrottleProcessor getObservabilityPipelineThrottleProcessor() + public ObservabilityPipelineSampleProcessor getObservabilityPipelineSampleProcessor() throws ClassCastException { - return (ObservabilityPipelineThrottleProcessor) super.getActualInstance(); + return (ObservabilityPipelineSampleProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineCustomProcessor`. If the actual instance is - * not `ObservabilityPipelineCustomProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`. If the actual + * instance is not `ObservabilityPipelineSensitiveDataScannerProcessor`, the ClassCastException + * will be thrown. * - * @return The actual instance of `ObservabilityPipelineCustomProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineCustomProcessor` + * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessor` */ - public ObservabilityPipelineCustomProcessor getObservabilityPipelineCustomProcessor() + public ObservabilityPipelineSensitiveDataScannerProcessor + getObservabilityPipelineSensitiveDataScannerProcessor() throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSplitArrayProcessor`. If the actual instance + * is not `ObservabilityPipelineSplitArrayProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSplitArrayProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplitArrayProcessor` + */ + public ObservabilityPipelineSplitArrayProcessor getObservabilityPipelineSplitArrayProcessor() throws ClassCastException { - return (ObservabilityPipelineCustomProcessor) super.getActualInstance(); + return (ObservabilityPipelineSplitArrayProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineDatadogTagsProcessor`. If the actual instance - * is not `ObservabilityPipelineDatadogTagsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineThrottleProcessor`. If the actual instance is + * not `ObservabilityPipelineThrottleProcessor`, the ClassCastException will be thrown. 
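All of the typed getters above delegate to `getActualInstance()`, so calling one against the wrong variant throws the documented ClassCastException. A minimal sketch of unwrapping a processor item defensively; `processorItem` is an assumed, already-deserialized ObservabilityPipelineConfigProcessorItem:

    Object actual = processorItem.getActualInstance();
    if (actual instanceof ObservabilityPipelineSplitArrayProcessor) {
      // One of the variants added by this patch.
      ObservabilityPipelineSplitArrayProcessor splitArray =
          processorItem.getObservabilityPipelineSplitArrayProcessor();
    } else if (actual instanceof ObservabilityPipelineThrottleProcessor) {
      ObservabilityPipelineThrottleProcessor throttle =
          processorItem.getObservabilityPipelineThrottleProcessor();
    } // handle other variants as needed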
* - * @return The actual instance of `ObservabilityPipelineDatadogTagsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogTagsProcessor` + * @return The actual instance of `ObservabilityPipelineThrottleProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineThrottleProcessor` */ - public ObservabilityPipelineDatadogTagsProcessor getObservabilityPipelineDatadogTagsProcessor() + public ObservabilityPipelineThrottleProcessor getObservabilityPipelineThrottleProcessor() throws ClassCastException { - return (ObservabilityPipelineDatadogTagsProcessor) super.getActualInstance(); + return (ObservabilityPipelineThrottleProcessor) super.getActualInstance(); } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java index 58c17babac0..a0c74af90c5 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java @@ -869,6 +869,57 @@ public ObservabilityPipelineConfigSourceItem deserialize( Level.FINER, "Input data does not match schema 'ObservabilityPipelineSocketSource'", e); } + // deserialize ObservabilityPipelineOpentelemetrySource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineOpentelemetrySource.class.equals(Integer.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Long.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Float.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Double.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Boolean.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineOpentelemetrySource.class.equals(Integer.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineOpentelemetrySource.class.equals(Float.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineOpentelemetrySource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineOpentelemetrySource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineOpentelemetrySource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
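+          // Lenient-match caveat (see the TODO above): schema constraints are not
+          // enforced here, so more than one candidate can parse the same payload;
+          // the holder is only populated when exactly one schema matched (match == 1).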
+ if (!((ObservabilityPipelineOpentelemetrySource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineOpentelemetrySource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineOpentelemetrySource'", + e); + } + ObservabilityPipelineConfigSourceItem ret = new ObservabilityPipelineConfigSourceItem(); if (match == 1) { ret.setActualInstance(deserialized); @@ -979,6 +1030,11 @@ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSocketSource o setActualInstance(o); } + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineOpentelemetrySource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + static { schemas.put( "ObservabilityPipelineKafkaSource", new GenericType() {}); @@ -1027,6 +1083,9 @@ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSocketSource o schemas.put( "ObservabilityPipelineSocketSource", new GenericType() {}); + schemas.put( + "ObservabilityPipelineOpentelemetrySource", + new GenericType() {}); JSON.registerDescendants( ObservabilityPipelineConfigSourceItem.class, Collections.unmodifiableMap(schemas)); } @@ -1046,7 +1105,7 @@ public Map getSchemas() { * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource, * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource, * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource, - * ObservabilityPipelineSocketSource + * ObservabilityPipelineSocketSource, ObservabilityPipelineOpentelemetrySource * *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -1133,6 +1192,11 @@ public void setActualInstance(Object instance) { super.setActualInstance(instance); return; } + if (JSON.isInstanceOf( + ObservabilityPipelineOpentelemetrySource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { super.setActualInstance(instance); @@ -1147,7 +1211,8 @@ public void setActualInstance(Object instance) { + " ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource," + " ObservabilityPipelineAmazonDataFirehoseSource," + " ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource," - + " ObservabilityPipelineLogstashSource, ObservabilityPipelineSocketSource"); + + " ObservabilityPipelineLogstashSource, ObservabilityPipelineSocketSource," + + " ObservabilityPipelineOpentelemetrySource"); } /** @@ -1159,7 +1224,7 @@ public void setActualInstance(Object instance) { * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource, * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource, * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource, - * ObservabilityPipelineSocketSource + * ObservabilityPipelineSocketSource, ObservabilityPipelineOpentelemetrySource * * @return The actual instance (ObservabilityPipelineKafkaSource, * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource, @@ -1169,7 +1234,7 @@ public void setActualInstance(Object instance) { * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource, * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource, * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource, - * ObservabilityPipelineSocketSource) + * ObservabilityPipelineSocketSource, ObservabilityPipelineOpentelemetrySource) */ @Override public Object getActualInstance() { @@ -1369,4 +1434,16 @@ public ObservabilityPipelineSocketSource getObservabilityPipelineSocketSource() throws ClassCastException { return (ObservabilityPipelineSocketSource) super.getActualInstance(); } + + /** + * Get the actual instance of `ObservabilityPipelineOpentelemetrySource`. If the actual instance + * is not `ObservabilityPipelineOpentelemetrySource`, the ClassCastException will be thrown. 
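With the constructor overload and typed getter both wired in, the new OpenTelemetry source participates in the oneOf holder like the existing variants. A sketch, assuming `otelSource` is an ObservabilityPipelineOpentelemetrySource configured elsewhere (its fields live in the new model file, not in this hunk):

    ObservabilityPipelineConfigSourceItem sourceItem =
        new ObservabilityPipelineConfigSourceItem(otelSource);
    // Throws ClassCastException if the holder actually contains a different variant.
    ObservabilityPipelineOpentelemetrySource roundTripped =
        sourceItem.getObservabilityPipelineOpentelemetrySource();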
+ * + * @return The actual instance of `ObservabilityPipelineOpentelemetrySource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineOpentelemetrySource` + */ + public ObservabilityPipelineOpentelemetrySource getObservabilityPipelineOpentelemetrySource() + throws ClassCastException { + return (ObservabilityPipelineOpentelemetrySource) super.getActualInstance(); + } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java index c414b55e50e..34d6a06e07a 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java @@ -23,6 +23,7 @@ @JsonPropertyOrder({ ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_API_VERSION, ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_BULK_INDEX, + ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_DATA_STREAM, ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_ID, ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_INPUTS, ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_TYPE @@ -37,6 +38,9 @@ public class ObservabilityPipelineElasticsearchDestination { public static final String JSON_PROPERTY_BULK_INDEX = "bulk_index"; private String bulkIndex; + public static final String JSON_PROPERTY_DATA_STREAM = "data_stream"; + private ObservabilityPipelineElasticsearchDestinationDataStream dataStream; + public static final String JSON_PROPERTY_ID = "id"; private String id; @@ -108,6 +112,29 @@ public void setBulkIndex(String bulkIndex) { this.bulkIndex = bulkIndex; } + public ObservabilityPipelineElasticsearchDestination dataStream( + ObservabilityPipelineElasticsearchDestinationDataStream dataStream) { + this.dataStream = dataStream; + this.unparsed |= dataStream.unparsed; + return this; + } + + /** + * Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. 
+ * + * @return dataStream + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DATA_STREAM) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineElasticsearchDestinationDataStream getDataStream() { + return dataStream; + } + + public void setDataStream(ObservabilityPipelineElasticsearchDestinationDataStream dataStream) { + this.dataStream = dataStream; + } + public ObservabilityPipelineElasticsearchDestination id(String id) { this.id = id; return this; @@ -238,6 +265,7 @@ public boolean equals(Object o) { (ObservabilityPipelineElasticsearchDestination) o; return Objects.equals(this.apiVersion, observabilityPipelineElasticsearchDestination.apiVersion) && Objects.equals(this.bulkIndex, observabilityPipelineElasticsearchDestination.bulkIndex) + && Objects.equals(this.dataStream, observabilityPipelineElasticsearchDestination.dataStream) && Objects.equals(this.id, observabilityPipelineElasticsearchDestination.id) && Objects.equals(this.inputs, observabilityPipelineElasticsearchDestination.inputs) && Objects.equals(this.type, observabilityPipelineElasticsearchDestination.type) @@ -248,7 +276,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(apiVersion, bulkIndex, id, inputs, type, additionalProperties); + return Objects.hash(apiVersion, bulkIndex, dataStream, id, inputs, type, additionalProperties); } @Override @@ -257,6 +285,7 @@ public String toString() { sb.append("class ObservabilityPipelineElasticsearchDestination {\n"); sb.append(" apiVersion: ").append(toIndentedString(apiVersion)).append("\n"); sb.append(" bulkIndex: ").append(toIndentedString(bulkIndex)).append("\n"); + sb.append(" dataStream: ").append(toIndentedString(dataStream)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationDataStream.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationDataStream.java new file mode 100644 index 00000000000..013ee28fc95 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationDataStream.java @@ -0,0 +1,203 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineElasticsearchDestinationDataStream.JSON_PROPERTY_DATASET, + ObservabilityPipelineElasticsearchDestinationDataStream.JSON_PROPERTY_DTYPE, + ObservabilityPipelineElasticsearchDestinationDataStream.JSON_PROPERTY_NAMESPACE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineElasticsearchDestinationDataStream { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DATASET = "dataset"; + private String dataset; + + public static final String JSON_PROPERTY_DTYPE = "dtype"; + private String dtype; + + public static final String JSON_PROPERTY_NAMESPACE = "namespace"; + private String namespace; + + public ObservabilityPipelineElasticsearchDestinationDataStream dataset(String dataset) { + this.dataset = dataset; + return this; + } + + /** + * The data stream dataset for your logs. This groups logs by their source or application. + * + * @return dataset + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DATASET) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDataset() { + return dataset; + } + + public void setDataset(String dataset) { + this.dataset = dataset; + } + + public ObservabilityPipelineElasticsearchDestinationDataStream dtype(String dtype) { + this.dtype = dtype; + return this; + } + + /** + * The data stream type for your logs. This determines how logs are categorized within the data + * stream. + * + * @return dtype + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DTYPE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDtype() { + return dtype; + } + + public void setDtype(String dtype) { + this.dtype = dtype; + } + + public ObservabilityPipelineElasticsearchDestinationDataStream namespace(String namespace) { + this.namespace = namespace; + return this; + } + + /** + * The data stream namespace for your logs. This separates logs into different environments or + * domains. + * + * @return namespace + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_NAMESPACE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getNamespace() { + return namespace; + } + + public void setNamespace(String namespace) { + this.namespace = namespace; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineElasticsearchDestinationDataStream + */ + @JsonAnySetter + public ObservabilityPipelineElasticsearchDestinationDataStream putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
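For context, the fluent dataset/dtype/namespace setters above compose with the destination's new `data_stream` field roughly as follows. The values are hypothetical, and the destination's required `id`, `inputs`, and `type` are omitted for brevity:

    ObservabilityPipelineElasticsearchDestinationDataStream dataStream =
        new ObservabilityPipelineElasticsearchDestinationDataStream()
            .dtype("logs")            // hypothetical data stream type
            .dataset("my-service")    // hypothetical dataset; groups logs by source
            .namespace("production"); // hypothetical namespace; separates environments
    ObservabilityPipelineElasticsearchDestination destination =
        new ObservabilityPipelineElasticsearchDestination().dataStream(dataStream);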
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineElasticsearchDestinationDataStream object is equal to + * o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineElasticsearchDestinationDataStream + observabilityPipelineElasticsearchDestinationDataStream = + (ObservabilityPipelineElasticsearchDestinationDataStream) o; + return Objects.equals( + this.dataset, observabilityPipelineElasticsearchDestinationDataStream.dataset) + && Objects.equals(this.dtype, observabilityPipelineElasticsearchDestinationDataStream.dtype) + && Objects.equals( + this.namespace, observabilityPipelineElasticsearchDestinationDataStream.namespace) + && Objects.equals( + this.additionalProperties, + observabilityPipelineElasticsearchDestinationDataStream.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(dataset, dtype, namespace, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineElasticsearchDestinationDataStream {\n"); + sb.append(" dataset: ").append(toIndentedString(dataset)).append("\n"); + sb.append(" dtype: ").append(toIndentedString(dtype)).append("\n"); + sb.append(" namespace: ").append(toIndentedString(namespace)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java index e19f2eb045c..51bd0b9b58b 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java @@ -18,8 +18,9 @@ import java.util.Objects; /** - * The enrichment_table processor enriches logs using a static CSV file or GeoIP - * database. + * The enrichment_table processor enriches logs using a static CSV file, GeoIP + * database, or reference table. Exactly one of file, geoip, or + * reference_table must be configured. 
*/ @JsonPropertyOrder({ ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_DISPLAY_NAME, @@ -28,6 +29,7 @@ ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_GEOIP, ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_ID, ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_REFERENCE_TABLE, ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_TARGET, ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_TYPE }) @@ -53,6 +55,9 @@ public class ObservabilityPipelineEnrichmentTableProcessor { public static final String JSON_PROPERTY_INCLUDE = "include"; private String include; + public static final String JSON_PROPERTY_REFERENCE_TABLE = "reference_table"; + private ObservabilityPipelineEnrichmentTableReferenceTable referenceTable; + public static final String JSON_PROPERTY_TARGET = "target"; private String target; @@ -205,6 +210,29 @@ public void setInclude(String include) { this.include = include; } + public ObservabilityPipelineEnrichmentTableProcessor referenceTable( + ObservabilityPipelineEnrichmentTableReferenceTable referenceTable) { + this.referenceTable = referenceTable; + this.unparsed |= referenceTable.unparsed; + return this; + } + + /** + * Uses a Datadog reference table to enrich logs. + * + * @return referenceTable + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_REFERENCE_TABLE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineEnrichmentTableReferenceTable getReferenceTable() { + return referenceTable; + } + + public void setReferenceTable(ObservabilityPipelineEnrichmentTableReferenceTable referenceTable) { + this.referenceTable = referenceTable; + } + public ObservabilityPipelineEnrichmentTableProcessor target(String target) { this.target = target; return this; @@ -315,6 +343,8 @@ public boolean equals(Object o) { && Objects.equals(this.geoip, observabilityPipelineEnrichmentTableProcessor.geoip) && Objects.equals(this.id, observabilityPipelineEnrichmentTableProcessor.id) && Objects.equals(this.include, observabilityPipelineEnrichmentTableProcessor.include) + && Objects.equals( + this.referenceTable, observabilityPipelineEnrichmentTableProcessor.referenceTable) && Objects.equals(this.target, observabilityPipelineEnrichmentTableProcessor.target) && Objects.equals(this.type, observabilityPipelineEnrichmentTableProcessor.type) && Objects.equals( @@ -325,7 +355,16 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - displayName, enabled, file, geoip, id, include, target, type, additionalProperties); + displayName, + enabled, + file, + geoip, + id, + include, + referenceTable, + target, + type, + additionalProperties); } @Override @@ -338,6 +377,7 @@ public String toString() { sb.append(" geoip: ").append(toIndentedString(geoip)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" referenceTable: ").append(toIndentedString(referenceTable)).append("\n"); sb.append(" target: ").append(toIndentedString(target)).append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); sb.append(" additionalProperties: ") diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableReferenceTable.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableReferenceTable.java new file mode 100644 index 
00000000000..8577ddcb78c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableReferenceTable.java @@ -0,0 +1,219 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** Uses a Datadog reference table to enrich logs. */ +@JsonPropertyOrder({ + ObservabilityPipelineEnrichmentTableReferenceTable.JSON_PROPERTY_COLUMNS, + ObservabilityPipelineEnrichmentTableReferenceTable.JSON_PROPERTY_KEY_FIELD, + ObservabilityPipelineEnrichmentTableReferenceTable.JSON_PROPERTY_TABLE_ID +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineEnrichmentTableReferenceTable { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_COLUMNS = "columns"; + private List columns = null; + + public static final String JSON_PROPERTY_KEY_FIELD = "key_field"; + private String keyField; + + public static final String JSON_PROPERTY_TABLE_ID = "table_id"; + private String tableId; + + public ObservabilityPipelineEnrichmentTableReferenceTable() {} + + @JsonCreator + public ObservabilityPipelineEnrichmentTableReferenceTable( + @JsonProperty(required = true, value = JSON_PROPERTY_KEY_FIELD) String keyField, + @JsonProperty(required = true, value = JSON_PROPERTY_TABLE_ID) String tableId) { + this.keyField = keyField; + this.tableId = tableId; + } + + public ObservabilityPipelineEnrichmentTableReferenceTable columns(List columns) { + this.columns = columns; + return this; + } + + public ObservabilityPipelineEnrichmentTableReferenceTable addColumnsItem(String columnsItem) { + if (this.columns == null) { + this.columns = new ArrayList<>(); + } + this.columns.add(columnsItem); + return this; + } + + /** + * List of column names to include from the reference table. If not provided, all columns are + * included. + * + * @return columns + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_COLUMNS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getColumns() { + return columns; + } + + public void setColumns(List columns) { + this.columns = columns; + } + + public ObservabilityPipelineEnrichmentTableReferenceTable keyField(String keyField) { + this.keyField = keyField; + return this; + } + + /** + * Path to the field in the log event to match against the reference table. 
+ * + * @return keyField + */ + @JsonProperty(JSON_PROPERTY_KEY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getKeyField() { + return keyField; + } + + public void setKeyField(String keyField) { + this.keyField = keyField; + } + + public ObservabilityPipelineEnrichmentTableReferenceTable tableId(String tableId) { + this.tableId = tableId; + return this; + } + + /** + * The unique identifier of the reference table. + * + * @return tableId + */ + @JsonProperty(JSON_PROPERTY_TABLE_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getTableId() { + return tableId; + } + + public void setTableId(String tableId) { + this.tableId = tableId; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineEnrichmentTableReferenceTable + */ + @JsonAnySetter + public ObservabilityPipelineEnrichmentTableReferenceTable putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineEnrichmentTableReferenceTable object is equal to o. 
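Given the processor-level rule that exactly one of `file`, `geoip`, or `reference_table` may be configured, wiring a reference table in looks roughly like this. The field path and table ID are hypothetical, and `processor` is assumed to already carry its required fields (id, include, target, enabled, type):

    ObservabilityPipelineEnrichmentTableReferenceTable referenceTable =
        new ObservabilityPipelineEnrichmentTableReferenceTable()
            .keyField("attributes.customer_id")  // hypothetical field path in the log event
            .tableId("my-reference-table-id")    // hypothetical reference table identifier
            .addColumnsItem("plan")              // hypothetical columns; omit to include all
            .addColumnsItem("region");
    processor.setReferenceTable(referenceTable); // leave file and geoip unset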
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineEnrichmentTableReferenceTable + observabilityPipelineEnrichmentTableReferenceTable = + (ObservabilityPipelineEnrichmentTableReferenceTable) o; + return Objects.equals(this.columns, observabilityPipelineEnrichmentTableReferenceTable.columns) + && Objects.equals( + this.keyField, observabilityPipelineEnrichmentTableReferenceTable.keyField) + && Objects.equals(this.tableId, observabilityPipelineEnrichmentTableReferenceTable.tableId) + && Objects.equals( + this.additionalProperties, + observabilityPipelineEnrichmentTableReferenceTable.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(columns, keyField, tableId, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineEnrichmentTableReferenceTable {\n"); + sb.append(" columns: ").append(toIndentedString(columns)).append("\n"); + sb.append(" keyField: ").append(toIndentedString(keyField)).append("\n"); + sb.append(" tableId: ").append(toIndentedString(tableId)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java new file mode 100644 index 00000000000..2f731674a97 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java @@ -0,0 +1,595 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The kafka destination sends logs to Apache Kafka topics. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_COMPRESSION, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_ENCODING, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_HEADERS_KEY, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_ID, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_KEY_FIELD, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_LIBRDKAFKA_OPTIONS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_MESSAGE_TIMEOUT_MS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_RATE_LIMIT_DURATION_SECS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_RATE_LIMIT_NUM, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_SASL, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_SOCKET_TIMEOUT_MS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TLS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TOPIC, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineKafkaDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_COMPRESSION = "compression"; + private ObservabilityPipelineKafkaDestinationCompression compression; + + public static final String JSON_PROPERTY_ENCODING = "encoding"; + private ObservabilityPipelineKafkaDestinationEncoding encoding; + + public static final String JSON_PROPERTY_HEADERS_KEY = "headers_key"; + private String headersKey; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_KEY_FIELD = "key_field"; + private String keyField; + + public static final String JSON_PROPERTY_LIBRDKAFKA_OPTIONS = "librdkafka_options"; + private List librdkafkaOptions = null; + + public static final String JSON_PROPERTY_MESSAGE_TIMEOUT_MS = "message_timeout_ms"; + private Long messageTimeoutMs; + + public static final String JSON_PROPERTY_RATE_LIMIT_DURATION_SECS = "rate_limit_duration_secs"; + private Long rateLimitDurationSecs; + + public static final String JSON_PROPERTY_RATE_LIMIT_NUM = "rate_limit_num"; + private Long rateLimitNum; + + public static final String JSON_PROPERTY_SASL = "sasl"; + private ObservabilityPipelineKafkaSasl sasl; + + public static final String JSON_PROPERTY_SOCKET_TIMEOUT_MS = "socket_timeout_ms"; + private Long socketTimeoutMs; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TOPIC = "topic"; + private String topic; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineKafkaDestinationType type = + ObservabilityPipelineKafkaDestinationType.KAFKA; + + public ObservabilityPipelineKafkaDestination() {} + + @JsonCreator + public ObservabilityPipelineKafkaDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ENCODING) + ObservabilityPipelineKafkaDestinationEncoding encoding, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TOPIC) String topic, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineKafkaDestinationType type) 
{ + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + this.id = id; + this.inputs = inputs; + this.topic = topic; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineKafkaDestination compression( + ObservabilityPipelineKafkaDestinationCompression compression) { + this.compression = compression; + this.unparsed |= !compression.isValid(); + return this; + } + + /** + * Compression codec for Kafka messages. + * + * @return compression + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_COMPRESSION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineKafkaDestinationCompression getCompression() { + return compression; + } + + public void setCompression(ObservabilityPipelineKafkaDestinationCompression compression) { + if (!compression.isValid()) { + this.unparsed = true; + } + this.compression = compression; + } + + public ObservabilityPipelineKafkaDestination encoding( + ObservabilityPipelineKafkaDestinationEncoding encoding) { + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + return this; + } + + /** + * Encoding format for log events. + * + * @return encoding + */ + @JsonProperty(JSON_PROPERTY_ENCODING) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineKafkaDestinationEncoding getEncoding() { + return encoding; + } + + public void setEncoding(ObservabilityPipelineKafkaDestinationEncoding encoding) { + if (!encoding.isValid()) { + this.unparsed = true; + } + this.encoding = encoding; + } + + public ObservabilityPipelineKafkaDestination headersKey(String headersKey) { + this.headersKey = headersKey; + return this; + } + + /** + * The field name to use for Kafka message headers. + * + * @return headersKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_HEADERS_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getHeadersKey() { + return headersKey; + } + + public void setHeadersKey(String headersKey) { + this.headersKey = headersKey; + } + + public ObservabilityPipelineKafkaDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineKafkaDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineKafkaDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineKafkaDestination keyField(String keyField) { + this.keyField = keyField; + return this; + } + + /** + * The field name to use as the Kafka message key. 
+ * + * @return keyField + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_KEY_FIELD) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getKeyField() { + return keyField; + } + + public void setKeyField(String keyField) { + this.keyField = keyField; + } + + public ObservabilityPipelineKafkaDestination librdkafkaOptions( + List librdkafkaOptions) { + this.librdkafkaOptions = librdkafkaOptions; + for (ObservabilityPipelineKafkaLibrdkafkaOption item : librdkafkaOptions) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineKafkaDestination addLibrdkafkaOptionsItem( + ObservabilityPipelineKafkaLibrdkafkaOption librdkafkaOptionsItem) { + if (this.librdkafkaOptions == null) { + this.librdkafkaOptions = new ArrayList<>(); + } + this.librdkafkaOptions.add(librdkafkaOptionsItem); + this.unparsed |= librdkafkaOptionsItem.unparsed; + return this; + } + + /** + * Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + * + * @return librdkafkaOptions + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_LIBRDKAFKA_OPTIONS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getLibrdkafkaOptions() { + return librdkafkaOptions; + } + + public void setLibrdkafkaOptions( + List librdkafkaOptions) { + this.librdkafkaOptions = librdkafkaOptions; + } + + public ObservabilityPipelineKafkaDestination messageTimeoutMs(Long messageTimeoutMs) { + this.messageTimeoutMs = messageTimeoutMs; + return this; + } + + /** + * Maximum time in milliseconds to wait for message delivery confirmation. minimum: 1 + * + * @return messageTimeoutMs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_MESSAGE_TIMEOUT_MS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getMessageTimeoutMs() { + return messageTimeoutMs; + } + + public void setMessageTimeoutMs(Long messageTimeoutMs) { + this.messageTimeoutMs = messageTimeoutMs; + } + + public ObservabilityPipelineKafkaDestination rateLimitDurationSecs(Long rateLimitDurationSecs) { + this.rateLimitDurationSecs = rateLimitDurationSecs; + return this; + } + + /** + * Duration in seconds for the rate limit window. minimum: 1 + * + * @return rateLimitDurationSecs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_RATE_LIMIT_DURATION_SECS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getRateLimitDurationSecs() { + return rateLimitDurationSecs; + } + + public void setRateLimitDurationSecs(Long rateLimitDurationSecs) { + this.rateLimitDurationSecs = rateLimitDurationSecs; + } + + public ObservabilityPipelineKafkaDestination rateLimitNum(Long rateLimitNum) { + this.rateLimitNum = rateLimitNum; + return this; + } + + /** + * Maximum number of messages allowed per rate limit duration. minimum: 1 + * + * @return rateLimitNum + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_RATE_LIMIT_NUM) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getRateLimitNum() { + return rateLimitNum; + } + + public void setRateLimitNum(Long rateLimitNum) { + this.rateLimitNum = rateLimitNum; + } + + public ObservabilityPipelineKafkaDestination sasl(ObservabilityPipelineKafkaSasl sasl) { + this.sasl = sasl; + this.unparsed |= sasl.unparsed; + return this; + } + + /** + * Specifies the SASL mechanism for authenticating with a Kafka cluster. 
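The SASL model is now shared between the Kafka source and this destination (note the rename later in this patch). A hedged sketch; the `mechanism` setter and the PLAIN constant are assumed from the renamed source-side classes and are not shown in this hunk:

    ObservabilityPipelineKafkaSasl sasl =
        new ObservabilityPipelineKafkaSasl()
            .mechanism(ObservabilityPipelineKafkaSaslMechanism.PLAIN); // assumed API, per the renamed source classes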
+ * + * @return sasl + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SASL) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineKafkaSasl getSasl() { + return sasl; + } + + public void setSasl(ObservabilityPipelineKafkaSasl sasl) { + this.sasl = sasl; + } + + public ObservabilityPipelineKafkaDestination socketTimeoutMs(Long socketTimeoutMs) { + this.socketTimeoutMs = socketTimeoutMs; + return this; + } + + /** + * Socket timeout in milliseconds for network requests. minimum: 10 maximum: 300000 + * + * @return socketTimeoutMs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SOCKET_TIMEOUT_MS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getSocketTimeoutMs() { + return socketTimeoutMs; + } + + public void setSocketTimeoutMs(Long socketTimeoutMs) { + this.socketTimeoutMs = socketTimeoutMs; + } + + public ObservabilityPipelineKafkaDestination tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineKafkaDestination topic(String topic) { + this.topic = topic; + return this; + } + + /** + * The Kafka topic name to publish logs to. + * + * @return topic + */ + @JsonProperty(JSON_PROPERTY_TOPIC) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getTopic() { + return topic; + } + + public void setTopic(String topic) { + this.topic = topic; + } + + public ObservabilityPipelineKafkaDestination type( + ObservabilityPipelineKafkaDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be kafka. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineKafkaDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineKafkaDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineKafkaDestination + */ + @JsonAnySetter + public ObservabilityPipelineKafkaDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
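Putting the pieces together, a minimal destination might be assembled as below. Component IDs and the topic are hypothetical, `type` already defaults to KAFKA, and `sasl` is the object sketched above:

    ObservabilityPipelineKafkaDestination kafka =
        new ObservabilityPipelineKafkaDestination()
            .id("kafka-destination")           // hypothetical component ID
            .addInputsItem("filter-processor") // hypothetical upstream component
            .topic("observability-logs")       // hypothetical topic name
            .encoding(ObservabilityPipelineKafkaDestinationEncoding.JSON)
            .compression(ObservabilityPipelineKafkaDestinationCompression.ZSTD)
            .messageTimeoutMs(30000L)
            .sasl(sasl);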
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineKafkaDestination object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineKafkaDestination observabilityPipelineKafkaDestination = + (ObservabilityPipelineKafkaDestination) o; + return Objects.equals(this.compression, observabilityPipelineKafkaDestination.compression) + && Objects.equals(this.encoding, observabilityPipelineKafkaDestination.encoding) + && Objects.equals(this.headersKey, observabilityPipelineKafkaDestination.headersKey) + && Objects.equals(this.id, observabilityPipelineKafkaDestination.id) + && Objects.equals(this.inputs, observabilityPipelineKafkaDestination.inputs) + && Objects.equals(this.keyField, observabilityPipelineKafkaDestination.keyField) + && Objects.equals( + this.librdkafkaOptions, observabilityPipelineKafkaDestination.librdkafkaOptions) + && Objects.equals( + this.messageTimeoutMs, observabilityPipelineKafkaDestination.messageTimeoutMs) + && Objects.equals( + this.rateLimitDurationSecs, observabilityPipelineKafkaDestination.rateLimitDurationSecs) + && Objects.equals(this.rateLimitNum, observabilityPipelineKafkaDestination.rateLimitNum) + && Objects.equals(this.sasl, observabilityPipelineKafkaDestination.sasl) + && Objects.equals( + this.socketTimeoutMs, observabilityPipelineKafkaDestination.socketTimeoutMs) + && Objects.equals(this.tls, observabilityPipelineKafkaDestination.tls) + && Objects.equals(this.topic, observabilityPipelineKafkaDestination.topic) + && Objects.equals(this.type, observabilityPipelineKafkaDestination.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineKafkaDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + compression, + encoding, + headersKey, + id, + inputs, + keyField, + librdkafkaOptions, + messageTimeoutMs, + rateLimitDurationSecs, + rateLimitNum, + sasl, + socketTimeoutMs, + tls, + topic, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineKafkaDestination {\n"); + sb.append(" compression: ").append(toIndentedString(compression)).append("\n"); + sb.append(" encoding: ").append(toIndentedString(encoding)).append("\n"); + sb.append(" headersKey: ").append(toIndentedString(headersKey)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" keyField: ").append(toIndentedString(keyField)).append("\n"); + sb.append(" librdkafkaOptions: ").append(toIndentedString(librdkafkaOptions)).append("\n"); + sb.append(" messageTimeoutMs: ").append(toIndentedString(messageTimeoutMs)).append("\n"); + sb.append(" rateLimitDurationSecs: ") + .append(toIndentedString(rateLimitDurationSecs)) + .append("\n"); + sb.append(" rateLimitNum: ").append(toIndentedString(rateLimitNum)).append("\n"); + sb.append(" sasl: ").append(toIndentedString(sasl)).append("\n"); + sb.append(" socketTimeoutMs: ").append(toIndentedString(socketTimeoutMs)).append("\n"); + sb.append(" tls: 
").append(toIndentedString(tls)).append("\n"); + sb.append(" topic: ").append(toIndentedString(topic)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java new file mode 100644 index 00000000000..0a66e9a713f --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java @@ -0,0 +1,71 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Compression codec for Kafka messages. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineKafkaDestinationCompression + .ObservabilityPipelineKafkaDestinationCompressionSerializer.class) +public class ObservabilityPipelineKafkaDestinationCompression extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("none", "gzip", "snappy", "lz4", "zstd")); + + public static final ObservabilityPipelineKafkaDestinationCompression NONE = + new ObservabilityPipelineKafkaDestinationCompression("none"); + public static final ObservabilityPipelineKafkaDestinationCompression GZIP = + new ObservabilityPipelineKafkaDestinationCompression("gzip"); + public static final ObservabilityPipelineKafkaDestinationCompression SNAPPY = + new ObservabilityPipelineKafkaDestinationCompression("snappy"); + public static final ObservabilityPipelineKafkaDestinationCompression LZ4 = + new ObservabilityPipelineKafkaDestinationCompression("lz4"); + public static final ObservabilityPipelineKafkaDestinationCompression ZSTD = + new ObservabilityPipelineKafkaDestinationCompression("zstd"); + + ObservabilityPipelineKafkaDestinationCompression(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineKafkaDestinationCompressionSerializer + extends StdSerializer { + public ObservabilityPipelineKafkaDestinationCompressionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineKafkaDestinationCompressionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineKafkaDestinationCompression value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineKafkaDestinationCompression fromValue(String value) { + return new ObservabilityPipelineKafkaDestinationCompression(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java new file mode 100644 index 00000000000..57abf5f828e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Encoding format for log events. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineKafkaDestinationEncoding + .ObservabilityPipelineKafkaDestinationEncodingSerializer.class) +public class ObservabilityPipelineKafkaDestinationEncoding extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("json", "raw_message")); + + public static final ObservabilityPipelineKafkaDestinationEncoding JSON = + new ObservabilityPipelineKafkaDestinationEncoding("json"); + public static final ObservabilityPipelineKafkaDestinationEncoding RAW_MESSAGE = + new ObservabilityPipelineKafkaDestinationEncoding("raw_message"); + + ObservabilityPipelineKafkaDestinationEncoding(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineKafkaDestinationEncodingSerializer + extends StdSerializer { + public ObservabilityPipelineKafkaDestinationEncodingSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineKafkaDestinationEncodingSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineKafkaDestinationEncoding value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineKafkaDestinationEncoding fromValue(String value) { + return new ObservabilityPipelineKafkaDestinationEncoding(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java new file mode 100644 index 00000000000..0967bbb11c5 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be kafka. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineKafkaDestinationType + .ObservabilityPipelineKafkaDestinationTypeSerializer.class) +public class ObservabilityPipelineKafkaDestinationType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("kafka")); + + public static final ObservabilityPipelineKafkaDestinationType KAFKA = + new ObservabilityPipelineKafkaDestinationType("kafka"); + + ObservabilityPipelineKafkaDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineKafkaDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineKafkaDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineKafkaDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineKafkaDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineKafkaDestinationType fromValue(String value) { + return new ObservabilityPipelineKafkaDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java similarity index 78% rename from src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java rename to src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java index e7211139e49..8d4ed2a1d60 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java @@ -19,15 +19,15 @@ /** * Represents a key-value pair used to configure low-level librdkafka client options - * for Kafka sources, such as timeouts, buffer sizes, and security settings. + * for Kafka source and destination, such as timeouts, buffer sizes, and security settings. 
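+ *
+ * <p>Illustrative sketch (not generated): one option pair, using the required
+ * {@code (name, value)} constructor shown below. The librdkafka key
+ * {@code fetch.message.max.bytes} and its value are chosen for illustration only.
+ *
+ * <pre>{@code
+ * ObservabilityPipelineKafkaLibrdkafkaOption opt =
+ *     new ObservabilityPipelineKafkaLibrdkafkaOption("fetch.message.max.bytes", "1048576");
+ * }</pre>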
*/ @JsonPropertyOrder({ - ObservabilityPipelineKafkaSourceLibrdkafkaOption.JSON_PROPERTY_NAME, - ObservabilityPipelineKafkaSourceLibrdkafkaOption.JSON_PROPERTY_VALUE + ObservabilityPipelineKafkaLibrdkafkaOption.JSON_PROPERTY_NAME, + ObservabilityPipelineKafkaLibrdkafkaOption.JSON_PROPERTY_VALUE }) @jakarta.annotation.Generated( value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") -public class ObservabilityPipelineKafkaSourceLibrdkafkaOption { +public class ObservabilityPipelineKafkaLibrdkafkaOption { @JsonIgnore public boolean unparsed = false; public static final String JSON_PROPERTY_NAME = "name"; private String name; @@ -35,17 +35,17 @@ public class ObservabilityPipelineKafkaSourceLibrdkafkaOption { public static final String JSON_PROPERTY_VALUE = "value"; private String value; - public ObservabilityPipelineKafkaSourceLibrdkafkaOption() {} + public ObservabilityPipelineKafkaLibrdkafkaOption() {} @JsonCreator - public ObservabilityPipelineKafkaSourceLibrdkafkaOption( + public ObservabilityPipelineKafkaLibrdkafkaOption( @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, @JsonProperty(required = true, value = JSON_PROPERTY_VALUE) String value) { this.name = name; this.value = value; } - public ObservabilityPipelineKafkaSourceLibrdkafkaOption name(String name) { + public ObservabilityPipelineKafkaLibrdkafkaOption name(String name) { this.name = name; return this; } @@ -65,7 +65,7 @@ public void setName(String name) { this.name = name; } - public ObservabilityPipelineKafkaSourceLibrdkafkaOption value(String value) { + public ObservabilityPipelineKafkaLibrdkafkaOption value(String value) { this.value = value; return this; } @@ -97,10 +97,10 @@ public void setValue(String value) { * * @param key The arbitrary key to set * @param value The associated value - * @return ObservabilityPipelineKafkaSourceLibrdkafkaOption + * @return ObservabilityPipelineKafkaLibrdkafkaOption */ @JsonAnySetter - public ObservabilityPipelineKafkaSourceLibrdkafkaOption putAdditionalProperty( + public ObservabilityPipelineKafkaLibrdkafkaOption putAdditionalProperty( String key, Object value) { if (this.additionalProperties == null) { this.additionalProperties = new HashMap(); @@ -132,7 +132,7 @@ public Object getAdditionalProperty(String key) { return this.additionalProperties.get(key); } - /** Return true if this ObservabilityPipelineKafkaSourceLibrdkafkaOption object is equal to o. */ + /** Return true if this ObservabilityPipelineKafkaLibrdkafkaOption object is equal to o. 
*/ @Override public boolean equals(Object o) { if (this == o) { @@ -141,14 +141,13 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ObservabilityPipelineKafkaSourceLibrdkafkaOption - observabilityPipelineKafkaSourceLibrdkafkaOption = - (ObservabilityPipelineKafkaSourceLibrdkafkaOption) o; - return Objects.equals(this.name, observabilityPipelineKafkaSourceLibrdkafkaOption.name) - && Objects.equals(this.value, observabilityPipelineKafkaSourceLibrdkafkaOption.value) + ObservabilityPipelineKafkaLibrdkafkaOption observabilityPipelineKafkaLibrdkafkaOption = + (ObservabilityPipelineKafkaLibrdkafkaOption) o; + return Objects.equals(this.name, observabilityPipelineKafkaLibrdkafkaOption.name) + && Objects.equals(this.value, observabilityPipelineKafkaLibrdkafkaOption.value) && Objects.equals( this.additionalProperties, - observabilityPipelineKafkaSourceLibrdkafkaOption.additionalProperties); + observabilityPipelineKafkaLibrdkafkaOption.additionalProperties); } @Override @@ -159,7 +158,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ObservabilityPipelineKafkaSourceLibrdkafkaOption {\n"); + sb.append("class ObservabilityPipelineKafkaLibrdkafkaOption {\n"); sb.append(" name: ").append(toIndentedString(name)).append("\n"); sb.append(" value: ").append(toIndentedString(value)).append("\n"); sb.append(" additionalProperties: ") diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java similarity index 78% rename from src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java rename to src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java index 0475f35416b..89c8f57a3ac 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java @@ -17,16 +17,16 @@ import java.util.Objects; /** Specifies the SASL mechanism for authenticating with a Kafka cluster. 
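+ *
+ * <p>Illustrative sketch (not generated): the same SASL config object can now be
+ * attached to either a Kafka source or a Kafka destination.
+ *
+ * <pre>{@code
+ * ObservabilityPipelineKafkaSasl sasl =
+ *     new ObservabilityPipelineKafkaSasl()
+ *         .mechanism(ObservabilityPipelineKafkaSaslMechanism.PLAIN);
+ * kafkaSource.sasl(sasl); // the sasl(...) setter on the source is shown in this patch
+ * }</pre>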
*/ -@JsonPropertyOrder({ObservabilityPipelineKafkaSourceSasl.JSON_PROPERTY_MECHANISM}) +@JsonPropertyOrder({ObservabilityPipelineKafkaSasl.JSON_PROPERTY_MECHANISM}) @jakarta.annotation.Generated( value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") -public class ObservabilityPipelineKafkaSourceSasl { +public class ObservabilityPipelineKafkaSasl { @JsonIgnore public boolean unparsed = false; public static final String JSON_PROPERTY_MECHANISM = "mechanism"; - private ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism; + private ObservabilityPipelineKafkaSaslMechanism mechanism; - public ObservabilityPipelineKafkaSourceSasl mechanism( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism) { + public ObservabilityPipelineKafkaSasl mechanism( + ObservabilityPipelineKafkaSaslMechanism mechanism) { this.mechanism = mechanism; this.unparsed |= !mechanism.isValid(); return this; @@ -40,11 +40,11 @@ public ObservabilityPipelineKafkaSourceSasl mechanism( @jakarta.annotation.Nullable @JsonProperty(JSON_PROPERTY_MECHANISM) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) - public ObservabilityPipelinePipelineKafkaSourceSaslMechanism getMechanism() { + public ObservabilityPipelineKafkaSaslMechanism getMechanism() { return mechanism; } - public void setMechanism(ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism) { + public void setMechanism(ObservabilityPipelineKafkaSaslMechanism mechanism) { if (!mechanism.isValid()) { this.unparsed = true; } @@ -63,10 +63,10 @@ public void setMechanism(ObservabilityPipelinePipelineKafkaSourceSaslMechanism m * * @param key The arbitrary key to set * @param value The associated value - * @return ObservabilityPipelineKafkaSourceSasl + * @return ObservabilityPipelineKafkaSasl */ @JsonAnySetter - public ObservabilityPipelineKafkaSourceSasl putAdditionalProperty(String key, Object value) { + public ObservabilityPipelineKafkaSasl putAdditionalProperty(String key, Object value) { if (this.additionalProperties == null) { this.additionalProperties = new HashMap(); } @@ -97,7 +97,7 @@ public Object getAdditionalProperty(String key) { return this.additionalProperties.get(key); } - /** Return true if this ObservabilityPipelineKafkaSourceSasl object is equal to o. */ + /** Return true if this ObservabilityPipelineKafkaSasl object is equal to o. 
*/ @Override public boolean equals(Object o) { if (this == o) { @@ -106,11 +106,11 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ObservabilityPipelineKafkaSourceSasl observabilityPipelineKafkaSourceSasl = - (ObservabilityPipelineKafkaSourceSasl) o; - return Objects.equals(this.mechanism, observabilityPipelineKafkaSourceSasl.mechanism) + ObservabilityPipelineKafkaSasl observabilityPipelineKafkaSasl = + (ObservabilityPipelineKafkaSasl) o; + return Objects.equals(this.mechanism, observabilityPipelineKafkaSasl.mechanism) && Objects.equals( - this.additionalProperties, observabilityPipelineKafkaSourceSasl.additionalProperties); + this.additionalProperties, observabilityPipelineKafkaSasl.additionalProperties); } @Override @@ -121,7 +121,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ObservabilityPipelineKafkaSourceSasl {\n"); + sb.append("class ObservabilityPipelineKafkaSasl {\n"); sb.append(" mechanism: ").append(toIndentedString(mechanism)).append("\n"); sb.append(" additionalProperties: ") .append(toIndentedString(additionalProperties)) diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java new file mode 100644 index 00000000000..8022d54681f --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java @@ -0,0 +1,67 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** SASL mechanism used for Kafka authentication. 
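+ *
+ * <p>Sketch (not generated): constants serialize to the literal wire strings, so
+ * {@code SCRAMNOT_SHANOT_256} (a generator-derived constant name) writes
+ * {@code "SCRAM-SHA-256"}.
+ *
+ * <pre>{@code
+ * ObservabilityPipelineKafkaSaslMechanism m =
+ *     ObservabilityPipelineKafkaSaslMechanism.fromValue("SCRAM-SHA-512");
+ * assert m.isValid();
+ * }</pre>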
*/ +@JsonSerialize( + using = + ObservabilityPipelineKafkaSaslMechanism.ObservabilityPipelineKafkaSaslMechanismSerializer + .class) +public class ObservabilityPipelineKafkaSaslMechanism extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512")); + + public static final ObservabilityPipelineKafkaSaslMechanism PLAIN = + new ObservabilityPipelineKafkaSaslMechanism("PLAIN"); + public static final ObservabilityPipelineKafkaSaslMechanism SCRAMNOT_SHANOT_256 = + new ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-256"); + public static final ObservabilityPipelineKafkaSaslMechanism SCRAMNOT_SHANOT_512 = + new ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-512"); + + ObservabilityPipelineKafkaSaslMechanism(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineKafkaSaslMechanismSerializer + extends StdSerializer { + public ObservabilityPipelineKafkaSaslMechanismSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineKafkaSaslMechanismSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineKafkaSaslMechanism value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineKafkaSaslMechanism fromValue(String value) { + return new ObservabilityPipelineKafkaSaslMechanism(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java index d8c3ea6254d..c74dbd02f69 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java @@ -40,10 +40,10 @@ public class ObservabilityPipelineKafkaSource { private String id; public static final String JSON_PROPERTY_LIBRDKAFKA_OPTIONS = "librdkafka_options"; - private List librdkafkaOptions = null; + private List librdkafkaOptions = null; public static final String JSON_PROPERTY_SASL = "sasl"; - private ObservabilityPipelineKafkaSourceSasl sasl; + private ObservabilityPipelineKafkaSasl sasl; public static final String JSON_PROPERTY_TLS = "tls"; private ObservabilityPipelineTls tls; @@ -112,16 +112,16 @@ public void setId(String id) { } public ObservabilityPipelineKafkaSource librdkafkaOptions( - List librdkafkaOptions) { + List librdkafkaOptions) { this.librdkafkaOptions = librdkafkaOptions; - for (ObservabilityPipelineKafkaSourceLibrdkafkaOption item : librdkafkaOptions) { + for (ObservabilityPipelineKafkaLibrdkafkaOption item : librdkafkaOptions) { this.unparsed |= item.unparsed; } return this; } public ObservabilityPipelineKafkaSource addLibrdkafkaOptionsItem( - ObservabilityPipelineKafkaSourceLibrdkafkaOption librdkafkaOptionsItem) { + ObservabilityPipelineKafkaLibrdkafkaOption librdkafkaOptionsItem) { if (this.librdkafkaOptions == null) { this.librdkafkaOptions = new ArrayList<>(); } @@ -138,16 +138,16 @@ public ObservabilityPipelineKafkaSource addLibrdkafkaOptionsItem( @jakarta.annotation.Nullable @JsonProperty(JSON_PROPERTY_LIBRDKAFKA_OPTIONS) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) - public List getLibrdkafkaOptions() { + public List getLibrdkafkaOptions() { return librdkafkaOptions; } public void setLibrdkafkaOptions( - List librdkafkaOptions) { + List librdkafkaOptions) { 
this.librdkafkaOptions = librdkafkaOptions; } - public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSourceSasl sasl) { + public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSasl sasl) { this.sasl = sasl; this.unparsed |= sasl.unparsed; return this; @@ -161,11 +161,11 @@ public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSourceSas @jakarta.annotation.Nullable @JsonProperty(JSON_PROPERTY_SASL) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) - public ObservabilityPipelineKafkaSourceSasl getSasl() { + public ObservabilityPipelineKafkaSasl getSasl() { return sasl; } - public void setSasl(ObservabilityPipelineKafkaSourceSasl sasl) { + public void setSasl(ObservabilityPipelineKafkaSasl sasl) { this.sasl = sasl; } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySource.java new file mode 100644 index 00000000000..9c0f57d11f8 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySource.java @@ -0,0 +1,274 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The opentelemetry source receives telemetry data using the OpenTelemetry Protocol + * (OTLP) over gRPC and HTTP. 
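+ *
+ * <p>Illustrative sketch (not generated): a minimal source using the required
+ * {@code (id, type)} constructor; the environment variable names are placeholders,
+ * not values defined by the API.
+ *
+ * <pre>{@code
+ * ObservabilityPipelineOpentelemetrySource src =
+ *     new ObservabilityPipelineOpentelemetrySource(
+ *             "otel-source", ObservabilityPipelineOpentelemetrySourceType.OPENTELEMETRY)
+ *         .grpcAddressKey("OTLP_GRPC_ADDRESS")  // placeholder env var name
+ *         .httpAddressKey("OTLP_HTTP_ADDRESS"); // placeholder env var name
+ * }</pre>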
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineOpentelemetrySource.JSON_PROPERTY_GRPC_ADDRESS_KEY, + ObservabilityPipelineOpentelemetrySource.JSON_PROPERTY_HTTP_ADDRESS_KEY, + ObservabilityPipelineOpentelemetrySource.JSON_PROPERTY_ID, + ObservabilityPipelineOpentelemetrySource.JSON_PROPERTY_TLS, + ObservabilityPipelineOpentelemetrySource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineOpentelemetrySource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_GRPC_ADDRESS_KEY = "grpc_address_key"; + private String grpcAddressKey; + + public static final String JSON_PROPERTY_HTTP_ADDRESS_KEY = "http_address_key"; + private String httpAddressKey; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineOpentelemetrySourceType type = + ObservabilityPipelineOpentelemetrySourceType.OPENTELEMETRY; + + public ObservabilityPipelineOpentelemetrySource() {} + + @JsonCreator + public ObservabilityPipelineOpentelemetrySource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineOpentelemetrySourceType type) { + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineOpentelemetrySource grpcAddressKey(String grpcAddressKey) { + this.grpcAddressKey = grpcAddressKey; + return this; + } + + /** + * Environment variable name containing the gRPC server address for receiving OTLP data. Must be a + * valid environment variable name (alphanumeric characters and underscores only). + * + * @return grpcAddressKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_GRPC_ADDRESS_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getGrpcAddressKey() { + return grpcAddressKey; + } + + public void setGrpcAddressKey(String grpcAddressKey) { + this.grpcAddressKey = grpcAddressKey; + } + + public ObservabilityPipelineOpentelemetrySource httpAddressKey(String httpAddressKey) { + this.httpAddressKey = httpAddressKey; + return this; + } + + /** + * Environment variable name containing the HTTP server address for receiving OTLP data. Must be a + * valid environment variable name (alphanumeric characters and underscores only). + * + * @return httpAddressKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_HTTP_ADDRESS_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getHttpAddressKey() { + return httpAddressKey; + } + + public void setHttpAddressKey(String httpAddressKey) { + this.httpAddressKey = httpAddressKey; + } + + public ObservabilityPipelineOpentelemetrySource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineOpentelemetrySource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineOpentelemetrySource type( + ObservabilityPipelineOpentelemetrySourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be opentelemetry. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineOpentelemetrySourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineOpentelemetrySourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineOpentelemetrySource + */ + @JsonAnySetter + public ObservabilityPipelineOpentelemetrySource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineOpentelemetrySource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineOpentelemetrySource observabilityPipelineOpentelemetrySource = + (ObservabilityPipelineOpentelemetrySource) o; + return Objects.equals( + this.grpcAddressKey, observabilityPipelineOpentelemetrySource.grpcAddressKey) + && Objects.equals( + this.httpAddressKey, observabilityPipelineOpentelemetrySource.httpAddressKey) + && Objects.equals(this.id, observabilityPipelineOpentelemetrySource.id) + && Objects.equals(this.tls, observabilityPipelineOpentelemetrySource.tls) + && Objects.equals(this.type, observabilityPipelineOpentelemetrySource.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineOpentelemetrySource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(grpcAddressKey, httpAddressKey, id, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineOpentelemetrySource {\n"); + sb.append(" grpcAddressKey: ").append(toIndentedString(grpcAddressKey)).append("\n"); + sb.append(" httpAddressKey: ").append(toIndentedString(httpAddressKey)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySourceType.java new file mode 100644 index 00000000000..ffeef30f86e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySourceType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be opentelemetry. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineOpentelemetrySourceType + .ObservabilityPipelineOpentelemetrySourceTypeSerializer.class) +public class ObservabilityPipelineOpentelemetrySourceType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("opentelemetry")); + + public static final ObservabilityPipelineOpentelemetrySourceType OPENTELEMETRY = + new ObservabilityPipelineOpentelemetrySourceType("opentelemetry"); + + ObservabilityPipelineOpentelemetrySourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineOpentelemetrySourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineOpentelemetrySourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineOpentelemetrySourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineOpentelemetrySourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineOpentelemetrySourceType fromValue(String value) { + return new ObservabilityPipelineOpentelemetrySourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java new file mode 100644 index 00000000000..5cd32403878 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java @@ -0,0 +1,503 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The parse_xml processor parses XML from a specified field and extracts it into the + * event. 
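+ *
+ * <p>Illustrative sketch (not generated): parse XML held in the {@code message} field;
+ * the id and include query are placeholders.
+ *
+ * <pre>{@code
+ * ObservabilityPipelineParseXMLProcessor p =
+ *     new ObservabilityPipelineParseXMLProcessor(
+ *             true,                  // enabled
+ *             "message",             // field containing the XML string
+ *             "parse-xml-processor", // id (placeholder)
+ *             "service:my-service",  // include query (placeholder)
+ *             ObservabilityPipelineParseXMLProcessorType.PARSE_XML)
+ *         .includeAttr(true)
+ *         .parseNumber(true);
+ * }</pre>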
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ALWAYS_USE_TEXT_KEY, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ATTR_PREFIX, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_DISPLAY_NAME, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_FIELD, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_INCLUDE_ATTR, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_PARSE_BOOL, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_PARSE_NULL, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_PARSE_NUMBER, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_TEXT_KEY, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineParseXMLProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ALWAYS_USE_TEXT_KEY = "always_use_text_key"; + private Boolean alwaysUseTextKey; + + public static final String JSON_PROPERTY_ATTR_PREFIX = "attr_prefix"; + private String attrPrefix; + + public static final String JSON_PROPERTY_DISPLAY_NAME = "display_name"; + private String displayName; + + public static final String JSON_PROPERTY_ENABLED = "enabled"; + private Boolean enabled; + + public static final String JSON_PROPERTY_FIELD = "field"; + private String field; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INCLUDE_ATTR = "include_attr"; + private Boolean includeAttr; + + public static final String JSON_PROPERTY_PARSE_BOOL = "parse_bool"; + private Boolean parseBool; + + public static final String JSON_PROPERTY_PARSE_NULL = "parse_null"; + private Boolean parseNull; + + public static final String JSON_PROPERTY_PARSE_NUMBER = "parse_number"; + private Boolean parseNumber; + + public static final String JSON_PROPERTY_TEXT_KEY = "text_key"; + private String textKey; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineParseXMLProcessorType type = + ObservabilityPipelineParseXMLProcessorType.PARSE_XML; + + public ObservabilityPipelineParseXMLProcessor() {} + + @JsonCreator + public ObservabilityPipelineParseXMLProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, + @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineParseXMLProcessorType type) { + this.enabled = enabled; + this.field = field; + this.id = id; + this.include = include; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineParseXMLProcessor alwaysUseTextKey(Boolean alwaysUseTextKey) { + this.alwaysUseTextKey = alwaysUseTextKey; + return this; + } + + /** + * Whether to always use a text key for element content. 
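+ * For example (an assumption about the parser's behavior, not stated in the spec):
+ * with this flag set and a {@code textKey} of {@code "value"}, {@code <item>hi</item>}
+ * would parse to {@code {"item": {"value": "hi"}}} rather than {@code {"item": "hi"}}.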
+ * + * @return alwaysUseTextKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ALWAYS_USE_TEXT_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getAlwaysUseTextKey() { + return alwaysUseTextKey; + } + + public void setAlwaysUseTextKey(Boolean alwaysUseTextKey) { + this.alwaysUseTextKey = alwaysUseTextKey; + } + + public ObservabilityPipelineParseXMLProcessor attrPrefix(String attrPrefix) { + this.attrPrefix = attrPrefix; + return this; + } + + /** + * The prefix to use for XML attributes in the parsed output. + * + * @return attrPrefix + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ATTR_PREFIX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getAttrPrefix() { + return attrPrefix; + } + + public void setAttrPrefix(String attrPrefix) { + this.attrPrefix = attrPrefix; + } + + public ObservabilityPipelineParseXMLProcessor displayName(String displayName) { + this.displayName = displayName; + return this; + } + + /** + * The display name for a component. + * + * @return displayName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public ObservabilityPipelineParseXMLProcessor enabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + /** + * Whether this processor is enabled. + * + * @return enabled + */ + @JsonProperty(JSON_PROPERTY_ENABLED) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public ObservabilityPipelineParseXMLProcessor field(String field) { + this.field = field; + return this; + } + + /** + * The name of the log field that contains an XML string. + * + * @return field + */ + @JsonProperty(JSON_PROPERTY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public ObservabilityPipelineParseXMLProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineParseXMLProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineParseXMLProcessor includeAttr(Boolean includeAttr) { + this.includeAttr = includeAttr; + return this; + } + + /** + * Whether to include XML attributes in the parsed output. 
+ * + * @return includeAttr + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_INCLUDE_ATTR) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getIncludeAttr() { + return includeAttr; + } + + public void setIncludeAttr(Boolean includeAttr) { + this.includeAttr = includeAttr; + } + + public ObservabilityPipelineParseXMLProcessor parseBool(Boolean parseBool) { + this.parseBool = parseBool; + return this; + } + + /** + * Whether to parse boolean values from strings. + * + * @return parseBool + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PARSE_BOOL) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getParseBool() { + return parseBool; + } + + public void setParseBool(Boolean parseBool) { + this.parseBool = parseBool; + } + + public ObservabilityPipelineParseXMLProcessor parseNull(Boolean parseNull) { + this.parseNull = parseNull; + return this; + } + + /** + * Whether to parse null values. + * + * @return parseNull + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PARSE_NULL) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getParseNull() { + return parseNull; + } + + public void setParseNull(Boolean parseNull) { + this.parseNull = parseNull; + } + + public ObservabilityPipelineParseXMLProcessor parseNumber(Boolean parseNumber) { + this.parseNumber = parseNumber; + return this; + } + + /** + * Whether to parse numeric values from strings. + * + * @return parseNumber + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PARSE_NUMBER) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getParseNumber() { + return parseNumber; + } + + public void setParseNumber(Boolean parseNumber) { + this.parseNumber = parseNumber; + } + + public ObservabilityPipelineParseXMLProcessor textKey(String textKey) { + this.textKey = textKey; + return this; + } + + /** + * The key name to use for text content within XML elements. Must be at least 1 character if + * specified. + * + * @return textKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TEXT_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getTextKey() { + return textKey; + } + + public void setTextKey(String textKey) { + this.textKey = textKey; + } + + public ObservabilityPipelineParseXMLProcessor type( + ObservabilityPipelineParseXMLProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be parse_xml. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineParseXMLProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineParseXMLProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineParseXMLProcessor + */ + @JsonAnySetter + public ObservabilityPipelineParseXMLProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineParseXMLProcessor object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineParseXMLProcessor observabilityPipelineParseXmlProcessor = + (ObservabilityPipelineParseXMLProcessor) o; + return Objects.equals( + this.alwaysUseTextKey, observabilityPipelineParseXmlProcessor.alwaysUseTextKey) + && Objects.equals(this.attrPrefix, observabilityPipelineParseXmlProcessor.attrPrefix) + && Objects.equals(this.displayName, observabilityPipelineParseXmlProcessor.displayName) + && Objects.equals(this.enabled, observabilityPipelineParseXmlProcessor.enabled) + && Objects.equals(this.field, observabilityPipelineParseXmlProcessor.field) + && Objects.equals(this.id, observabilityPipelineParseXmlProcessor.id) + && Objects.equals(this.include, observabilityPipelineParseXmlProcessor.include) + && Objects.equals(this.includeAttr, observabilityPipelineParseXmlProcessor.includeAttr) + && Objects.equals(this.parseBool, observabilityPipelineParseXmlProcessor.parseBool) + && Objects.equals(this.parseNull, observabilityPipelineParseXmlProcessor.parseNull) + && Objects.equals(this.parseNumber, observabilityPipelineParseXmlProcessor.parseNumber) + && Objects.equals(this.textKey, observabilityPipelineParseXmlProcessor.textKey) + && Objects.equals(this.type, observabilityPipelineParseXmlProcessor.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineParseXmlProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + alwaysUseTextKey, + attrPrefix, + displayName, + enabled, + field, + id, + include, + includeAttr, + parseBool, + parseNull, + parseNumber, + textKey, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineParseXMLProcessor {\n"); + sb.append(" alwaysUseTextKey: ").append(toIndentedString(alwaysUseTextKey)).append("\n"); + sb.append(" attrPrefix: ").append(toIndentedString(attrPrefix)).append("\n"); + sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); + sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" field: ").append(toIndentedString(field)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" includeAttr: 
").append(toIndentedString(includeAttr)).append("\n"); + sb.append(" parseBool: ").append(toIndentedString(parseBool)).append("\n"); + sb.append(" parseNull: ").append(toIndentedString(parseNull)).append("\n"); + sb.append(" parseNumber: ").append(toIndentedString(parseNumber)).append("\n"); + sb.append(" textKey: ").append(toIndentedString(textKey)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java new file mode 100644 index 00000000000..8101a254703 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be parse_xml. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineParseXMLProcessorType + .ObservabilityPipelineParseXMLProcessorTypeSerializer.class) +public class ObservabilityPipelineParseXMLProcessorType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("parse_xml")); + + public static final ObservabilityPipelineParseXMLProcessorType PARSE_XML = + new ObservabilityPipelineParseXMLProcessorType("parse_xml"); + + ObservabilityPipelineParseXMLProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineParseXMLProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineParseXMLProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineParseXMLProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineParseXMLProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineParseXMLProcessorType fromValue(String value) { + return new ObservabilityPipelineParseXMLProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java deleted file mode 100644 index 6ec67b62962..00000000000 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. - * This product includes software developed at Datadog (https://www.datadoghq.com/). - * Copyright 2019-Present Datadog, Inc. - */ - -package com.datadog.api.client.v2.model; - -import com.datadog.api.client.ModelEnum; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.SerializerProvider; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import com.fasterxml.jackson.databind.ser.std.StdSerializer; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; - -/** SASL mechanism used for Kafka authentication. 
*/ -@JsonSerialize( - using = - ObservabilityPipelinePipelineKafkaSourceSaslMechanism - .ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer.class) -public class ObservabilityPipelinePipelineKafkaSourceSaslMechanism extends ModelEnum { - - private static final Set allowedValues = - new HashSet(Arrays.asList("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512")); - - public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism PLAIN = - new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("PLAIN"); - public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism SCRAMNOT_SHANOT_256 = - new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-256"); - public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism SCRAMNOT_SHANOT_512 = - new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-512"); - - ObservabilityPipelinePipelineKafkaSourceSaslMechanism(String value) { - super(value, allowedValues); - } - - public static class ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer - extends StdSerializer { - public ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer( - Class t) { - super(t); - } - - public ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer() { - this(null); - } - - @Override - public void serialize( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism value, - JsonGenerator jgen, - SerializerProvider provider) - throws IOException, JsonProcessingException { - jgen.writeObject(value.value); - } - } - - @JsonCreator - public static ObservabilityPipelinePipelineKafkaSourceSaslMechanism fromValue(String value) { - return new ObservabilityPipelinePipelineKafkaSourceSaslMechanism(value); - } -} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java index 20442a9ea24..086ae007751 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java @@ -35,6 +35,7 @@ ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_OVERFLOW_ACTION, ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_OVERRIDES, ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_PARTITION_FIELDS, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_TOO_MANY_BUCKETS_ACTION, ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_TYPE }) @jakarta.annotation.Generated( @@ -75,6 +76,9 @@ public class ObservabilityPipelineQuotaProcessor { public static final String JSON_PROPERTY_PARTITION_FIELDS = "partition_fields"; private List partitionFields = null; + public static final String JSON_PROPERTY_TOO_MANY_BUCKETS_ACTION = "too_many_buckets_action"; + private ObservabilityPipelineQuotaProcessorOverflowAction tooManyBucketsAction; + public static final String JSON_PROPERTY_TYPE = "type"; private ObservabilityPipelineQuotaProcessorType type = ObservabilityPipelineQuotaProcessorType.QUOTA; @@ -128,9 +132,10 @@ public ObservabilityPipelineQuotaProcessor dropEvents(Boolean dropEvents) { } /** - * If set to true, logs that matched the quota filter and sent after the quota has - * been met are dropped; only logs that did not match the filter query continue through the - * pipeline. + * If set to true, logs that match the quota filter and are sent after the quota is + * exceeded are dropped. Logs that do not match the filter continue through the pipeline. 
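+ *
+ * <p>Illustrative sketch (not generated): enabling hard-drop semantics via the fluent
+ * setter; the processor's required constructor arguments are omitted for brevity.
+ *
+ * <pre>{@code
+ * quotaProcessor.dropEvents(true); // mutually exclusive with overflowAction
+ * }</pre>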
+ * Note: You can set either drop_events or overflow_action, but not both.
 *
 * @return dropEvents
 */
@@ -279,9 +284,9 @@ ObservabilityPipelineQuotaProcessor overflowAction(
 }

 /**
- * The action to take when the quota is exceeded. Options: - drop: Drop the event. -
- * no_action: Let the event pass through. - overflow_routing: Route to
- * an overflow destination.
+ * The action to take when the quota or bucket limit is exceeded. Options: - drop:
+ * Drop the event. - no_action: Let the event pass through. -
+ * overflow_routing: Route to an overflow destination.
 *
 * @return overflowAction
 */
@@ -365,6 +370,35 @@ public void setPartitionFields(List<String> partitionFields) {
 this.partitionFields = partitionFields;
 }

+ public ObservabilityPipelineQuotaProcessor tooManyBucketsAction(
+ ObservabilityPipelineQuotaProcessorOverflowAction tooManyBucketsAction) {
+ this.tooManyBucketsAction = tooManyBucketsAction;
+ this.unparsed |= !tooManyBucketsAction.isValid();
+ return this;
+ }
+
+ /**
+ * The action to take when the quota or bucket limit is exceeded. Options: - drop:
+ * Drop the event. - no_action: Let the event pass through. -
+ * overflow_routing: Route to an overflow destination.
+ *
+ * @return tooManyBucketsAction
+ */
+ @jakarta.annotation.Nullable
+ @JsonProperty(JSON_PROPERTY_TOO_MANY_BUCKETS_ACTION)
+ @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+ public ObservabilityPipelineQuotaProcessorOverflowAction getTooManyBucketsAction() {
+ return tooManyBucketsAction;
+ }
+
+ public void setTooManyBucketsAction(
+ ObservabilityPipelineQuotaProcessorOverflowAction tooManyBucketsAction) {
+ if (!tooManyBucketsAction.isValid()) {
+ this.unparsed = true;
+ }
+ this.tooManyBucketsAction = tooManyBucketsAction;
+ }
+
 public ObservabilityPipelineQuotaProcessor type(ObservabilityPipelineQuotaProcessorType type) {
 this.type = type;
 this.unparsed |= !type.isValid();
@@ -459,6 +493,8 @@ public boolean equals(Object o) {
 && Objects.equals(this.overflowAction, observabilityPipelineQuotaProcessor.overflowAction)
 && Objects.equals(this.overrides, observabilityPipelineQuotaProcessor.overrides)
 && Objects.equals(this.partitionFields, observabilityPipelineQuotaProcessor.partitionFields)
+ && Objects.equals(
+ this.tooManyBucketsAction, observabilityPipelineQuotaProcessor.tooManyBucketsAction)
 && Objects.equals(this.type, observabilityPipelineQuotaProcessor.type)
 && Objects.equals(
 this.additionalProperties, observabilityPipelineQuotaProcessor.additionalProperties);
@@ -478,6 +514,7 @@ public int hashCode() {
 overflowAction,
 overrides,
 partitionFields,
+ tooManyBucketsAction,
 type,
 additionalProperties);
 }
@@ -499,6 +536,9 @@ public String toString() {
 sb.append("  overflowAction: ").append(toIndentedString(overflowAction)).append("\n");
 sb.append("  overrides: ").append(toIndentedString(overrides)).append("\n");
 sb.append("  partitionFields: ").append(toIndentedString(partitionFields)).append("\n");
+ sb.append("  tooManyBucketsAction: ")
+ .append(toIndentedString(tooManyBucketsAction))
+ .append("\n");
 sb.append("  type: ").append(toIndentedString(type)).append("\n");
 sb.append("  additionalProperties: ")
 .append(toIndentedString(additionalProperties))
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java
index ddb04343a50..953d483a274 100644
---
a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java @@ -19,9 +19,9 @@ import java.util.Set; /** - * The action to take when the quota is exceeded. Options: - drop: Drop the event. - - * no_action: Let the event pass through. - overflow_routing: Route to an - * overflow destination. + * The action to take when the quota or bucket limit is exceeded. Options: - drop: Drop + * the event. - no_action: Let the event pass through. - overflow_routing: + * Route to an overflow destination. */ @JsonSerialize( using = diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java index 0c555e38a0e..a19053e4498 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java @@ -13,7 +13,9 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Objects; @@ -21,10 +23,10 @@ @JsonPropertyOrder({ ObservabilityPipelineSampleProcessor.JSON_PROPERTY_DISPLAY_NAME, ObservabilityPipelineSampleProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineSampleProcessor.JSON_PROPERTY_GROUP_BY, ObservabilityPipelineSampleProcessor.JSON_PROPERTY_ID, ObservabilityPipelineSampleProcessor.JSON_PROPERTY_INCLUDE, ObservabilityPipelineSampleProcessor.JSON_PROPERTY_PERCENTAGE, - ObservabilityPipelineSampleProcessor.JSON_PROPERTY_RATE, ObservabilityPipelineSampleProcessor.JSON_PROPERTY_TYPE }) @jakarta.annotation.Generated( @@ -37,6 +39,9 @@ public class ObservabilityPipelineSampleProcessor { public static final String JSON_PROPERTY_ENABLED = "enabled"; private Boolean enabled; + public static final String JSON_PROPERTY_GROUP_BY = "group_by"; + private List groupBy = null; + public static final String JSON_PROPERTY_ID = "id"; private String id; @@ -46,9 +51,6 @@ public class ObservabilityPipelineSampleProcessor { public static final String JSON_PROPERTY_PERCENTAGE = "percentage"; private Double percentage; - public static final String JSON_PROPERTY_RATE = "rate"; - private Long rate; - public static final String JSON_PROPERTY_TYPE = "type"; private ObservabilityPipelineSampleProcessorType type = ObservabilityPipelineSampleProcessorType.SAMPLE; @@ -60,11 +62,13 @@ public ObservabilityPipelineSampleProcessor( @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_PERCENTAGE) Double percentage, @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) ObservabilityPipelineSampleProcessorType type) { this.enabled = enabled; this.id = id; this.include = include; + this.percentage = percentage; this.type = type; this.unparsed |= !type.isValid(); } @@ -110,6 +114,35 @@ public void setEnabled(Boolean enabled) { this.enabled = enabled; } + public ObservabilityPipelineSampleProcessor groupBy(List groupBy) { + this.groupBy = groupBy; + return this; + } + + public 
ObservabilityPipelineSampleProcessor addGroupByItem(String groupByItem) { + if (this.groupBy == null) { + this.groupBy = new ArrayList<>(); + } + this.groupBy.add(groupByItem); + return this; + } + + /** + * Optional list of fields to group events by. Each group is sampled independently. + * + * @return groupBy + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_GROUP_BY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getGroupBy() { + return groupBy; + } + + public void setGroupBy(List groupBy) { + this.groupBy = groupBy; + } + public ObservabilityPipelineSampleProcessor id(String id) { this.id = id; return this; @@ -161,9 +194,8 @@ public ObservabilityPipelineSampleProcessor percentage(Double percentage) { * * @return percentage */ - @jakarta.annotation.Nullable @JsonProperty(JSON_PROPERTY_PERCENTAGE) - @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) public Double getPercentage() { return percentage; } @@ -172,27 +204,6 @@ public void setPercentage(Double percentage) { this.percentage = percentage; } - public ObservabilityPipelineSampleProcessor rate(Long rate) { - this.rate = rate; - return this; - } - - /** - * Number of events to sample (1 in N). minimum: 1 - * - * @return rate - */ - @jakarta.annotation.Nullable - @JsonProperty(JSON_PROPERTY_RATE) - @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) - public Long getRate() { - return rate; - } - - public void setRate(Long rate) { - this.rate = rate; - } - public ObservabilityPipelineSampleProcessor type(ObservabilityPipelineSampleProcessorType type) { this.type = type; this.unparsed |= !type.isValid(); @@ -276,10 +287,10 @@ public boolean equals(Object o) { (ObservabilityPipelineSampleProcessor) o; return Objects.equals(this.displayName, observabilityPipelineSampleProcessor.displayName) && Objects.equals(this.enabled, observabilityPipelineSampleProcessor.enabled) + && Objects.equals(this.groupBy, observabilityPipelineSampleProcessor.groupBy) && Objects.equals(this.id, observabilityPipelineSampleProcessor.id) && Objects.equals(this.include, observabilityPipelineSampleProcessor.include) && Objects.equals(this.percentage, observabilityPipelineSampleProcessor.percentage) - && Objects.equals(this.rate, observabilityPipelineSampleProcessor.rate) && Objects.equals(this.type, observabilityPipelineSampleProcessor.type) && Objects.equals( this.additionalProperties, observabilityPipelineSampleProcessor.additionalProperties); @@ -288,7 +299,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - displayName, enabled, id, include, percentage, rate, type, additionalProperties); + displayName, enabled, groupBy, id, include, percentage, type, additionalProperties); } @Override @@ -297,10 +308,10 @@ public String toString() { sb.append("class ObservabilityPipelineSampleProcessor {\n"); sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" groupBy: ").append(toIndentedString(groupBy)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" include: ").append(toIndentedString(include)).append("\n"); sb.append(" percentage: ").append(toIndentedString(percentage)).append("\n"); - sb.append(" rate: ").append(toIndentedString(rate)).append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); sb.append(" additionalProperties: ") 
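The sample processor now requires `percentage` at construction time (the `rate` field is removed) and can optionally group events before sampling. A minimal sketch using the public `@JsonCreator` constructor shown above, in its parameter order (enabled, id, include, percentage, type); the id, search query, and group-by fields are illustrative.

    // Assumes imports from com.datadog.api.client.v2.model.
    // Keep 10% of matching events, sampled independently per service/host group.
    ObservabilityPipelineSampleProcessor sample =
        new ObservabilityPipelineSampleProcessor(
                true,
                "sample-processor",
                "service:my-service",
                10.0,
                ObservabilityPipelineSampleProcessorType.SAMPLE)
            .addGroupByItem("service")
            .addGroupByItem("host");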
.append(toIndentedString(additionalProperties)) diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java index e74c5ffd699..b197df9ed19 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java @@ -19,12 +19,16 @@ /** Options for defining a custom regex pattern. */ @JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.JSON_PROPERTY_DESCRIPTION, ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.JSON_PROPERTY_RULE }) @jakarta.annotation.Generated( value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") public class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions { @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DESCRIPTION = "description"; + private String description; + public static final String JSON_PROPERTY_RULE = "rule"; private String rule; @@ -36,6 +40,28 @@ public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions( this.rule = rule; } + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions description( + String description) { + this.description = description; + return this; + } + + /** + * Human-readable description providing context about a sensitive data scanner rule + * + * @return description + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DESCRIPTION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions rule(String rule) { this.rule = rule; return this; @@ -119,6 +145,9 @@ public boolean equals(Object o) { observabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions = (ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions) o; return Objects.equals( + this.description, + observabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.description) + && Objects.equals( this.rule, observabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.rule) && Objects.equals( this.additionalProperties, @@ -128,13 +157,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(rule, additionalProperties); + return Objects.hash(description, rule, additionalProperties); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions {\n"); + sb.append(" description: ").append(toIndentedString(description)).append("\n"); sb.append(" rule: ").append(toIndentedString(rule)).append("\n"); sb.append(" additionalProperties: ") .append(toIndentedString(additionalProperties)) diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java index 76f9882783c..35fdd21bbad 100644 --- 
a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java @@ -19,6 +19,7 @@ /** Options for selecting a predefined library pattern and enabling keyword support. */ @JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.JSON_PROPERTY_DESCRIPTION, ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.JSON_PROPERTY_ID, ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions .JSON_PROPERTY_USE_RECOMMENDED_KEYWORDS @@ -27,6 +28,9 @@ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") public class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions { @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DESCRIPTION = "description"; + private String description; + public static final String JSON_PROPERTY_ID = "id"; private String id; @@ -41,6 +45,28 @@ public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions( this.id = id; } + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions description( + String description) { + this.description = description; + return this; + } + + /** + * Human-readable description providing context about a sensitive data scanner rule + * + * @return description + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DESCRIPTION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions id(String id) { this.id = id; return this; @@ -146,6 +172,9 @@ public boolean equals(Object o) { observabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions = (ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions) o; return Objects.equals( + this.description, + observabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.description) + && Objects.equals( this.id, observabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.id) && Objects.equals( this.useRecommendedKeywords, @@ -159,13 +188,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(id, useRecommendedKeywords, additionalProperties); + return Objects.hash(description, id, useRecommendedKeywords, additionalProperties); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions {\n"); + sb.append(" description: ").append(toIndentedString(description)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" useRecommendedKeywords: ") .append(toIndentedString(useRecommendedKeywords)) diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java new file mode 100644 index 00000000000..e6487f59fd8 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java @@ -0,0 +1,314 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
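Both sensitive-data-scanner pattern-option models gain the same optional `description` field with a fluent setter. A minimal sketch for the custom-pattern variant, assuming its required-argument constructor takes the regex `rule` as shown in the creator above; the pattern and description text are illustrative.

    // Assumes imports from com.datadog.api.client.v2.model.
    // Attach a human-readable description to a custom scanner rule.
    ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions options =
        new ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions(
                "\\b4\\d{15}\\b") // rule: illustrative card-number regex
            .description("Flags 16-digit numbers beginning with 4");

The library-pattern variant takes the same `description(...)` call alongside its required pattern `id`.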
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The split_array processor splits array fields into separate events based on + * configured rules. + */ +@JsonPropertyOrder({ + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_ARRAYS, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_DISPLAY_NAME, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSplitArrayProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ARRAYS = "arrays"; + private List arrays = new ArrayList<>(); + + public static final String JSON_PROPERTY_DISPLAY_NAME = "display_name"; + private String displayName; + + public static final String JSON_PROPERTY_ENABLED = "enabled"; + private Boolean enabled; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSplitArrayProcessorType type = + ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY; + + public ObservabilityPipelineSplitArrayProcessor() {} + + @JsonCreator + public ObservabilityPipelineSplitArrayProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ARRAYS) + List arrays, + @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSplitArrayProcessorType type) { + this.arrays = arrays; + this.enabled = enabled; + this.id = id; + this.include = include; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSplitArrayProcessor arrays( + List arrays) { + this.arrays = arrays; + for (ObservabilityPipelineSplitArrayProcessorArrayConfig item : arrays) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineSplitArrayProcessor addArraysItem( + ObservabilityPipelineSplitArrayProcessorArrayConfig arraysItem) { + this.arrays.add(arraysItem); + this.unparsed |= arraysItem.unparsed; + return this; + } + + /** + * A list of array split configurations. 
+ * + * @return arrays + */ + @JsonProperty(JSON_PROPERTY_ARRAYS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getArrays() { + return arrays; + } + + public void setArrays(List arrays) { + this.arrays = arrays; + } + + public ObservabilityPipelineSplitArrayProcessor displayName(String displayName) { + this.displayName = displayName; + return this; + } + + /** + * The display name for a component. + * + * @return displayName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public ObservabilityPipelineSplitArrayProcessor enabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + /** + * Whether this processor is enabled. + * + * @return enabled + */ + @JsonProperty(JSON_PROPERTY_ENABLED) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public ObservabilityPipelineSplitArrayProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSplitArrayProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. For split_array, + * this should typically be *. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineSplitArrayProcessor type( + ObservabilityPipelineSplitArrayProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be split_array. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSplitArrayProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineSplitArrayProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSplitArrayProcessor + */ + @JsonAnySetter + public ObservabilityPipelineSplitArrayProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSplitArrayProcessor object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSplitArrayProcessor observabilityPipelineSplitArrayProcessor = + (ObservabilityPipelineSplitArrayProcessor) o; + return Objects.equals(this.arrays, observabilityPipelineSplitArrayProcessor.arrays) + && Objects.equals(this.displayName, observabilityPipelineSplitArrayProcessor.displayName) + && Objects.equals(this.enabled, observabilityPipelineSplitArrayProcessor.enabled) + && Objects.equals(this.id, observabilityPipelineSplitArrayProcessor.id) + && Objects.equals(this.include, observabilityPipelineSplitArrayProcessor.include) + && Objects.equals(this.type, observabilityPipelineSplitArrayProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSplitArrayProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(arrays, displayName, enabled, id, include, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSplitArrayProcessor {\n"); + sb.append(" arrays: ").append(toIndentedString(arrays)).append("\n"); + sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); + sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java new file mode 100644 index 00000000000..fab0f28e287 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java @@ -0,0 +1,180 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Configuration for a single array split operation. */ +@JsonPropertyOrder({ + ObservabilityPipelineSplitArrayProcessorArrayConfig.JSON_PROPERTY_FIELD, + ObservabilityPipelineSplitArrayProcessorArrayConfig.JSON_PROPERTY_INCLUDE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSplitArrayProcessorArrayConfig { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELD = "field"; + private String field; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public ObservabilityPipelineSplitArrayProcessorArrayConfig() {} + + @JsonCreator + public ObservabilityPipelineSplitArrayProcessorArrayConfig( + @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include) { + this.field = field; + this.include = include; + } + + public ObservabilityPipelineSplitArrayProcessorArrayConfig field(String field) { + this.field = field; + return this; + } + + /** + * The path to the array field to split. + * + * @return field + */ + @JsonProperty(JSON_PROPERTY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public ObservabilityPipelineSplitArrayProcessorArrayConfig include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this array split operation targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. 
If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSplitArrayProcessorArrayConfig + */ + @JsonAnySetter + public ObservabilityPipelineSplitArrayProcessorArrayConfig putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSplitArrayProcessorArrayConfig object is equal to o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSplitArrayProcessorArrayConfig + observabilityPipelineSplitArrayProcessorArrayConfig = + (ObservabilityPipelineSplitArrayProcessorArrayConfig) o; + return Objects.equals(this.field, observabilityPipelineSplitArrayProcessorArrayConfig.field) + && Objects.equals(this.include, observabilityPipelineSplitArrayProcessorArrayConfig.include) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSplitArrayProcessorArrayConfig.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(field, include, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSplitArrayProcessorArrayConfig {\n"); + sb.append(" field: ").append(toIndentedString(field)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java new file mode 100644 index 00000000000..ec68b2c2563 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be split_array. */ +@JsonSerialize( + using = + ObservabilityPipelineSplitArrayProcessorType + .ObservabilityPipelineSplitArrayProcessorTypeSerializer.class) +public class ObservabilityPipelineSplitArrayProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("split_array")); + + public static final ObservabilityPipelineSplitArrayProcessorType SPLIT_ARRAY = + new ObservabilityPipelineSplitArrayProcessorType("split_array"); + + ObservabilityPipelineSplitArrayProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSplitArrayProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSplitArrayProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSplitArrayProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSplitArrayProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSplitArrayProcessorType fromValue(String value) { + return new ObservabilityPipelineSplitArrayProcessorType(value); + } +} diff --git a/src/test/resources/com/datadog/api/client/v2/api/given.json b/src/test/resources/com/datadog/api/client/v2/api/given.json index aa08181879c..3a01a060c5b 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/given.json +++ b/src/test/resources/com/datadog/api/client/v2/api/given.json @@ -727,6 +727,18 @@ "tag": "Monitors", "operationId": "CreateMonitorUserTemplate" }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processor_groups\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" + } + ], + "step": "there is a valid \"pipeline\" in the system", + "key": "pipeline", + "tag": "Observability Pipelines", + "operationId": "CreatePipeline" + }, { "parameters": [ { @@ -879,18 +891,6 @@ "tag": "CSM Threats", "operationId": "CreateCSMThreatsAgentPolicy" }, - { - "parameters": [ - { - "name": "body", - "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n 
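Putting the three new `split_array` files together: a minimal sketch that builds the processor via the public `@JsonCreator` constructor shown above (arrays, enabled, id, include, type). The field path and component ids are illustrative.

    // Assumes imports from com.datadog.api.client.v2.model.
    // Split each element of the "records" array field into its own event.
    ObservabilityPipelineSplitArrayProcessor splitArray =
        new ObservabilityPipelineSplitArrayProcessor(
            java.util.Arrays.asList(
                new ObservabilityPipelineSplitArrayProcessorArrayConfig("records", "*")),
            true,
            "split-array-processor",
            "*",
            ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY);

    // The enum wrapper tolerates unknown values instead of failing deserialization:
    ObservabilityPipelineSplitArrayProcessorType unknown =
        ObservabilityPipelineSplitArrayProcessorType.fromValue("not_a_real_type");
    // unknown.isValid() returns false; setType(unknown) marks the model as unparsed
    // rather than throwing, per the setter shown above.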
\"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" - } - ], - "step": "there is a valid \"pipeline\" in the system", - "key": "pipeline", - "tag": "Observability Pipelines", - "operationId": "CreatePipeline" - }, { "parameters": [ { diff --git a/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature b/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature index c43fa8b3b76..afe602c3ba9 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature +++ b/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature @@ -12,7 +12,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Bad Request" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request @@ -20,7 +20,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Conflict" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": 
"datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -28,7 +28,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "OK" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 201 OK And the response "data" has field "id" @@ -106,7 +106,7 @@ Feature: Observability Pipelines And new "UpdatePipeline" request And there is a valid "pipeline" in the system And request contains "pipeline_id" parameter from "pipeline.data.id" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request @@ -115,7 +115,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter from 
"REPLACE.ME" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -124,7 +124,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter with value "3fa85f64-5717-4562-b3fc-2c963f66afa6" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 404 Not Found @@ -134,7 +134,7 @@ Feature: Observability Pipelines And there is a valid "pipeline" in the system And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "pipeline.data.id" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], 
"sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 200 OK And the response "data" has field "id" @@ -149,7 +149,7 @@ Feature: Observability Pipelines Scenario: Validate an observability pipeline returns "Bad Request" response Given operation "ValidatePipeline" enabled And new "ValidatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request And the response "errors[0].title" is equal to "Field 'include' is required" @@ -161,7 +161,7 @@ Feature: Observability Pipelines Scenario: Validate an observability pipeline returns "OK" response Given operation "ValidatePipeline" enabled And new "ValidatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the 
request is sent Then the response status is 200 OK And the response "errors" has length 0 diff --git a/src/test/resources/com/datadog/api/client/v2/api/undo.json b/src/test/resources/com/datadog/api/client/v2/api/undo.json index eb43f26dbec..25c59473dad 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/undo.json +++ b/src/test/resources/com/datadog/api/client/v2/api/undo.json @@ -2838,6 +2838,31 @@ "type": "safe" } }, + "ListPipelines": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "CreatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "operationId": "DeletePipeline", + "parameters": [ + { + "name": "pipeline_id", + "source": "data.id" + } + ], + "type": "unsafe" + } + }, + "ValidatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, "CreateOnCallEscalationPolicy": { "tag": "On-Call", "undo": { @@ -3443,31 +3468,6 @@ "type": "idempotent" } }, - "ListPipelines": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "CreatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "operationId": "DeletePipeline", - "parameters": [ - { - "name": "pipeline_id", - "source": "data.id" - } - ], - "type": "unsafe" - } - }, - "ValidatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, "DeletePipeline": { "tag": "Observability Pipelines", "undo": {