From 14de1aa34475c25a7dd9a16f31ad21eb6f7d491b Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Thu, 8 Jan 2026 14:22:53 +0000 Subject: [PATCH] Regenerate client from commit 656ed5b of spec repo --- .generator/schemas/v2/openapi.yaml | 1652 +++++++++++++---- .../com/datadog/api/client/ApiClient.java | 12 +- .../v2/api/ObservabilityPipelinesApi.java | 24 +- .../v2/model/AzureStorageDestination.java | 6 +- .../model/MicrosoftSentinelDestination.java | 6 +- ...ervabilityPipelineAddEnvVarsProcessor.java | 6 +- ...servabilityPipelineAddFieldsProcessor.java | 6 +- ...rvabilityPipelineAddHostnameProcessor.java | 272 +++ ...ilityPipelineAddHostnameProcessorType.java | 63 + ...ilityPipelineAmazonDataFirehoseSource.java | 6 +- ...tyPipelineAmazonOpenSearchDestination.java | 6 +- ...ervabilityPipelineAmazonS3Destination.java | 2 + .../ObservabilityPipelineAmazonS3Source.java | 2 + ...PipelineAmazonSecurityLakeDestination.java | 6 +- ...rvabilityPipelineCloudPremDestination.java | 223 +++ ...ilityPipelineCloudPremDestinationType.java | 62 + .../v2/model/ObservabilityPipelineConfig.java | 35 +- ...vabilityPipelineConfigDestinationItem.java | 1345 +++++++++----- ...servabilityPipelineConfigPipelineType.java | 65 + ...ervabilityPipelineConfigProcessorItem.java | 1256 ++++++++----- ...ObservabilityPipelineConfigSourceItem.java | 928 ++++----- ...lineCrowdStrikeNextGenSiemDestination.java | 2 + .../ObservabilityPipelineCustomProcessor.java | 2 + ...servabilityPipelineDatadogAgentSource.java | 6 +- ...abilityPipelineDatadogLogsDestination.java | 6 +- ...lityPipelineDatadogMetricsDestination.java | 224 +++ ...PipelineDatadogMetricsDestinationType.java | 63 + ...rvabilityPipelineDatadogTagsProcessor.java | 2 + .../ObservabilityPipelineDedupeProcessor.java | 6 +- ...ilityPipelineElasticsearchDestination.java | 37 +- ...ineElasticsearchDestinationDataStream.java | 203 ++ ...ilityPipelineEnrichmentTableProcessor.java | 48 +- ...PipelineEnrichmentTableReferenceTable.java | 219 +++ .../ObservabilityPipelineFilterProcessor.java | 11 +- .../ObservabilityPipelineFluentBitSource.java | 6 +- .../ObservabilityPipelineFluentdSource.java | 6 +- ...ilityPipelineGenerateMetricsProcessor.java | 2 + ...ityPipelineGoogleChronicleDestination.java | 6 +- ...PipelineGoogleCloudStorageDestination.java | 2 + ...bilityPipelineGooglePubSubDestination.java | 6 +- ...servabilityPipelineGooglePubSubSource.java | 6 +- ...vabilityPipelineHttpClientDestination.java | 350 ++++ ...lineHttpClientDestinationAuthStrategy.java | 67 + ...elineHttpClientDestinationCompression.java | 159 ++ ...ClientDestinationCompressionAlgorithm.java | 64 + ...PipelineHttpClientDestinationEncoding.java | 62 + ...lityPipelineHttpClientDestinationType.java | 63 + ...ObservabilityPipelineHttpClientSource.java | 6 +- ...yPipelineHttpClientSourceAuthStrategy.java | 4 +- ...ObservabilityPipelineHttpServerSource.java | 6 +- ...ObservabilityPipelineKafkaDestination.java | 599 ++++++ ...tyPipelineKafkaDestinationCompression.java | 71 + ...ilityPipelineKafkaDestinationEncoding.java | 65 + ...rvabilityPipelineKafkaDestinationType.java | 62 + ...abilityPipelineKafkaLibrdkafkaOption.java} | 35 +- ...va => ObservabilityPipelineKafkaSasl.java} | 30 +- ...servabilityPipelineKafkaSaslMechanism.java | 67 + .../ObservabilityPipelineKafkaSource.java | 26 +- .../ObservabilityPipelineLogstashSource.java | 6 +- ...ervabilityPipelineMetricTagsProcessor.java | 315 ++++ ...bilityPipelineMetricTagsProcessorRule.java | 254 +++ ...PipelineMetricTagsProcessorRuleAction.java | 65 + 
...tyPipelineMetricTagsProcessorRuleMode.java | 62 + ...bilityPipelineMetricTagsProcessorType.java | 63 + ...ervabilityPipelineNewRelicDestination.java | 6 +- ...ervabilityPipelineOcsfMapperProcessor.java | 2 + ...vabilityPipelineOpenSearchDestination.java | 6 +- ...ervabilityPipelineOpentelemetrySource.java | 276 +++ ...bilityPipelineOpentelemetrySourceType.java | 63 + ...servabilityPipelineParseGrokProcessor.java | 2 + ...servabilityPipelineParseJSONProcessor.java | 2 + ...bservabilityPipelineParseXMLProcessor.java | 505 +++++ ...vabilityPipelineParseXMLProcessorType.java | 62 + ...elinePipelineKafkaSourceSaslMechanism.java | 67 - .../ObservabilityPipelineQuotaProcessor.java | 58 +- ...yPipelineQuotaProcessorOverflowAction.java | 6 +- .../ObservabilityPipelineReduceProcessor.java | 2 + ...vabilityPipelineRemoveFieldsProcessor.java | 6 +- ...vabilityPipelineRenameFieldsProcessor.java | 6 +- ...servabilityPipelineRsyslogDestination.java | 2 + .../ObservabilityPipelineRsyslogSource.java | 2 + .../ObservabilityPipelineSampleProcessor.java | 77 +- ...PipelineSensitiveDataScannerProcessor.java | 2 + ...aScannerProcessorCustomPatternOptions.java | 32 +- ...ScannerProcessorLibraryPatternOptions.java | 32 +- ...abilityPipelineSentinelOneDestination.java | 6 +- ...bservabilityPipelineSocketDestination.java | 6 +- .../ObservabilityPipelineSocketSource.java | 6 +- ...ervabilityPipelineSplitArrayProcessor.java | 316 ++++ ...ipelineSplitArrayProcessorArrayConfig.java | 180 ++ ...bilityPipelineSplitArrayProcessorType.java | 63 + ...rvabilityPipelineSplunkHecDestination.java | 2 + .../ObservabilityPipelineSplunkHecSource.java | 6 +- .../ObservabilityPipelineSplunkTcpSource.java | 2 + ...rvabilityPipelineSumoLogicDestination.java | 6 +- .../ObservabilityPipelineSumoLogicSource.java | 6 +- ...ervabilityPipelineSyslogNgDestination.java | 2 + .../ObservabilityPipelineSyslogNgSource.java | 2 + ...bservabilityPipelineThrottleProcessor.java | 2 + ...peline_returns_Bad_Request_response.freeze | 2 +- ...pipeline_returns_Bad_Request_response.json | 4 +- ..._a_new_pipeline_returns_OK_response.freeze | 2 +- ...te_a_new_pipeline_returns_OK_response.json | 10 +- ...pipeline_returns_Not_Found_response.freeze | 2 +- ...a_pipeline_returns_Not_Found_response.json | 4 +- ...lete_a_pipeline_returns_OK_response.freeze | 2 +- ...Delete_a_pipeline_returns_OK_response.json | 14 +- ...ecific_pipeline_returns_OK_response.freeze | 2 +- ...specific_pipeline_returns_OK_response.json | 16 +- ...elines_returns_Bad_Request_response.freeze | 2 +- ...ipelines_returns_Bad_Request_response.json | 4 +- .../List_pipelines_returns_OK_response.freeze | 2 +- .../List_pipelines_returns_OK_response.json | 16 +- ...peline_returns_Bad_Request_response.freeze | 2 +- ...pipeline_returns_Bad_Request_response.json | 14 +- ...pipeline_returns_Not_Found_response.freeze | 2 +- ...a_pipeline_returns_Not_Found_response.json | 4 +- ...date_a_pipeline_returns_OK_response.freeze | 2 +- ...Update_a_pipeline_returns_OK_response.json | 16 +- ...peline_returns_Bad_Request_response.freeze | 2 +- ...pipeline_returns_Bad_Request_response.json | 4 +- ...bility_pipeline_returns_OK_response.freeze | 2 +- ...vability_pipeline_returns_OK_response.json | 4 +- .../com/datadog/api/client/v2/api/given.json | 24 +- .../v2/api/observability_pipelines.feature | 4 +- .../com/datadog/api/client/v2/api/undo.json | 86 +- 126 files changed, 9314 insertions(+), 2102 deletions(-) create mode 100644 
src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigPipelineType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestination.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestinationType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationDataStream.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableReferenceTable.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestination.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationAuthStrategy.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompression.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationEncoding.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java rename src/main/java/com/datadog/api/client/v2/model/{ObservabilityPipelineKafkaSourceLibrdkafkaOption.java => ObservabilityPipelineKafkaLibrdkafkaOption.java} (78%) rename src/main/java/com/datadog/api/client/v2/model/{ObservabilityPipelineKafkaSourceSasl.java => ObservabilityPipelineKafkaSasl.java} (78%) create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessor.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRule.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRuleAction.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRuleMode.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySource.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySourceType.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java 
create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java delete mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java create mode 100644 src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 8a53c755c88..1366d36e295 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -6677,8 +6677,11 @@ components: x-enum-varnames: - AZURE_SCAN_OPTIONS AzureStorageDestination: - description: The `azure_storage` destination forwards logs to an Azure Blob + description: 'The `azure_storage` destination forwards logs to an Azure Blob Storage container. + + + **Supported pipeline types:** logs' properties: blob_prefix: description: Optional prefix for blobs written to the container. @@ -6709,6 +6712,8 @@ components: - inputs - container_name type: object + x-pipeline-types: + - logs AzureStorageDestinationType: default: azure_storage description: The destination type. The value should always be `azure_storage`. @@ -33515,8 +33520,11 @@ components: - query type: object MicrosoftSentinelDestination: - description: The `microsoft_sentinel` destination forwards logs to Microsoft + description: 'The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. + + + **Supported pipeline types:** logs' properties: client_id: description: Azure AD client ID used for authentication. @@ -33557,6 +33565,8 @@ components: - dcr_immutable_id - table type: object + x-pipeline-types: + - logs MicrosoftSentinelDestinationType: default: microsoft_sentinel description: The destination type. The value should always be `microsoft_sentinel`. @@ -35261,8 +35271,11 @@ components: - data type: object ObservabilityPipelineAddEnvVarsProcessor: - description: The `add_env_vars` processor adds environment variable values to - log events. + description: 'The `add_env_vars` processor adds environment variable values + to log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35294,6 +35307,8 @@ components: - variables - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddEnvVarsProcessorType: default: add_env_vars description: The processor type. The value should always be `add_env_vars`. @@ -35319,7 +35334,10 @@ components: - name type: object ObservabilityPipelineAddFieldsProcessor: - description: The `add_fields` processor adds static key-value fields to logs. + description: 'The `add_fields` processor adds static key-value fields to logs. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35353,6 +35371,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddFieldsProcessorType: default: add_fields description: The processor type. The value should always be `add_fields`. 
@@ -35362,8 +35382,53 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAddHostnameProcessor: + description: 'The `add_hostname` processor adds the hostname to log events. + + + **Supported pipeline types:** logs' + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: add-hostname-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessorType' + required: + - id + - type + - include + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineAddHostnameProcessorType: + default: add_hostname + description: The processor type. The value should always be `add_hostname`. + enum: + - add_hostname + example: add_hostname + type: string + x-enum-varnames: + - ADD_HOSTNAME ObservabilityPipelineAmazonDataFirehoseSource: - description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + description: 'The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35381,6 +35446,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonDataFirehoseSourceType: default: amazon_data_firehose description: The source type. The value should always be `amazon_data_firehose`. @@ -35391,7 +35458,10 @@ components: x-enum-varnames: - AMAZON_DATA_FIREHOSE ObservabilityPipelineAmazonOpenSearchDestination: - description: The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + description: 'The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' @@ -35419,6 +35489,8 @@ components: - inputs - auth type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonOpenSearchDestinationAuth: description: 'Authentication settings for the Amazon OpenSearch destination. @@ -35462,8 +35534,11 @@ components: x-enum-varnames: - AMAZON_OPENSEARCH ObservabilityPipelineAmazonS3Destination: - description: The `amazon_s3` destination sends your logs in Datadog-rehydratable + description: 'The `amazon_s3` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35504,6 +35579,8 @@ components: - region - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3DestinationStorageClass: description: S3 storage class. enum: @@ -35540,7 +35617,10 @@ components: ObservabilityPipelineAmazonS3Source: description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket. - It supports AWS authentication and TLS encryption.' + It supports AWS authentication and TLS encryption. 
+ + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35563,6 +35643,8 @@ components: - type - region type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3SourceType: default: amazon_s3 description: The source type. Always `amazon_s3`. @@ -35573,8 +35655,11 @@ components: x-enum-varnames: - AMAZON_S3 ObservabilityPipelineAmazonSecurityLakeDestination: - description: The `amazon_security_lake` destination sends your logs to Amazon + description: 'The `amazon_security_lake` destination sends your logs to Amazon Security Lake. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35614,6 +35699,8 @@ components: - region - custom_source_name type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonSecurityLakeDestinationType: default: amazon_security_lake description: The destination type. Always `amazon_security_lake`. @@ -35639,6 +35726,42 @@ components: role session. type: string type: object + ObservabilityPipelineCloudPremDestination: + description: 'The `cloud_prem` destination sends logs to Datadog CloudPrem. + + + **Supported pipeline types:** logs' + properties: + id: + description: The unique identifier for this component. + example: cloud-prem-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - logs + ObservabilityPipelineCloudPremDestinationType: + default: cloud_prem + description: The destination type. The value should always be `cloud_prem`. + enum: + - cloud_prem + example: cloud_prem + type: string + x-enum-varnames: + - CLOUD_PREM ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35657,6 +35780,8 @@ components: items: $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem' type: array + pipeline_type: + $ref: '#/components/schemas/ObservabilityPipelineConfigPipelineType' processors: description: A list of processor groups that transform or enrich log data. example: @@ -35693,25 +35818,40 @@ components: ObservabilityPipelineConfigDestinationItem: description: A destination for the pipeline. 
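Reviewer note on the new `cloud_prem` destination above: the generated model (`ObservabilityPipelineCloudPremDestination` in the diffstat) should be straightforward to assemble. A minimal sketch, assuming the fluent setters follow the conventions of the client's existing destination models:

```java
import java.util.Collections;

import com.datadog.api.client.v2.model.ObservabilityPipelineCloudPremDestination;
import com.datadog.api.client.v2.model.ObservabilityPipelineCloudPremDestinationType;

public class CloudPremExample {
  public static void main(String[] args) {
    // Required by the schema: id, type, inputs. Values mirror the spec examples.
    ObservabilityPipelineCloudPremDestination dest =
        new ObservabilityPipelineCloudPremDestination()
            .id("cloud-prem-destination")
            .type(ObservabilityPipelineCloudPremDestinationType.CLOUD_PREM) // assumed setter
            .inputs(Collections.singletonList("filter-processor"));
    System.out.println(dest);
  }
}
```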
oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' - - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - $ref: '#/components/schemas/AzureStorageDestination' - - $ref: '#/components/schemas/MicrosoftSentinelDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaDestination' + - $ref: '#/components/schemas/MicrosoftSentinelDestination' - $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineSocketDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' - - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestination' + ObservabilityPipelineConfigPipelineType: + default: logs + description: The type of data being ingested. Defaults to `logs` if not specified. + enum: + - logs + - metrics + example: logs + type: string + x-enum-varnames: + - LOGS + - METRICS ObservabilityPipelineConfigProcessorGroup: description: A group of processors. example: @@ -35785,45 +35925,53 @@ components: description: A processor for the pipeline. 
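The `pipeline_type` property and its `ObservabilityPipelineConfigPipelineType` enum above are the backbone of this patch: every component now carries an `x-pipeline-types` extension, and the config declares whether it ingests logs or metrics, defaulting to `logs`. A hedged sketch, assuming a camelCase `pipelineType` setter is generated for the new property:

```java
import com.datadog.api.client.v2.model.ObservabilityPipelineConfig;
import com.datadog.api.client.v2.model.ObservabilityPipelineConfigPipelineType;

public class PipelineTypeExample {
  public static void main(String[] args) {
    // pipeline_type is optional and defaults to LOGS; METRICS opts the
    // pipeline into the metrics-only components introduced in this patch.
    ObservabilityPipelineConfig config =
        new ObservabilityPipelineConfig()
            .pipelineType(ObservabilityPipelineConfigPipelineType.METRICS);
    System.out.println(config);
  }
}
```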
oneOf: - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. 
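The processor `oneOf` above gains `add_hostname`, `parse_xml`, `split_array`, and `metric_tags` variants. A sketch of wrapping one of the new processors, assuming `ObservabilityPipelineConfigProcessorItem` keeps the client's usual per-variant oneOf constructors and that `type` is default-initialized in the generated model:

```java
import com.datadog.api.client.v2.model.ObservabilityPipelineAddHostnameProcessor;
import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorItem;

public class AddHostnameExample {
  public static void main(String[] args) {
    // Required by the add_hostname schema: id, type, include, enabled.
    ObservabilityPipelineAddHostnameProcessor proc =
        new ObservabilityPipelineAddHostnameProcessor()
            .id("add-hostname-processor")
            .include("service:my-service")
            .enabled(true);
    ObservabilityPipelineConfigProcessorItem item =
        new ObservabilityPipelineConfigProcessorItem(proc); // assumed constructor
    System.out.println(item);
  }
}
```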
oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Source' - - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineFluentBitSource' - - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' + - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSource' - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - $ref: '#/components/schemas/ObservabilityPipelineSocketSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' + - $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySource' ObservabilityPipelineCrowdStrikeNextGenSiemDestination: - description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike + description: 'The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. + + + **Supported pipeline types:** logs' properties: compression: $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression' @@ -35851,6 +35999,8 @@ components: - inputs - encoding type: object + x-pipeline-types: + - logs ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression: description: Compression configuration for log events. properties: @@ -35894,9 +36044,12 @@ components: x-enum-varnames: - CROWDSTRIKE_NEXT_GEN_SIEM ObservabilityPipelineCustomProcessor: - description: The `custom_processor` processor transforms events using [Vector + description: 'The `custom_processor` processor transforms events using [Vector Remap Language (VRL)](https://vector.dev/docs/reference/vrl/) scripts with advanced filtering capabilities. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35930,6 +36083,8 @@ components: - remaps - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineCustomProcessorRemap: description: Defines a single VRL remap rule with its own filtering and transformation logic. @@ -36005,7 +36160,11 @@ components: - config type: object ObservabilityPipelineDatadogAgentSource: - description: The `datadog_agent` source collects logs from the Datadog Agent. + description: 'The `datadog_agent` source collects logs/metrics from the Datadog + Agent. 
+ + + **Supported pipeline types:** logs, metrics' properties: id: description: The unique identifier for this component. Used to reference @@ -36021,6 +36180,9 @@ components: - id - type type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineDatadogAgentSourceType: default: datadog_agent description: The source type. The value should always be `datadog_agent`. @@ -36031,7 +36193,10 @@ components: x-enum-varnames: - DATADOG_AGENT ObservabilityPipelineDatadogLogsDestination: - description: The `datadog_logs` destination forwards logs to Datadog Log Management. + description: 'The `datadog_logs` destination forwards logs to Datadog Log Management. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -36052,6 +36217,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogLogsDestinationType: default: datadog_logs description: The destination type. The value should always be `datadog_logs`. @@ -36061,9 +36228,48 @@ components: type: string x-enum-varnames: - DATADOG_LOGS + ObservabilityPipelineDatadogMetricsDestination: + description: 'The `datadog_metrics` destination forwards metrics to Datadog. + + + **Supported pipeline types:** metrics' + properties: + id: + description: The unique identifier for this component. + example: datadog-metrics-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - metric-tags-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineDatadogMetricsDestinationType: + default: datadog_metrics + description: The destination type. The value should always be `datadog_metrics`. + enum: + - datadog_metrics + example: datadog_metrics + type: string + x-enum-varnames: + - DATADOG_METRICS ObservabilityPipelineDatadogTagsProcessor: - description: The `datadog_tags` processor includes or excludes specific Datadog + description: 'The `datadog_tags` processor includes or excludes specific Datadog tags in your logs. + + + **Supported pipeline types:** logs' properties: action: $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessorAction' @@ -36106,6 +36312,8 @@ components: - keys - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogTagsProcessorAction: description: The action to take on tags with matching keys. enum: @@ -36148,7 +36356,10 @@ components: - DECODE_JSON - DECODE_SYSLOG ObservabilityPipelineDedupeProcessor: - description: The `dedupe` processor removes duplicate fields in log events. + description: 'The `dedupe` processor removes duplicate fields in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36185,6 +36396,8 @@ components: - mode - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDedupeProcessorMode: description: The deduplication mode to apply to the fields. enum: @@ -36205,8 +36418,11 @@ components: x-enum-varnames: - DEDUPE ObservabilityPipelineElasticsearchDestination: - description: The `elasticsearch` destination writes logs to an Elasticsearch + description: 'The `elasticsearch` destination writes logs to an Elasticsearch cluster. 
+ + + **Supported pipeline types:** logs' properties: api_version: $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' @@ -36214,6 +36430,8 @@ components: description: The index to write logs to in Elasticsearch. example: logs-index type: string + data_stream: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationDataStream' id: description: The unique identifier for this component. example: elasticsearch-destination @@ -36233,6 +36451,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineElasticsearchDestinationApiVersion: description: The Elasticsearch API version to use. Set to `auto` to auto-detect. enum: @@ -36247,6 +36467,23 @@ components: - V6 - V7 - V8 + ObservabilityPipelineElasticsearchDestinationDataStream: + description: Configuration options for writing to Elasticsearch Data Streams + instead of a fixed index. + properties: + dataset: + description: The data stream dataset for your logs. This groups logs by + their source or application. + type: string + dtype: + description: The data stream type for your logs. This determines how logs + are categorized within the data stream. + type: string + namespace: + description: The data stream namespace for your logs. This separates logs + into different environments or domains. + type: string + type: object ObservabilityPipelineElasticsearchDestinationType: default: elasticsearch description: The destination type. The value should always be `elasticsearch`. @@ -36386,8 +36623,12 @@ components: - path type: object ObservabilityPipelineEnrichmentTableProcessor: - description: The `enrichment_table` processor enriches logs using a static CSV - file or GeoIP database. + description: 'The `enrichment_table` processor enriches logs using a static + CSV file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, + or `reference_table` must be configured. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36408,6 +36649,8 @@ components: targets. example: source:my-source type: string + reference_table: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableReferenceTable' target: description: Path where enrichment results should be stored in the log. example: enriched.geoip @@ -36421,6 +36664,8 @@ components: - target - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineEnrichmentTableProcessorType: default: enrichment_table description: The processor type. The value should always be `enrichment_table`. @@ -36430,6 +36675,28 @@ components: type: string x-enum-varnames: - ENRICHMENT_TABLE + ObservabilityPipelineEnrichmentTableReferenceTable: + description: Uses a Datadog reference table to enrich logs. + properties: + columns: + description: List of column names to include from the reference table. If + not provided, all columns are included. + items: + type: string + type: array + key_field: + description: Path to the field in the log event to match against the reference + table. + example: log.user.id + type: string + table_id: + description: The unique identifier of the reference table. + example: 550e8400-e29b-41d4-a716-446655440000 + type: string + required: + - key_field + - table_id + type: object ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. 
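Worth calling out above: the Elasticsearch destination can now target Data Streams through the optional `data_stream` object, whose three sub-fields are all optional strings. A sketch, assuming `dataStream`, `dtype`, `dataset`, and `namespace` setters are generated from the snake_case properties:

```java
import java.util.Collections;

import com.datadog.api.client.v2.model.ObservabilityPipelineElasticsearchDestination;
import com.datadog.api.client.v2.model.ObservabilityPipelineElasticsearchDestinationDataStream;

public class DataStreamExample {
  public static void main(String[] args) {
    // Route logs to a data stream instead of the fixed bulk index.
    ObservabilityPipelineElasticsearchDestinationDataStream stream =
        new ObservabilityPipelineElasticsearchDestinationDataStream()
            .dtype("logs")            // data stream type
            .dataset("nginx")         // groups logs by source or application
            .namespace("production"); // separates environments or domains
    ObservabilityPipelineElasticsearchDestination dest =
        new ObservabilityPipelineElasticsearchDestination()
            .id("elasticsearch-destination")
            .inputs(Collections.singletonList("filter-processor"))
            .dataStream(stream);
    System.out.println(dest);
  }
}
```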
properties: @@ -36446,9 +36713,12 @@ components: - value type: object ObservabilityPipelineFilterProcessor: - description: The `filter` processor allows conditional processing of logs based - on a Datadog search query. Logs that match the `include` query are passed - through; others are discarded. + description: 'The `filter` processor allows conditional processing of logs/metrics + based on a Datadog search query. Logs/metrics that match the `include` query + are passed through; others are discarded. + + + **Supported pipeline types:** logs, metrics' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36463,9 +36733,9 @@ components: example: filter-processor type: string include: - description: A Datadog search query used to determine which logs should - pass through the filter. Logs that match this query continue to downstream - components; others are dropped. + description: A Datadog search query used to determine which logs/metrics + should pass through the filter. Logs/metrics that match this query continue + to downstream components; others are dropped. example: service:my-service type: string type: @@ -36476,6 +36746,9 @@ components: - include - enabled type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineFilterProcessorType: default: filter description: The processor type. The value should always be `filter`. @@ -36486,7 +36759,10 @@ components: x-enum-varnames: - FILTER ObservabilityPipelineFluentBitSource: - description: The `fluent_bit` source ingests logs from Fluent Bit. + description: 'The `fluent_bit` source ingests logs from Fluent Bit. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -36502,6 +36778,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentBitSourceType: default: fluent_bit description: The source type. The value should always be `fluent_bit`. @@ -36512,7 +36790,10 @@ components: x-enum-varnames: - FLUENT_BIT ObservabilityPipelineFluentdSource: - description: The `fluentd` source ingests logs from a Fluentd-compatible service. + description: 'The `fluentd` source ingests logs from a Fluentd-compatible service. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -36528,6 +36809,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentdSourceType: default: fluentd description: The source type. The value should always be `fluentd. @@ -36552,7 +36835,10 @@ components: from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by - log fields.' + log fields. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36582,6 +36868,8 @@ components: - type - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineGenerateMetricsProcessorType: default: generate_datadog_metrics description: The processor type. Always `generate_datadog_metrics`. @@ -36676,7 +36964,10 @@ components: - GAUGE - DISTRIBUTION ObservabilityPipelineGoogleChronicleDestination: - description: The `google_chronicle` destination sends logs to Google Chronicle. + description: 'The `google_chronicle` destination sends logs to Google Chronicle. 
+ + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36710,6 +37001,8 @@ components: - inputs - customer_id type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleChronicleDestinationEncoding: description: The encoding format for the logs sent to Chronicle. enum: @@ -36733,7 +37026,10 @@ components: description: 'The `google_cloud_storage` destination stores logs in a Google Cloud Storage (GCS) bucket. - It requires a bucket name, GCP authentication, and metadata fields.' + It requires a bucket name, GCP authentication, and metadata fields. + + + **Supported pipeline types:** logs' properties: acl: $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationAcl' @@ -36775,6 +37071,8 @@ components: - bucket - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleCloudStorageDestinationAcl: description: Access control list setting for objects written to the bucket. enum: @@ -36817,8 +37115,11 @@ components: x-enum-varnames: - GOOGLE_CLOUD_STORAGE ObservabilityPipelineGooglePubSubDestination: - description: The `google_pubsub` destination publishes logs to a Google Cloud + description: 'The `google_pubsub` destination publishes logs to a Google Cloud Pub/Sub topic. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36856,6 +37157,8 @@ components: - project - topic type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubDestinationEncoding: description: Encoding format for log events. enum: @@ -36876,8 +37179,11 @@ components: x-enum-varnames: - GOOGLE_PUBSUB ObservabilityPipelineGooglePubSubSource: - description: The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub + description: 'The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub subscription. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36908,6 +37214,8 @@ components: - project - subscription type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubSourceType: default: google_pubsub description: The source type. The value should always be `google_pubsub`. @@ -36917,9 +37225,94 @@ components: type: string x-enum-varnames: - GOOGLE_PUBSUB + ObservabilityPipelineHttpClientDestination: + description: 'The `http_client` destination sends data to an HTTP endpoint. + + + **Supported pipeline types:** logs, metrics' + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationAuthStrategy' + compression: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationEncoding' + id: + description: The unique identifier for this component. + example: http-client-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - filter-processor + items: + type: string + type: array + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationType' + required: + - id + - type + - inputs + - encoding + type: object + x-pipeline-types: + - logs + - metrics + ObservabilityPipelineHttpClientDestinationAuthStrategy: + description: HTTP authentication strategy. 
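The `http_client` destination above is the only destination in this section whose `x-pipeline-types` covers both logs and metrics. Its auth-strategy enum values land on the lines just below; a sketch using the schema's `basic` example value, with camelCase setters assumed from the snake_case properties:

```java
import java.util.Collections;

import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestination;
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestinationAuthStrategy;
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestinationEncoding;

public class HttpClientDestinationExample {
  public static void main(String[] args) {
    // Required by the schema: id, type, inputs, encoding (json is the only
    // encoding value); auth_strategy, compression, and tls are optional.
    ObservabilityPipelineHttpClientDestination dest =
        new ObservabilityPipelineHttpClientDestination()
            .id("http-client-destination")
            .inputs(Collections.singletonList("filter-processor"))
            .encoding(ObservabilityPipelineHttpClientDestinationEncoding.JSON)
            .authStrategy(ObservabilityPipelineHttpClientDestinationAuthStrategy.BASIC);
    System.out.println(dest);
  }
}
```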
+ enum: + - none + - basic + - bearer + example: basic + type: string + x-enum-varnames: + - NONE + - BASIC + - BEARER + ObservabilityPipelineHttpClientDestinationCompression: + description: Compression configuration for HTTP requests. + properties: + algorithm: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm' + required: + - algorithm + type: object + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm: + description: Compression algorithm. + enum: + - gzip + example: gzip + type: string + x-enum-varnames: + - GZIP + ObservabilityPipelineHttpClientDestinationEncoding: + description: Encoding format for log events. + enum: + - json + example: json + type: string + x-enum-varnames: + - JSON + ObservabilityPipelineHttpClientDestinationType: + default: http_client + description: The destination type. The value should always be `http_client`. + enum: + - http_client + example: http_client + type: string + x-enum-varnames: + - HTTP_CLIENT ObservabilityPipelineHttpClientSource: - description: The `http_client` source scrapes logs from HTTP endpoints at regular + description: 'The `http_client` source scrapes logs from HTTP endpoints at regular intervals. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceAuthStrategy' @@ -36950,14 +37343,18 @@ components: - type - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpClientSourceAuthStrategy: description: Optional authentication strategy for HTTP requests. enum: + - none - basic - bearer example: basic type: string x-enum-varnames: + - NONE - BASIC - BEARER ObservabilityPipelineHttpClientSourceType: @@ -36970,8 +37367,11 @@ components: x-enum-varnames: - HTTP_CLIENT ObservabilityPipelineHttpServerSource: - description: The `http_server` source collects logs over HTTP POST from external + description: 'The `http_server` source collects logs over HTTP POST from external services. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceAuthStrategy' @@ -36991,6 +37391,8 @@ components: - auth_strategy - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpServerSourceAuthStrategy: description: HTTP authentication method. enum: @@ -37010,8 +37412,161 @@ components: type: string x-enum-varnames: - HTTP_SERVER + ObservabilityPipelineKafkaDestination: + description: 'The `kafka` destination sends logs to Apache Kafka topics. + + + **Supported pipeline types:** logs' + properties: + compression: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationEncoding' + headers_key: + description: The field name to use for Kafka message headers. + example: headers + type: string + id: + description: The unique identifier for this component. + example: kafka-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + key_field: + description: The field name to use as the Kafka message key. + example: message_id + type: string + librdkafka_options: + description: Optional list of advanced Kafka producer configuration options, + defined as key-value pairs. 
+ items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' + type: array + message_timeout_ms: + description: Maximum time in milliseconds to wait for message delivery confirmation. + example: 300000 + format: int64 + minimum: 1 + type: integer + rate_limit_duration_secs: + description: Duration in seconds for the rate limit window. + example: 1 + format: int64 + minimum: 1 + type: integer + rate_limit_num: + description: Maximum number of messages allowed per rate limit duration. + example: 1000 + format: int64 + minimum: 1 + type: integer + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + socket_timeout_ms: + description: Socket timeout in milliseconds for network requests. + example: 60000 + format: int64 + maximum: 300000 + minimum: 10 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topic: + description: The Kafka topic name to publish logs to. + example: logs-topic + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationType' + required: + - id + - type + - inputs + - topic + - encoding + type: object + x-pipeline-types: + - logs + ObservabilityPipelineKafkaDestinationCompression: + description: Compression codec for Kafka messages. + enum: + - none + - gzip + - snappy + - lz4 + - zstd + example: gzip + type: string + x-enum-varnames: + - NONE + - GZIP + - SNAPPY + - LZ4 + - ZSTD + ObservabilityPipelineKafkaDestinationEncoding: + description: Encoding format for log events. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineKafkaDestinationType: + default: kafka + description: The destination type. The value should always be `kafka`. + enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineKafkaLibrdkafkaOption: + description: Represents a key-value pair used to configure low-level `librdkafka` + client options for Kafka source and destination, such as timeouts, buffer + sizes, and security settings. + properties: + name: + description: The name of the `librdkafka` configuration option to set. + example: fetch.message.max.bytes + type: string + value: + description: The value assigned to the specified `librdkafka` configuration + option. + example: '1048576' + type: string + required: + - name + - value + type: object + ObservabilityPipelineKafkaSasl: + description: Specifies the SASL mechanism for authenticating with a Kafka cluster. + properties: + mechanism: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSaslMechanism' + type: object + ObservabilityPipelineKafkaSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 ObservabilityPipelineKafkaSource: - description: The `kafka` source ingests data from Apache Kafka topics. + description: 'The `kafka` source ingests data from Apache Kafka topics. + + + **Supported pipeline types:** logs' properties: group_id: description: Consumer group ID used by the Kafka client. @@ -37027,10 +37582,10 @@ components: description: Optional list of advanced Kafka client configuration options, defined as key-value pairs. 
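The new `kafka` destination above shares its SASL and `librdkafka` option models with the existing Kafka source, hence the diffstat renames from `ObservabilityPipelineKafkaSource*` to `ObservabilityPipelineKafka*`. A sketch of the required fields plus SASL; note the generator's varname for `SCRAM-SHA-256` really is `SCRAMNOT_SHANOT_256`, exactly as listed in `x-enum-varnames`. Fluent setters are assumed:

```java
import java.util.Collections;

import com.datadog.api.client.v2.model.ObservabilityPipelineKafkaDestination;
import com.datadog.api.client.v2.model.ObservabilityPipelineKafkaDestinationEncoding;
import com.datadog.api.client.v2.model.ObservabilityPipelineKafkaSasl;
import com.datadog.api.client.v2.model.ObservabilityPipelineKafkaSaslMechanism;

public class KafkaDestinationExample {
  public static void main(String[] args) {
    // Required by the schema: id, type, inputs, topic, encoding.
    ObservabilityPipelineKafkaDestination dest =
        new ObservabilityPipelineKafkaDestination()
            .id("kafka-destination")
            .inputs(Collections.singletonList("filter-processor"))
            .topic("logs-topic")
            .encoding(ObservabilityPipelineKafkaDestinationEncoding.JSON)
            .sasl(new ObservabilityPipelineKafkaSasl() // assumed fluent setter
                .mechanism(ObservabilityPipelineKafkaSaslMechanism.SCRAMNOT_SHANOT_256));
    System.out.println(dest);
  }
}
```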
items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' type: array sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' tls: $ref: '#/components/schemas/ObservabilityPipelineTls' topics: @@ -37050,30 +37605,8 @@ components: - group_id - topics type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: - description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. - properties: - name: - description: The name of the `librdkafka` configuration option to set. - example: fetch.message.max.bytes - type: string - value: - description: The value assigned to the specified `librdkafka` configuration - option. - example: '1048576' - type: string - required: - - name - - value - type: object - ObservabilityPipelineKafkaSourceSasl: - description: Specifies the SASL mechanism for authenticating with a Kafka cluster. - properties: - mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' - type: object + x-pipeline-types: + - logs ObservabilityPipelineKafkaSourceType: default: kafka description: The source type. The value should always be `kafka`. @@ -37084,7 +37617,10 @@ components: x-enum-varnames: - KAFKA ObservabilityPipelineLogstashSource: - description: The `logstash` source ingests logs from a Logstash forwarder. + description: 'The `logstash` source ingests logs from a Logstash forwarder. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -37100,6 +37636,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineLogstashSourceType: default: logstash description: The source type. The value should always be `logstash`. @@ -37124,13 +37662,112 @@ components: - name - value type: object + ObservabilityPipelineMetricTagsProcessor: + description: 'The `metric_tags` processor filters metrics based on their tags + using Datadog tag key patterns. + + + **Supported pipeline types:** metrics' + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: metric-tags-processor + type: string + include: + description: A Datadog search query used to determine which metrics this + processor targets. + example: '*' + type: string + rules: + description: A list of rules for filtering metric tags. + items: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRule' + maxItems: 100 + minItems: 1 + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorType' + required: + - id + - type + - include + - rules + - enabled + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineMetricTagsProcessorRule: + description: Defines a rule for filtering metric tags based on key patterns. 
+ properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleAction' + include: + description: A Datadog search query used to determine which metrics this + rule targets. + example: '*' + type: string + keys: + description: A list of tag keys to include or exclude. + example: + - env + - service + - version + items: + type: string + type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleMode' + required: + - include + - mode + - action + - keys + type: object + ObservabilityPipelineMetricTagsProcessorRuleAction: + description: The action to take on tags with matching keys. + enum: + - include + - exclude + example: include + type: string + x-enum-varnames: + - INCLUDE + - EXCLUDE + ObservabilityPipelineMetricTagsProcessorRuleMode: + description: The processing mode for tag filtering. + enum: + - filter + example: filter + type: string + x-enum-varnames: + - FILTER + ObservabilityPipelineMetricTagsProcessorType: + default: metric_tags + description: The processor type. The value should always be `metric_tags`. + enum: + - metric_tags + example: metric_tags + type: string + x-enum-varnames: + - METRIC_TAGS ObservabilityPipelineMetricValue: description: Specifies how the value of the generated metric is computed. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOne' - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByField' ObservabilityPipelineNewRelicDestination: - description: The `new_relic` destination sends logs to the New Relic platform. + description: 'The `new_relic` destination sends logs to the New Relic platform. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -37154,6 +37791,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineNewRelicDestinationRegion: description: The New Relic region. enum: @@ -37174,8 +37813,11 @@ components: x-enum-varnames: - NEW_RELIC ObservabilityPipelineOcsfMapperProcessor: - description: The `ocsf_mapper` processor transforms logs into the OCSF schema + description: 'The `ocsf_mapper` processor transforms logs into the OCSF schema using a predefined mapping configuration. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37207,6 +37849,8 @@ components: - mappings - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineOcsfMapperProcessorMapping: description: Defines how specific events are transformed to OCSF using a mapping configuration. @@ -37266,7 +37910,10 @@ components: - OKTA_SYSTEM_LOG_AUTHENTICATION - PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC ObservabilityPipelineOpenSearchDestination: - description: The `opensearch` destination writes logs to an OpenSearch cluster. + description: 'The `opensearch` destination writes logs to an OpenSearch cluster. + + + **Supported pipeline types:** logs' properties: bulk_index: description: The index to write logs to. @@ -37291,6 +37938,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineOpenSearchDestinationType: default: opensearch description: The destination type. The value should always be `opensearch`. 
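The `metric_tags` processor above is the first metrics-only processor; each rule pairs a scope query with an include/exclude action over tag keys. A sketch built from the schema's own example values, with camelCase fluent setters assumed:

```java
import java.util.Arrays;
import java.util.Collections;

import com.datadog.api.client.v2.model.ObservabilityPipelineMetricTagsProcessor;
import com.datadog.api.client.v2.model.ObservabilityPipelineMetricTagsProcessorRule;
import com.datadog.api.client.v2.model.ObservabilityPipelineMetricTagsProcessorRuleAction;
import com.datadog.api.client.v2.model.ObservabilityPipelineMetricTagsProcessorRuleMode;

public class MetricTagsExample {
  public static void main(String[] args) {
    // Keep only env/service/version tags on every metric ("*" scope).
    ObservabilityPipelineMetricTagsProcessorRule rule =
        new ObservabilityPipelineMetricTagsProcessorRule()
            .include("*")
            .mode(ObservabilityPipelineMetricTagsProcessorRuleMode.FILTER)
            .action(ObservabilityPipelineMetricTagsProcessorRuleAction.INCLUDE)
            .keys(Arrays.asList("env", "service", "version"));
    ObservabilityPipelineMetricTagsProcessor proc =
        new ObservabilityPipelineMetricTagsProcessor()
            .id("metric-tags-processor")
            .include("*")
            .rules(Collections.singletonList(rule)) // schema allows 1-100 rules
            .enabled(true);
    System.out.println(proc);
  }
}
```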
@@ -37300,9 +37949,56 @@ components: type: string x-enum-varnames: - OPENSEARCH + ObservabilityPipelineOpentelemetrySource: + description: 'The `opentelemetry` source receives telemetry data using the OpenTelemetry + Protocol (OTLP) over gRPC and HTTP. + + + **Supported pipeline types:** logs' + properties: + grpc_address_key: + description: Environment variable name containing the gRPC server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_GRPC_ADDRESS + type: string + http_address_key: + description: Environment variable name containing the HTTP server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_HTTP_ADDRESS + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: opentelemetry-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySourceType' + required: + - id + - type + type: object + x-pipeline-types: + - logs + ObservabilityPipelineOpentelemetrySourceType: + default: opentelemetry + description: The source type. The value should always be `opentelemetry`. + enum: + - opentelemetry + example: opentelemetry + type: string + x-enum-varnames: + - OPENTELEMETRY ObservabilityPipelineParseGrokProcessor: - description: The `parse_grok` processor extracts structured fields from unstructured + description: 'The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. + + + **Supported pipeline types:** logs' properties: disable_library_rules: default: false @@ -37341,6 +38037,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseGrokProcessorRule: description: 'A Grok parsing rule used in the `parse_grok` processor. Each rule defines how to extract structured fields @@ -37420,9 +38118,12 @@ components: x-enum-varnames: - PARSE_GROK ObservabilityPipelineParseJSONProcessor: - description: The `parse_json` processor extracts JSON from a specified field + description: 'The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37454,6 +38155,8 @@ components: - field - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseJSONProcessorType: default: parse_json description: The processor type. The value should always be `parse_json`. @@ -37463,28 +38166,92 @@ components: type: string x-enum-varnames: - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. + ObservabilityPipelineParseXMLProcessor: + description: 'The `parse_xml` processor parses XML from a specified field and + extracts it into the event. + + + **Supported pipeline types:** logs' + properties: + always_use_text_key: + description: Whether to always use a text key for element content. + type: boolean + attr_prefix: + description: The prefix to use for XML attributes in the parsed output. 
+ type: string + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + field: + description: The name of the log field that contains an XML string. + example: message + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: parse-xml-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + include_attr: + description: Whether to include XML attributes in the parsed output. + type: boolean + parse_bool: + description: Whether to parse boolean values from strings. + type: boolean + parse_null: + description: Whether to parse null values. + type: boolean + parse_number: + description: Whether to parse numeric values from strings. + type: boolean + text_key: + description: The key name to use for text content within XML elements. Must + be at least 1 character if specified. + minLength: 1 + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessorType' + required: + - id + - type + - include + - field + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineParseXMLProcessorType: + default: parse_xml + description: The processor type. The value should always be `parse_xml`. enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 + - parse_xml + example: parse_xml type: string x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 + - PARSE_XML ObservabilityPipelineQuotaProcessor: - description: The Quota Processor measures logging traffic for logs that match + description: 'The `quota` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' example: false type: boolean enabled: @@ -37528,6 +38295,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -37538,6 +38307,8 @@ components: - limit - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineQuotaProcessorLimit: description: The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events. @@ -37565,7 +38336,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. 
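To make the `drop_events` note above concrete: the two overflow behaviors are mutually exclusive, so a `quota` processor configures one or the other, never both. A minimal sketch showing only the fields relevant to that choice (the processor's other required fields are omitted for brevity, and `drop` is one of the overflow action options listed below):

    # Option 1: drop matching logs once the daily quota is met
    drop_events: true
    # Option 2 (instead of drop_events): delegate to an overflow action
    # overflow_action: drop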
@@ -37609,8 +38381,11 @@ components: x-enum-varnames: - QUOTA ObservabilityPipelineReduceProcessor: - description: The `reduce` processor aggregates and merges logs based on matching + description: 'The `reduce` processor aggregates and merges logs based on matching keys and merge strategies. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37651,6 +38426,8 @@ components: - merge_strategies - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineReduceProcessorMergeStrategy: description: Defines how a specific field should be merged across grouped events. properties: @@ -37704,7 +38481,10 @@ components: x-enum-varnames: - REDUCE ObservabilityPipelineRemoveFieldsProcessor: - description: The `remove_fields` processor deletes specified fields from logs. + description: 'The `remove_fields` processor deletes specified fields from logs. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37740,6 +38520,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRemoveFieldsProcessorType: default: remove_fields description: The processor type. The value should always be `remove_fields`. @@ -37750,7 +38532,10 @@ components: x-enum-varnames: - REMOVE_FIELDS ObservabilityPipelineRenameFieldsProcessor: - description: The `rename_fields` processor changes field names. + description: 'The `rename_fields` processor changes field names. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37785,6 +38570,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRenameFieldsProcessorField: description: Defines how to rename a field in log events. properties: @@ -37816,8 +38603,11 @@ components: x-enum-varnames: - RENAME_FIELDS ObservabilityPipelineRsyslogDestination: - description: The `rsyslog` destination forwards logs to an external `rsyslog` + description: 'The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -37846,6 +38636,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogDestinationType: default: rsyslog description: The destination type. The value should always be `rsyslog`. @@ -37856,8 +38648,11 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineRsyslogSource: - description: The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` - server using the syslog protocol. + description: 'The `rsyslog` source listens for logs over TCP or UDP from an + `rsyslog` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -37876,6 +38671,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogSourceType: default: rsyslog description: The source type. The value should always be `rsyslog`. 
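Returning to the `parse_xml` processor defined above, a complete entry in a pipeline configuration payload might look like the following sketch. Only `id`, `type`, `include`, `field`, and `enabled` are required; the parsing flags and the `attr_prefix` and `text_key` values here are illustrative:

    - id: parse-xml-processor
      type: parse_xml
      include: service:my-service
      field: message
      enabled: true
      parse_bool: true
      parse_number: true
      attr_prefix: '@'
      text_key: text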
@@ -37886,8 +38683,11 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineSampleProcessor: - description: The `sample` processor allows probabilistic sampling of logs at + description: 'The `sample` processor allows probabilistic sampling of logs at a fixed rate. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37895,6 +38695,16 @@ components: description: Whether this processor is enabled. example: true type: boolean + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. + example: + - service + - host + items: + type: string + minItems: 1 + type: array id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` @@ -37911,20 +38721,17 @@ components: example: 10.0 format: double type: number - rate: - description: Number of events to sample (1 in N). - example: 10 - format: int64 - minimum: 1 - type: integer type: $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' required: - id - type - include + - percentage - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSampleProcessorType: default: sample description: The processor type. The value should always be `sample`. @@ -37935,8 +38742,11 @@ components: x-enum-varnames: - SAMPLE ObservabilityPipelineSensitiveDataScannerProcessor: - description: The `sensitive_data_scanner` processor detects and optionally redacts - sensitive data in log events. + description: 'The `sensitive_data_scanner` processor detects and optionally + redacts sensitive data in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37970,6 +38780,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSensitiveDataScannerProcessorAction: description: Defines what action to take when sensitive data is matched. oneOf: @@ -38091,6 +38903,11 @@ components: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions: description: Options for defining a custom regex pattern. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: "Custom regex for internal API\u202Fkeys" + type: string rule: description: A regular expression used to detect sensitive values. Must be a valid regex. @@ -38146,6 +38963,11 @@ components: description: Options for selecting a predefined library pattern and enabling keyword support. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: Credit card pattern + type: string id: description: Identifier for a predefined pattern from the sensitive data scanner pattern library. @@ -38287,7 +39109,10 @@ components: x-enum-varnames: - SENSITIVE_DATA_SCANNER ObservabilityPipelineSentinelOneDestination: - description: The `sentinel_one` destination sends logs to SentinelOne. + description: 'The `sentinel_one` destination sends logs to SentinelOne. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38311,6 +39136,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineSentinelOneDestinationRegion: description: The SentinelOne region to send logs to. 
enum: @@ -38335,8 +39162,11 @@ components: x-enum-varnames: - SENTINEL_ONE ObservabilityPipelineSocketDestination: - description: The `socket` destination sends logs over TCP or UDP to a remote + description: 'The `socket` destination sends logs over TCP or UDP to a remote server. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSocketDestinationEncoding' @@ -38369,6 +39199,8 @@ components: - framing - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketDestinationEncoding: description: Encoding format for log events. enum: @@ -38463,7 +39295,10 @@ components: x-enum-varnames: - SOCKET ObservabilityPipelineSocketSource: - description: The `socket` source ingests logs over TCP or UDP. + description: 'The `socket` source ingests logs over TCP or UDP. + + + **Supported pipeline types:** logs' properties: framing: $ref: '#/components/schemas/ObservabilityPipelineSocketSourceFraming' @@ -38486,6 +39321,8 @@ components: - mode - framing type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketSourceFraming: description: Framing method configuration for the socket source. oneOf: @@ -38626,9 +39463,79 @@ components: - type - attributes type: object + ObservabilityPipelineSplitArrayProcessor: + description: 'The `split_array` processor splits array fields into separate + events based on configured rules. + + + **Supported pipeline types:** logs' + properties: + arrays: + description: A list of array split configurations. + items: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorArrayConfig' + maxItems: 15 + minItems: 1 + type: array + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: split-array-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. For split_array, this should typically be `*`. + example: '*' + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorType' + required: + - id + - type + - include + - arrays + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineSplitArrayProcessorArrayConfig: + description: Configuration for a single array split operation. + properties: + field: + description: The path to the array field to split. + example: tags + type: string + include: + description: A Datadog search query used to determine which logs this array + split operation targets. + example: '*' + type: string + required: + - include + - field + type: object + ObservabilityPipelineSplitArrayProcessorType: + default: split_array + description: The processor type. The value should always be `split_array`. + enum: + - split_array + example: split_array + type: string + x-enum-varnames: + - SPLIT_ARRAY ObservabilityPipelineSplunkHecDestination: - description: The `splunk_hec` destination forwards logs to Splunk using the + description: 'The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). 
+ + + **Supported pipeline types:** logs' properties: auto_extract_timestamp: description: 'If `true`, Splunk tries to extract timestamps from incoming @@ -38668,6 +39575,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecDestinationEncoding: description: Encoding format for log events. enum: @@ -38688,8 +39597,11 @@ components: x-enum-varnames: - SPLUNK_HEC ObservabilityPipelineSplunkHecSource: - description: The `splunk_hec` source implements the Splunk HTTP Event Collector + description: 'The `splunk_hec` source implements the Splunk HTTP Event Collector (HEC) API. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38705,6 +39617,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecSourceType: default: splunk_hec description: The source type. Always `splunk_hec`. @@ -38718,7 +39632,10 @@ components: description: 'The `splunk_tcp` source receives logs from a Splunk Universal Forwarder over TCP. - TLS is supported for secure transmission.' + TLS is supported for secure transmission. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38734,6 +39651,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkTcpSourceType: default: splunk_tcp description: The source type. Always `splunk_tcp`. @@ -38744,7 +39663,10 @@ components: x-enum-varnames: - SPLUNK_TCP ObservabilityPipelineSumoLogicDestination: - description: The `sumo_logic` destination forwards logs to Sumo Logic. + description: 'The `sumo_logic` destination forwards logs to Sumo Logic. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' @@ -38785,6 +39707,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicDestinationEncoding: description: The output encoding format. enum: @@ -38822,7 +39746,10 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSumoLogicSource: - description: The `sumo_logic` source receives logs from Sumo Logic collectors. + description: 'The `sumo_logic` source receives logs from Sumo Logic collectors. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38836,6 +39763,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicSourceType: default: sumo_logic description: The source type. The value should always be `sumo_logic`. @@ -38846,8 +39775,11 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSyslogNgDestination: - description: The `syslog_ng` destination forwards logs to an external `syslog-ng` + description: 'The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38876,6 +39808,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgDestinationType: default: syslog_ng description: The destination type. The value should always be `syslog_ng`. 
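As a worked example of the `split_array` processor defined earlier: each entry in `arrays` selects one array field to fan out into separate events, and between 1 and 15 such entries are allowed. A minimal sketch using the spec's example values:

    - id: split-array-processor
      type: split_array
      include: '*'
      enabled: true
      arrays:
        - field: tags
          include: '*'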
@@ -38886,8 +39820,11 @@ components: x-enum-varnames: - SYSLOG_NG ObservabilityPipelineSyslogNgSource: - description: The `syslog_ng` source listens for logs over TCP or UDP from a + description: 'The `syslog_ng` source listens for logs over TCP or UDP from a `syslog-ng` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38906,6 +39843,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgSourceType: default: syslog_ng description: The source type. The value should always be `syslog_ng`. @@ -38926,8 +39865,11 @@ components: - TCP - UDP ObservabilityPipelineThrottleProcessor: - description: The `throttle` processor limits the number of events that pass + description: 'The `throttle` processor limits the number of events that pass through over a given time window. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -38973,6 +39915,8 @@ components: - window - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineThrottleProcessorType: default: throttle description: The processor type. The value should always be `throttle`. @@ -76075,6 +77019,222 @@ paths: summary: Get all aggregated DNS traffic tags: - Cloud Network Monitoring + /api/v2/obs-pipelines/pipelines: + get: + description: Retrieve a list of pipelines. + operationId: ListPipelines + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListPipelinesResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: List pipelines + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + post: + description: Create a new pipeline. + operationId: CreatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/validate: + post: + description: 'Validates a pipeline configuration without creating or updating + any resources. + + Returns a list of validation errors, if any.' 
+ operationId: ValidatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ValidationResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Validate an observability pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/{pipeline_id}: + delete: + description: Delete a pipeline. + operationId: DeletePipeline + parameters: + - description: The ID of the pipeline to delete. + in: path + name: pipeline_id + required: true + schema: + type: string + responses: + '204': + description: OK + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Conflict + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Delete a pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_delete + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + get: + description: Get a specific pipeline by its ID. + operationId: GetPipeline + parameters: + - description: The ID of the pipeline to retrieve. + in: path + name: pipeline_id + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Forbidden + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Get a specific pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + put: + description: Update a pipeline. + operationId: UpdatePipeline + parameters: + - description: The ID of the pipeline to update. 
+ in: path + name: pipeline_id + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '404': + $ref: '#/components/responses/NotFoundResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Update a pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' /api/v2/on-call/escalation-policies: post: description: Create a new On-Call escalation policy @@ -79721,222 +80881,6 @@ paths: tags: - CSM Threats x-codegen-request-body-name: body - /api/v2/remote_config/products/obs_pipelines/pipelines: - get: - description: Retrieve a list of pipelines. - operationId: ListPipelines - parameters: - - $ref: '#/components/parameters/PageSize' - - $ref: '#/components/parameters/PageNumber' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ListPipelinesResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: List pipelines - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - post: - description: Create a new pipeline. - operationId: CreatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Create a new pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/validate: - post: - description: 'Validates a pipeline configuration without creating or updating - any resources. - - Returns a list of validation errors, if any.' 
- operationId: ValidatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ValidationResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Validate an observability pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: - delete: - description: Delete a pipeline. - operationId: DeletePipeline - parameters: - - description: The ID of the pipeline to delete. - in: path - name: pipeline_id - required: true - schema: - type: string - responses: - '204': - description: OK - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Not Found - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Conflict - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Delete a pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_delete - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - get: - description: Get a specific pipeline by its ID. - operationId: GetPipeline - parameters: - - description: The ID of the pipeline to retrieve. - in: path - name: pipeline_id - required: true - schema: - type: string - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Forbidden - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Get a specific pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - put: - description: Update a pipeline. - operationId: UpdatePipeline - parameters: - - description: The ID of the pipeline to update. 
- in: path - name: pipeline_id - required: true - schema: - type: string - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '404': - $ref: '#/components/responses/NotFoundResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Update a pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' /api/v2/restriction_policy/{resource_id}: delete: description: Deletes the restriction policy associated with a specified resource. diff --git a/src/main/java/com/datadog/api/client/ApiClient.java b/src/main/java/com/datadog/api/client/ApiClient.java index 439a59c339c..63ef0e4d2f5 100644 --- a/src/main/java/com/datadog/api/client/ApiClient.java +++ b/src/main/java/com/datadog/api/client/ApiClient.java @@ -868,6 +868,12 @@ public class ApiClient { put("v2.updateMonitorUserTemplate", false); put("v2.validateExistingMonitorUserTemplate", false); put("v2.validateMonitorUserTemplate", false); + put("v2.createPipeline", false); + put("v2.deletePipeline", false); + put("v2.getPipeline", false); + put("v2.listPipelines", false); + put("v2.updatePipeline", false); + put("v2.validatePipeline", false); put("v2.listRoleTemplates", false); put("v2.createConnection", false); put("v2.deleteConnection", false); @@ -879,12 +885,6 @@ public class ApiClient { put("v2.queryEventFilteredUsers", false); put("v2.queryUsers", false); put("v2.updateConnection", false); - put("v2.createPipeline", false); - put("v2.deletePipeline", false); - put("v2.getPipeline", false); - put("v2.listPipelines", false); - put("v2.updatePipeline", false); - put("v2.validatePipeline", false); put("v2.createScorecardOutcomesBatch", false); put("v2.createScorecardRule", false); put("v2.deleteScorecardRule", false); diff --git a/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java b/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java index fc9fc737859..e7e9498f68c 100644 --- a/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java +++ b/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java @@ -111,7 +111,7 @@ public ApiResponse createPipelineWithHttpInfo( 400, "Missing the required parameter 'body' when calling createPipeline"); } // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + String localVarPath = "/api/v2/obs-pipelines/pipelines"; Map localVarHeaderParams = new HashMap(); @@ -166,7 +166,7 @@ public CompletableFuture> createPipelineWithH return result; } // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + String localVarPath = "/api/v2/obs-pipelines/pipelines"; Map localVarHeaderParams = new HashMap(); @@ -259,7 +259,7 @@ public ApiResponse deletePipelineWithHttpInfo(String pipelineId) throws Ap } // create path and map variables String localVarPath = - 
"/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + "/api/v2/obs-pipelines/pipelines/{pipeline_id}" .replaceAll( "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); @@ -316,7 +316,7 @@ public CompletableFuture> deletePipelineWithHttpInfoAsync(Stri } // create path and map variables String localVarPath = - "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + "/api/v2/obs-pipelines/pipelines/{pipeline_id}" .replaceAll( "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); @@ -411,7 +411,7 @@ public ApiResponse getPipelineWithHttpInfo(String pipelin } // create path and map variables String localVarPath = - "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + "/api/v2/obs-pipelines/pipelines/{pipeline_id}" .replaceAll( "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); @@ -469,7 +469,7 @@ public CompletableFuture> getPipelineWithHttp } // create path and map variables String localVarPath = - "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + "/api/v2/obs-pipelines/pipelines/{pipeline_id}" .replaceAll( "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); @@ -618,7 +618,7 @@ public ApiResponse listPipelinesWithHttpInfo( Long pageSize = parameters.pageSize; Long pageNumber = parameters.pageNumber; // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + String localVarPath = "/api/v2/obs-pipelines/pipelines"; List localVarQueryParams = new ArrayList(); Map localVarHeaderParams = new HashMap(); @@ -670,7 +670,7 @@ public CompletableFuture> listPipelinesWithHt Long pageSize = parameters.pageSize; Long pageNumber = parameters.pageNumber; // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + String localVarPath = "/api/v2/obs-pipelines/pipelines"; List localVarQueryParams = new ArrayList(); Map localVarHeaderParams = new HashMap(); @@ -781,7 +781,7 @@ public ApiResponse updatePipelineWithHttpInfo( } // create path and map variables String localVarPath = - "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + "/api/v2/obs-pipelines/pipelines/{pipeline_id}" .replaceAll( "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); @@ -849,7 +849,7 @@ public CompletableFuture> updatePipelineWithH } // create path and map variables String localVarPath = - "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + "/api/v2/obs-pipelines/pipelines/{pipeline_id}" .replaceAll( "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); @@ -946,7 +946,7 @@ public ApiResponse validatePipelineWithHttpInfo( 400, "Missing the required parameter 'body' when calling validatePipeline"); } // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines/validate"; + String localVarPath = "/api/v2/obs-pipelines/pipelines/validate"; Map localVarHeaderParams = new HashMap(); @@ -1001,7 +1001,7 @@ public CompletableFuture> validatePipelineWithHt return result; } // create path and map variables - String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines/validate"; + String localVarPath = "/api/v2/obs-pipelines/pipelines/validate"; Map localVarHeaderParams = new HashMap(); diff --git a/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java 
b/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java index 2404a627a2b..1687442550d 100644 --- a/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The azure_storage destination forwards logs to an Azure Blob Storage container. */ +/** + * The azure_storage destination forwards logs to an Azure Blob Storage container. + * + *
Supported pipeline types: logs + */ @JsonPropertyOrder({ AzureStorageDestination.JSON_PROPERTY_BLOB_PREFIX, AzureStorageDestination.JSON_PROPERTY_CONTAINER_NAME, diff --git a/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java b/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java index 62d3cfd1cbc..51571518f7b 100644 --- a/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The microsoft_sentinel destination forwards logs to Microsoft Sentinel. */ +/** + * The microsoft_sentinel destination forwards logs to Microsoft Sentinel. + * + *
Supported pipeline types: logs + */ @JsonPropertyOrder({ MicrosoftSentinelDestination.JSON_PROPERTY_CLIENT_ID, MicrosoftSentinelDestination.JSON_PROPERTY_DCR_IMMUTABLE_ID, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java index 8acbbf172ae..95ea9fc0ff5 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The add_env_vars processor adds environment variable values to log events. */ +/** + * The add_env_vars processor adds environment variable values to log events. + * + *
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineAddEnvVarsProcessor.JSON_PROPERTY_DISPLAY_NAME, ObservabilityPipelineAddEnvVarsProcessor.JSON_PROPERTY_ENABLED, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java index f98ca98473b..3c3a4818ff2 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The add_fields processor adds static key-value fields to logs. */ +/** + * The add_fields processor adds static key-value fields to logs. + * + *
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_DISPLAY_NAME, ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_ENABLED, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java new file mode 100644 index 00000000000..021141d11e7 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessor.java @@ -0,0 +1,272 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The add_hostname processor adds the hostname to log events. + * + *
Supported pipeline types: logs + */ +@JsonPropertyOrder({ + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_DISPLAY_NAME, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineAddHostnameProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAddHostnameProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DISPLAY_NAME = "display_name"; + private String displayName; + + public static final String JSON_PROPERTY_ENABLED = "enabled"; + private Boolean enabled; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineAddHostnameProcessorType type = + ObservabilityPipelineAddHostnameProcessorType.ADD_HOSTNAME; + + public ObservabilityPipelineAddHostnameProcessor() {} + + @JsonCreator + public ObservabilityPipelineAddHostnameProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineAddHostnameProcessorType type) { + this.enabled = enabled; + this.id = id; + this.include = include; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineAddHostnameProcessor displayName(String displayName) { + this.displayName = displayName; + return this; + } + + /** + * The display name for a component. + * + * @return displayName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public ObservabilityPipelineAddHostnameProcessor enabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + /** + * Whether this processor is enabled. + * + * @return enabled + */ + @JsonProperty(JSON_PROPERTY_ENABLED) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public ObservabilityPipelineAddHostnameProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineAddHostnameProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. 
+ * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineAddHostnameProcessor type( + ObservabilityPipelineAddHostnameProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be add_hostname. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAddHostnameProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineAddHostnameProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAddHostnameProcessor + */ + @JsonAnySetter + public ObservabilityPipelineAddHostnameProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAddHostnameProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAddHostnameProcessor observabilityPipelineAddHostnameProcessor = + (ObservabilityPipelineAddHostnameProcessor) o; + return Objects.equals(this.displayName, observabilityPipelineAddHostnameProcessor.displayName) + && Objects.equals(this.enabled, observabilityPipelineAddHostnameProcessor.enabled) + && Objects.equals(this.id, observabilityPipelineAddHostnameProcessor.id) + && Objects.equals(this.include, observabilityPipelineAddHostnameProcessor.include) + && Objects.equals(this.type, observabilityPipelineAddHostnameProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineAddHostnameProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, enabled, id, include, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAddHostnameProcessor {\n"); + sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); + sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java new file mode 100644 index 00000000000..c635c6c8a00 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddHostnameProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be add_hostname. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineAddHostnameProcessorType + .ObservabilityPipelineAddHostnameProcessorTypeSerializer.class) +public class ObservabilityPipelineAddHostnameProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("add_hostname")); + + public static final ObservabilityPipelineAddHostnameProcessorType ADD_HOSTNAME = + new ObservabilityPipelineAddHostnameProcessorType("add_hostname"); + + ObservabilityPipelineAddHostnameProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAddHostnameProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineAddHostnameProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAddHostnameProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAddHostnameProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAddHostnameProcessorType fromValue(String value) { + return new ObservabilityPipelineAddHostnameProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonDataFirehoseSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonDataFirehoseSource.java index 961394d8736..5364cf4bd4f 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonDataFirehoseSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonDataFirehoseSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The amazon_data_firehose source ingests logs from AWS Data Firehose. */ +/** + * The amazon_data_firehose source ingests logs from AWS Data Firehose. + * + *
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineAmazonDataFirehoseSource.JSON_PROPERTY_AUTH, ObservabilityPipelineAmazonDataFirehoseSource.JSON_PROPERTY_ID, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java index f8e615e74d8..57c9c89f0ca 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The amazon_opensearch destination writes logs to Amazon OpenSearch. */ +/** + * The amazon_opensearch destination writes logs to Amazon OpenSearch. + * + *
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_AUTH, ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_BULK_INDEX, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java index e7b0612ac30..6538942f2ef 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java @@ -22,6 +22,8 @@ /** * The amazon_s3 destination sends your logs in Datadog-rehydratable format to an * Amazon S3 bucket for archiving. + * + *
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_AUTH, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java index 2961c4fec42..ec612b3b88f 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java @@ -20,6 +20,8 @@ /** * The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS * authentication and TLS encryption. + * + *
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_AUTH, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java index 2ea8bb4a780..221bc4b2806 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonSecurityLakeDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The amazon_security_lake destination sends your logs to Amazon Security Lake. */ +/** + * The amazon_security_lake destination sends your logs to Amazon Security Lake. + * + *
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineAmazonSecurityLakeDestination.JSON_PROPERTY_AUTH, ObservabilityPipelineAmazonSecurityLakeDestination.JSON_PROPERTY_BUCKET, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java new file mode 100644 index 00000000000..e19c69c0234 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestination.java @@ -0,0 +1,223 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The cloud_prem destination sends logs to Datadog CloudPrem. + * + *
Supported pipeline types: logs + */ +@JsonPropertyOrder({ + ObservabilityPipelineCloudPremDestination.JSON_PROPERTY_ID, + ObservabilityPipelineCloudPremDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineCloudPremDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineCloudPremDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineCloudPremDestinationType type = + ObservabilityPipelineCloudPremDestinationType.CLOUD_PREM; + + public ObservabilityPipelineCloudPremDestination() {} + + @JsonCreator + public ObservabilityPipelineCloudPremDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineCloudPremDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineCloudPremDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineCloudPremDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineCloudPremDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineCloudPremDestination type( + ObservabilityPipelineCloudPremDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be cloud_prem. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineCloudPremDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineCloudPremDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+   *
+   * @param key The arbitrary key to set
+   * @param value The associated value
+   * @return ObservabilityPipelineCloudPremDestination
+   */
+  @JsonAnySetter
+  public ObservabilityPipelineCloudPremDestination putAdditionalProperty(String key, Object value) {
+    if (this.additionalProperties == null) {
+      this.additionalProperties = new HashMap<String, Object>();
+    }
+    this.additionalProperties.put(key, value);
+    return this;
+  }
+
+  /**
+   * Return the additional (undeclared) property.
+   *
+   * @return The additional properties
+   */
+  @JsonAnyGetter
+  public Map<String, Object> getAdditionalProperties() {
+    return additionalProperties;
+  }
+
+  /**
+   * Return the additional (undeclared) property with the specified name.
+   *
+   * @param key The arbitrary key to get
+   * @return The specific additional property for the given key
+   */
+  public Object getAdditionalProperty(String key) {
+    if (this.additionalProperties == null) {
+      return null;
+    }
+    return this.additionalProperties.get(key);
+  }
+
+  /** Return true if this ObservabilityPipelineCloudPremDestination object is equal to o. */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    ObservabilityPipelineCloudPremDestination observabilityPipelineCloudPremDestination =
+        (ObservabilityPipelineCloudPremDestination) o;
+    return Objects.equals(this.id, observabilityPipelineCloudPremDestination.id)
+        && Objects.equals(this.inputs, observabilityPipelineCloudPremDestination.inputs)
+        && Objects.equals(this.type, observabilityPipelineCloudPremDestination.type)
+        && Objects.equals(
+            this.additionalProperties,
+            observabilityPipelineCloudPremDestination.additionalProperties);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(id, inputs, type, additionalProperties);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("class ObservabilityPipelineCloudPremDestination {\n");
+    sb.append("    id: ").append(toIndentedString(id)).append("\n");
+    sb.append("    inputs: ").append(toIndentedString(inputs)).append("\n");
+    sb.append("    type: ").append(toIndentedString(type)).append("\n");
+    sb.append("    additionalProperties: ")
+        .append(toIndentedString(additionalProperties))
+        .append("\n");
+    sb.append('}');
+    return sb.toString();
+  }
+
+  /**
+   * Convert the given object to string with each line indented by 4 spaces (except the first line).
+   */
+  private String toIndentedString(Object o) {
+    if (o == null) {
+      return "null";
+    }
+    return o.toString().replace("\n", "\n    ");
+  }
+}
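
The new model follows the generated fluent-setter convention, so wiring a CloudPrem destination looks like any other destination in this client. A minimal sketch, using only the accessors defined above; the component IDs are made-up placeholders, not values from this patch:

    // Hypothetical IDs; any source or processor ID already in the pipeline works here.
    ObservabilityPipelineCloudPremDestination cloudPrem =
        new ObservabilityPipelineCloudPremDestination()
            .id("cloud-prem-destination")
            .addInputsItem("datadog-agent-source")
            .type(ObservabilityPipelineCloudPremDestinationType.CLOUD_PREM);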
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java
new file mode 100644
index 00000000000..e656648144e
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCloudPremDestinationType.java
@@ -0,0 +1,62 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The destination type. The value should always be <code>cloud_prem</code>. */
+@JsonSerialize(
+    using =
+        ObservabilityPipelineCloudPremDestinationType
+            .ObservabilityPipelineCloudPremDestinationTypeSerializer.class)
+public class ObservabilityPipelineCloudPremDestinationType extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues = new HashSet<String>(Arrays.asList("cloud_prem"));
+
+  public static final ObservabilityPipelineCloudPremDestinationType CLOUD_PREM =
+      new ObservabilityPipelineCloudPremDestinationType("cloud_prem");
+
+  ObservabilityPipelineCloudPremDestinationType(String value) {
+    super(value, allowedValues);
+  }
+
+  public static class ObservabilityPipelineCloudPremDestinationTypeSerializer
+      extends StdSerializer<ObservabilityPipelineCloudPremDestinationType> {
+    public ObservabilityPipelineCloudPremDestinationTypeSerializer(
+        Class<ObservabilityPipelineCloudPremDestinationType> t) {
+      super(t);
+    }
+
+    public ObservabilityPipelineCloudPremDestinationTypeSerializer() {
+      this(null);
+    }
+
+    @Override
+    public void serialize(
+        ObservabilityPipelineCloudPremDestinationType value,
+        JsonGenerator jgen,
+        SerializerProvider provider)
+        throws IOException, JsonProcessingException {
+      jgen.writeObject(value.value);
+    }
+  }
+
+  @JsonCreator
+  public static ObservabilityPipelineCloudPremDestinationType fromValue(String value) {
+    return new ObservabilityPipelineCloudPremDestinationType(value);
+  }
+}
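
As with the other ModelEnum wrappers in this client, fromValue accepts any string rather than throwing on unknown values; validity is tracked separately, and containers flip their unparsed flag when handed an invalid value. A short sketch of that contract, assuming nothing beyond the code above (the "not_a_type" string is an arbitrary example):

    ObservabilityPipelineCloudPremDestinationType ok =
        ObservabilityPipelineCloudPremDestinationType.fromValue("cloud_prem"); // ok.isValid() == true
    ObservabilityPipelineCloudPremDestinationType bad =
        ObservabilityPipelineCloudPremDestinationType.fromValue("not_a_type"); // bad.isValid() == false
    // Passing 'bad' to ObservabilityPipelineCloudPremDestination.setType(...) marks that
    // destination as unparsed instead of failing fast, per the setter shown earlier.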
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
index 4acddfa3a70..c1829473198 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
@@ -22,6 +22,7 @@
 /** Specifies the pipeline's configuration, including its sources, processors, and destinations. */
 @JsonPropertyOrder({
   ObservabilityPipelineConfig.JSON_PROPERTY_DESTINATIONS,
+  ObservabilityPipelineConfig.JSON_PROPERTY_PIPELINE_TYPE,
   ObservabilityPipelineConfig.JSON_PROPERTY_PROCESSORS,
   ObservabilityPipelineConfig.JSON_PROPERTY_SOURCES
 })
@@ -32,6 +33,10 @@ public class ObservabilityPipelineConfig {
   public static final String JSON_PROPERTY_DESTINATIONS = "destinations";
   private List<ObservabilityPipelineConfigDestinationItem> destinations = new ArrayList<>();
 
+  public static final String JSON_PROPERTY_PIPELINE_TYPE = "pipeline_type";
+  private ObservabilityPipelineConfigPipelineType pipelineType =
+      ObservabilityPipelineConfigPipelineType.LOGS;
+
   public static final String JSON_PROPERTY_PROCESSORS = "processors";
   private List<ObservabilityPipelineConfigProcessorItem> processors = null;
 
@@ -81,6 +86,32 @@ public void setDestinations(List<ObservabilityPipelineConfigDestinationItem> des
     this.destinations = destinations;
   }
 
+  public ObservabilityPipelineConfig pipelineType(
+      ObservabilityPipelineConfigPipelineType pipelineType) {
+    this.pipelineType = pipelineType;
+    this.unparsed |= !pipelineType.isValid();
+    return this;
+  }
+
+  /**
+   * The type of data being ingested. Defaults to <code>logs</code> if not specified.
+   *
+   * @return pipelineType
+   */
+  @jakarta.annotation.Nullable
+  @JsonProperty(JSON_PROPERTY_PIPELINE_TYPE)
+  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+  public ObservabilityPipelineConfigPipelineType getPipelineType() {
+    return pipelineType;
+  }
+
+  public void setPipelineType(ObservabilityPipelineConfigPipelineType pipelineType) {
+    if (!pipelineType.isValid()) {
+      this.unparsed = true;
+    }
+    this.pipelineType = pipelineType;
+  }
+
   public ObservabilityPipelineConfig processors(
       List<ObservabilityPipelineConfigProcessorItem> processors) {
     this.processors = processors;
@@ -203,6 +234,7 @@ public boolean equals(Object o) {
     }
     ObservabilityPipelineConfig observabilityPipelineConfig = (ObservabilityPipelineConfig) o;
     return Objects.equals(this.destinations, observabilityPipelineConfig.destinations)
+        && Objects.equals(this.pipelineType, observabilityPipelineConfig.pipelineType)
         && Objects.equals(this.processors, observabilityPipelineConfig.processors)
         && Objects.equals(this.sources, observabilityPipelineConfig.sources)
         && Objects.equals(
@@ -211,7 +243,7 @@
 
   @Override
   public int hashCode() {
-    return Objects.hash(destinations, processors, sources, additionalProperties);
+    return Objects.hash(destinations, pipelineType, processors, sources, additionalProperties);
   }
 
   @Override
@@ -219,6 +251,7 @@ public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append("class ObservabilityPipelineConfig {\n");
     sb.append("    destinations: ").append(toIndentedString(destinations)).append("\n");
+    sb.append("    pipelineType: ").append(toIndentedString(pipelineType)).append("\n");
     sb.append("    processors: ").append(toIndentedString(processors)).append("\n");
     sb.append("    sources: ").append(toIndentedString(sources)).append("\n");
     sb.append("    additionalProperties: ")
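
Because pipeline_type is initialized to LOGS and serialized with USE_DEFAULTS, existing configs that never touch the field keep their current behavior. A minimal sketch using only the setters added in the hunks above:

    ObservabilityPipelineConfig config = new ObservabilityPipelineConfig();
    // pipelineType already defaults to ObservabilityPipelineConfigPipelineType.LOGS;
    // setting it explicitly is equivalent:
    config.pipelineType(ObservabilityPipelineConfigPipelineType.LOGS);

The ObservabilityPipelineConfigDestinationItem hunks that follow are mechanical output of the same regeneration: the oneOf candidate list is reordered and the new destination types are added to the try-each-schema deserializer, which attempts every candidate in turn and increments a match counter for each schema that parses cleanly.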
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
index 2f4e68bdef4..d25d9c1b733 100644
--- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
@@ -90,55 +90,109 @@ public ObservabilityPipelineConfigDestinationItem deserialize(
       boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS);
       int match = 0;
       JsonToken token = tree.traverse(jp.getCodec()).nextToken();
-      // deserialize ObservabilityPipelineDatadogLogsDestination
+      // deserialize ObservabilityPipelineHttpClientDestination
       try {
         boolean attemptParsing = true;
         // ensure that we respect type coercion as set on the client ObjectMapper
-        if (ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class)
-            || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class)
-            || ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class)
-            || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class)
-            || ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class)
-            || ObservabilityPipelineDatadogLogsDestination.class.equals(String.class)) {
+        if (ObservabilityPipelineHttpClientDestination.class.equals(Integer.class)
+            || ObservabilityPipelineHttpClientDestination.class.equals(Long.class)
+            || ObservabilityPipelineHttpClientDestination.class.equals(Float.class)
+            || ObservabilityPipelineHttpClientDestination.class.equals(Double.class)
+            || ObservabilityPipelineHttpClientDestination.class.equals(Boolean.class)
+            || ObservabilityPipelineHttpClientDestination.class.equals(String.class)) {
           attemptParsing = typeCoercion;
           if (!attemptParsing) {
             attemptParsing |=
-                ((ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class)
-                        || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class))
+                ((ObservabilityPipelineHttpClientDestination.class.equals(Integer.class)
+                        || ObservabilityPipelineHttpClientDestination.class.equals(Long.class))
                     && token == JsonToken.VALUE_NUMBER_INT);
             attemptParsing |=
-                ((ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class)
-                        || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class))
+                ((ObservabilityPipelineHttpClientDestination.class.equals(Float.class)
+                        || ObservabilityPipelineHttpClientDestination.class.equals(Double.class))
                     && (token == JsonToken.VALUE_NUMBER_FLOAT
                         || token == JsonToken.VALUE_NUMBER_INT));
             attemptParsing |=
-                (ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class)
+                (ObservabilityPipelineHttpClientDestination.class.equals(Boolean.class)
                     && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
             attemptParsing |=
-                (ObservabilityPipelineDatadogLogsDestination.class.equals(String.class)
+                (ObservabilityPipelineHttpClientDestination.class.equals(String.class)
                     && token == JsonToken.VALUE_STRING);
           }
         }
         if (attemptParsing) {
           tmp =
               tree.traverse(jp.getCodec())
-                  .readValueAs(ObservabilityPipelineDatadogLogsDestination.class);
+                  .readValueAs(ObservabilityPipelineHttpClientDestination.class);
           // TODO: there is no validation against JSON schema constraints
           // (min, max, enum, pattern...), this does not perform a strict JSON
           // validation, which means the 'match' count may be higher than it should be.
-          if (!((ObservabilityPipelineDatadogLogsDestination) tmp).unparsed) {
+          if (!((ObservabilityPipelineHttpClientDestination) tmp).unparsed) {
            deserialized = tmp;
            match++;
          }
          log.log(
              Level.FINER,
-              "Input data matches schema 'ObservabilityPipelineDatadogLogsDestination'");
+              "Input data matches schema 'ObservabilityPipelineHttpClientDestination'");
        }
      } catch (Exception e) {
        // deserialization failed, continue
        log.log(
            Level.FINER,
-            "Input data does not match schema 'ObservabilityPipelineDatadogLogsDestination'",
+            "Input data does not match schema 'ObservabilityPipelineHttpClientDestination'",
+            e);
+      }
+
+      // deserialize ObservabilityPipelineAmazonOpenSearchDestination
+      try {
+        boolean attemptParsing = true;
+        // ensure that we respect type coercion as set on the client ObjectMapper
+        if (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Integer.class)
+            || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Long.class)
+            || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Float.class)
+            || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Double.class)
+            || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Boolean.class)
+            || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(String.class)) {
+          attemptParsing = typeCoercion;
+          if (!attemptParsing) {
+            attemptParsing |=
+                ((ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Integer.class)
+                        || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(
+                            Long.class))
+                    && token == JsonToken.VALUE_NUMBER_INT);
+            attemptParsing |=
+                ((ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Float.class)
+                        || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(
+                            Double.class))
+                    && (token == JsonToken.VALUE_NUMBER_FLOAT
+                        || token ==
JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAmazonOpenSearchDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineAmazonOpenSearchDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineAmazonOpenSearchDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineAmazonOpenSearchDestination'", e); } @@ -193,559 +247,616 @@ public ObservabilityPipelineConfigDestinationItem deserialize( e); } - // deserialize ObservabilityPipelineGoogleCloudStorageDestination + // deserialize ObservabilityPipelineAmazonSecurityLakeDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class) - || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Long.class) - || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class) - || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Double.class) - || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class) - || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class)) { + if (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Integer.class) + || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Long.class) + || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Float.class) + || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Double.class) + || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Boolean.class) + || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class) - || ObservabilityPipelineGoogleCloudStorageDestination.class.equals( + ((ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Integer.class) + || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals( Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class) - || ObservabilityPipelineGoogleCloudStorageDestination.class.equals( + ((ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Float.class) + || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals( Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class) + (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == 
JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class) + (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineGoogleCloudStorageDestination.class); + .readValueAs(ObservabilityPipelineAmazonSecurityLakeDestination.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineGoogleCloudStorageDestination) tmp).unparsed) { + if (!((ObservabilityPipelineAmazonSecurityLakeDestination) tmp).unparsed) { deserialized = tmp; match++; } log.log( Level.FINER, - "Input data matches schema 'ObservabilityPipelineGoogleCloudStorageDestination'"); + "Input data matches schema 'ObservabilityPipelineAmazonSecurityLakeDestination'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineGoogleCloudStorageDestination'", + "Input data does not match schema 'ObservabilityPipelineAmazonSecurityLakeDestination'", e); } - // deserialize ObservabilityPipelineSplunkHecDestination + // deserialize AzureStorageDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class) - || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class) - || ObservabilityPipelineSplunkHecDestination.class.equals(Float.class) - || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class) - || ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class) - || ObservabilityPipelineSplunkHecDestination.class.equals(String.class)) { + if (AzureStorageDestination.class.equals(Integer.class) + || AzureStorageDestination.class.equals(Long.class) + || AzureStorageDestination.class.equals(Float.class) + || AzureStorageDestination.class.equals(Double.class) + || AzureStorageDestination.class.equals(Boolean.class) + || AzureStorageDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class) - || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class)) + ((AzureStorageDestination.class.equals(Integer.class) + || AzureStorageDestination.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSplunkHecDestination.class.equals(Float.class) - || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class)) + ((AzureStorageDestination.class.equals(Float.class) + || AzureStorageDestination.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class) + (AzureStorageDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSplunkHecDestination.class.equals(String.class) + (AzureStorageDestination.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = - tree.traverse(jp.getCodec()) - 
.readValueAs(ObservabilityPipelineSplunkHecDestination.class); + tmp = tree.traverse(jp.getCodec()).readValueAs(AzureStorageDestination.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineSplunkHecDestination) tmp).unparsed) { + if (!((AzureStorageDestination) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecDestination'"); + log.log(Level.FINER, "Input data matches schema 'AzureStorageDestination'"); } } catch (Exception e) { // deserialization failed, continue - log.log( - Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSplunkHecDestination'", - e); + log.log(Level.FINER, "Input data does not match schema 'AzureStorageDestination'", e); } - // deserialize ObservabilityPipelineSumoLogicDestination + // deserialize ObservabilityPipelineCloudPremDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSumoLogicDestination.class.equals(Integer.class) - || ObservabilityPipelineSumoLogicDestination.class.equals(Long.class) - || ObservabilityPipelineSumoLogicDestination.class.equals(Float.class) - || ObservabilityPipelineSumoLogicDestination.class.equals(Double.class) - || ObservabilityPipelineSumoLogicDestination.class.equals(Boolean.class) - || ObservabilityPipelineSumoLogicDestination.class.equals(String.class)) { + if (ObservabilityPipelineCloudPremDestination.class.equals(Integer.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Long.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Float.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Double.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Boolean.class) + || ObservabilityPipelineCloudPremDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSumoLogicDestination.class.equals(Integer.class) - || ObservabilityPipelineSumoLogicDestination.class.equals(Long.class)) + ((ObservabilityPipelineCloudPremDestination.class.equals(Integer.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSumoLogicDestination.class.equals(Float.class) - || ObservabilityPipelineSumoLogicDestination.class.equals(Double.class)) + ((ObservabilityPipelineCloudPremDestination.class.equals(Float.class) + || ObservabilityPipelineCloudPremDestination.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSumoLogicDestination.class.equals(Boolean.class) + (ObservabilityPipelineCloudPremDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSumoLogicDestination.class.equals(String.class) + (ObservabilityPipelineCloudPremDestination.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineSumoLogicDestination.class); + .readValueAs(ObservabilityPipelineCloudPremDestination.class); // TODO: there is no validation against JSON schema 
constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineSumoLogicDestination) tmp).unparsed) { + if (!((ObservabilityPipelineCloudPremDestination) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineSumoLogicDestination'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineCloudPremDestination'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSumoLogicDestination'", + "Input data does not match schema 'ObservabilityPipelineCloudPremDestination'", e); } - // deserialize ObservabilityPipelineElasticsearchDestination + // deserialize ObservabilityPipelineCrowdStrikeNextGenSiemDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineElasticsearchDestination.class.equals(Integer.class) - || ObservabilityPipelineElasticsearchDestination.class.equals(Long.class) - || ObservabilityPipelineElasticsearchDestination.class.equals(Float.class) - || ObservabilityPipelineElasticsearchDestination.class.equals(Double.class) - || ObservabilityPipelineElasticsearchDestination.class.equals(Boolean.class) - || ObservabilityPipelineElasticsearchDestination.class.equals(String.class)) { + if (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Integer.class) + || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Long.class) + || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Float.class) + || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Double.class) + || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Boolean.class) + || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineElasticsearchDestination.class.equals(Integer.class) - || ObservabilityPipelineElasticsearchDestination.class.equals(Long.class)) + ((ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Integer.class) + || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals( + Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineElasticsearchDestination.class.equals(Float.class) - || ObservabilityPipelineElasticsearchDestination.class.equals(Double.class)) + ((ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Float.class) + || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals( + Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineElasticsearchDestination.class.equals(Boolean.class) + (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineElasticsearchDestination.class.equals(String.class) + (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineElasticsearchDestination.class); + 
.readValueAs(ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineElasticsearchDestination) tmp).unparsed) { + if (!((ObservabilityPipelineCrowdStrikeNextGenSiemDestination) tmp).unparsed) { deserialized = tmp; match++; } log.log( Level.FINER, - "Input data matches schema 'ObservabilityPipelineElasticsearchDestination'"); + "Input data matches schema 'ObservabilityPipelineCrowdStrikeNextGenSiemDestination'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineElasticsearchDestination'", + "Input data does not match schema" + + " 'ObservabilityPipelineCrowdStrikeNextGenSiemDestination'", e); } - // deserialize ObservabilityPipelineRsyslogDestination + // deserialize ObservabilityPipelineDatadogLogsDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineRsyslogDestination.class.equals(Integer.class) - || ObservabilityPipelineRsyslogDestination.class.equals(Long.class) - || ObservabilityPipelineRsyslogDestination.class.equals(Float.class) - || ObservabilityPipelineRsyslogDestination.class.equals(Double.class) - || ObservabilityPipelineRsyslogDestination.class.equals(Boolean.class) - || ObservabilityPipelineRsyslogDestination.class.equals(String.class)) { + if (ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineRsyslogDestination.class.equals(Integer.class) - || ObservabilityPipelineRsyslogDestination.class.equals(Long.class)) + ((ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineRsyslogDestination.class.equals(Float.class) - || ObservabilityPipelineRsyslogDestination.class.equals(Double.class)) + ((ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineRsyslogDestination.class.equals(Boolean.class) + (ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineRsyslogDestination.class.equals(String.class) + (ObservabilityPipelineDatadogLogsDestination.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineRsyslogDestination.class); + .readValueAs(ObservabilityPipelineDatadogLogsDestination.class); // TODO: there is no validation against JSON schema 
constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineRsyslogDestination) tmp).unparsed) { + if (!((ObservabilityPipelineDatadogLogsDestination) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineRsyslogDestination'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineDatadogLogsDestination'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineRsyslogDestination'", + "Input data does not match schema 'ObservabilityPipelineDatadogLogsDestination'", e); } - // deserialize ObservabilityPipelineSyslogNgDestination + // deserialize ObservabilityPipelineElasticsearchDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSyslogNgDestination.class.equals(Integer.class) - || ObservabilityPipelineSyslogNgDestination.class.equals(Long.class) - || ObservabilityPipelineSyslogNgDestination.class.equals(Float.class) - || ObservabilityPipelineSyslogNgDestination.class.equals(Double.class) - || ObservabilityPipelineSyslogNgDestination.class.equals(Boolean.class) - || ObservabilityPipelineSyslogNgDestination.class.equals(String.class)) { + if (ObservabilityPipelineElasticsearchDestination.class.equals(Integer.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Long.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Float.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Double.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Boolean.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSyslogNgDestination.class.equals(Integer.class) - || ObservabilityPipelineSyslogNgDestination.class.equals(Long.class)) + ((ObservabilityPipelineElasticsearchDestination.class.equals(Integer.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSyslogNgDestination.class.equals(Float.class) - || ObservabilityPipelineSyslogNgDestination.class.equals(Double.class)) + ((ObservabilityPipelineElasticsearchDestination.class.equals(Float.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSyslogNgDestination.class.equals(Boolean.class) + (ObservabilityPipelineElasticsearchDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSyslogNgDestination.class.equals(String.class) + (ObservabilityPipelineElasticsearchDestination.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineSyslogNgDestination.class); + .readValueAs(ObservabilityPipelineElasticsearchDestination.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be 
higher than it should be. - if (!((ObservabilityPipelineSyslogNgDestination) tmp).unparsed) { + if (!((ObservabilityPipelineElasticsearchDestination) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineSyslogNgDestination'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineElasticsearchDestination'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSyslogNgDestination'", + "Input data does not match schema 'ObservabilityPipelineElasticsearchDestination'", e); } - // deserialize AzureStorageDestination + // deserialize ObservabilityPipelineGoogleChronicleDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (AzureStorageDestination.class.equals(Integer.class) - || AzureStorageDestination.class.equals(Long.class) - || AzureStorageDestination.class.equals(Float.class) - || AzureStorageDestination.class.equals(Double.class) - || AzureStorageDestination.class.equals(Boolean.class) - || AzureStorageDestination.class.equals(String.class)) { + if (ObservabilityPipelineGoogleChronicleDestination.class.equals(Integer.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(Long.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(Float.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(Double.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(Boolean.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((AzureStorageDestination.class.equals(Integer.class) - || AzureStorageDestination.class.equals(Long.class)) + ((ObservabilityPipelineGoogleChronicleDestination.class.equals(Integer.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((AzureStorageDestination.class.equals(Float.class) - || AzureStorageDestination.class.equals(Double.class)) + ((ObservabilityPipelineGoogleChronicleDestination.class.equals(Float.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals( + Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (AzureStorageDestination.class.equals(Boolean.class) + (ObservabilityPipelineGoogleChronicleDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (AzureStorageDestination.class.equals(String.class) + (ObservabilityPipelineGoogleChronicleDestination.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = tree.traverse(jp.getCodec()).readValueAs(AzureStorageDestination.class); + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGoogleChronicleDestination.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((AzureStorageDestination) tmp).unparsed) { + if (!((ObservabilityPipelineGoogleChronicleDestination) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'AzureStorageDestination'"); + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGoogleChronicleDestination'"); } } catch (Exception e) { // deserialization failed, continue - log.log(Level.FINER, "Input data does not match schema 'AzureStorageDestination'", e); + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineGoogleChronicleDestination'", + e); } - // deserialize MicrosoftSentinelDestination + // deserialize ObservabilityPipelineGoogleCloudStorageDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (MicrosoftSentinelDestination.class.equals(Integer.class) - || MicrosoftSentinelDestination.class.equals(Long.class) - || MicrosoftSentinelDestination.class.equals(Float.class) - || MicrosoftSentinelDestination.class.equals(Double.class) - || MicrosoftSentinelDestination.class.equals(Boolean.class) - || MicrosoftSentinelDestination.class.equals(String.class)) { + if (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Long.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Double.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((MicrosoftSentinelDestination.class.equals(Integer.class) - || MicrosoftSentinelDestination.class.equals(Long.class)) + ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals( + Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((MicrosoftSentinelDestination.class.equals(Float.class) - || MicrosoftSentinelDestination.class.equals(Double.class)) + ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals( + Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (MicrosoftSentinelDestination.class.equals(Boolean.class) + (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (MicrosoftSentinelDestination.class.equals(String.class) + (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = tree.traverse(jp.getCodec()).readValueAs(MicrosoftSentinelDestination.class); + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGoogleCloudStorageDestination.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((MicrosoftSentinelDestination) tmp).unparsed) { + if (!((ObservabilityPipelineGoogleCloudStorageDestination) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'MicrosoftSentinelDestination'"); + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGoogleCloudStorageDestination'"); } } catch (Exception e) { // deserialization failed, continue - log.log(Level.FINER, "Input data does not match schema 'MicrosoftSentinelDestination'", e); + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineGoogleCloudStorageDestination'", + e); } - // deserialize ObservabilityPipelineGoogleChronicleDestination + // deserialize ObservabilityPipelineGooglePubSubDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineGoogleChronicleDestination.class.equals(Integer.class) - || ObservabilityPipelineGoogleChronicleDestination.class.equals(Long.class) - || ObservabilityPipelineGoogleChronicleDestination.class.equals(Float.class) - || ObservabilityPipelineGoogleChronicleDestination.class.equals(Double.class) - || ObservabilityPipelineGoogleChronicleDestination.class.equals(Boolean.class) - || ObservabilityPipelineGoogleChronicleDestination.class.equals(String.class)) { + if (ObservabilityPipelineGooglePubSubDestination.class.equals(Integer.class) + || ObservabilityPipelineGooglePubSubDestination.class.equals(Long.class) + || ObservabilityPipelineGooglePubSubDestination.class.equals(Float.class) + || ObservabilityPipelineGooglePubSubDestination.class.equals(Double.class) + || ObservabilityPipelineGooglePubSubDestination.class.equals(Boolean.class) + || ObservabilityPipelineGooglePubSubDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineGoogleChronicleDestination.class.equals(Integer.class) - || ObservabilityPipelineGoogleChronicleDestination.class.equals(Long.class)) + ((ObservabilityPipelineGooglePubSubDestination.class.equals(Integer.class) + || ObservabilityPipelineGooglePubSubDestination.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineGoogleChronicleDestination.class.equals(Float.class) - || ObservabilityPipelineGoogleChronicleDestination.class.equals( - Double.class)) + ((ObservabilityPipelineGooglePubSubDestination.class.equals(Float.class) + || ObservabilityPipelineGooglePubSubDestination.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineGoogleChronicleDestination.class.equals(Boolean.class) + (ObservabilityPipelineGooglePubSubDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineGoogleChronicleDestination.class.equals(String.class) + (ObservabilityPipelineGooglePubSubDestination.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineGoogleChronicleDestination.class); + .readValueAs(ObservabilityPipelineGooglePubSubDestination.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineGoogleChronicleDestination) tmp).unparsed) { + if (!((ObservabilityPipelineGooglePubSubDestination) tmp).unparsed) { deserialized = tmp; match++; } log.log( Level.FINER, - "Input data matches schema 'ObservabilityPipelineGoogleChronicleDestination'"); + "Input data matches schema 'ObservabilityPipelineGooglePubSubDestination'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineGoogleChronicleDestination'", + "Input data does not match schema 'ObservabilityPipelineGooglePubSubDestination'", e); } - // deserialize ObservabilityPipelineNewRelicDestination + // deserialize ObservabilityPipelineKafkaDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineNewRelicDestination.class.equals(Integer.class) - || ObservabilityPipelineNewRelicDestination.class.equals(Long.class) - || ObservabilityPipelineNewRelicDestination.class.equals(Float.class) - || ObservabilityPipelineNewRelicDestination.class.equals(Double.class) - || ObservabilityPipelineNewRelicDestination.class.equals(Boolean.class) - || ObservabilityPipelineNewRelicDestination.class.equals(String.class)) { + if (ObservabilityPipelineKafkaDestination.class.equals(Integer.class) + || ObservabilityPipelineKafkaDestination.class.equals(Long.class) + || ObservabilityPipelineKafkaDestination.class.equals(Float.class) + || ObservabilityPipelineKafkaDestination.class.equals(Double.class) + || ObservabilityPipelineKafkaDestination.class.equals(Boolean.class) + || ObservabilityPipelineKafkaDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineNewRelicDestination.class.equals(Integer.class) - || ObservabilityPipelineNewRelicDestination.class.equals(Long.class)) + ((ObservabilityPipelineKafkaDestination.class.equals(Integer.class) + || ObservabilityPipelineKafkaDestination.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineNewRelicDestination.class.equals(Float.class) - || ObservabilityPipelineNewRelicDestination.class.equals(Double.class)) + ((ObservabilityPipelineKafkaDestination.class.equals(Float.class) + || ObservabilityPipelineKafkaDestination.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineNewRelicDestination.class.equals(Boolean.class) + (ObservabilityPipelineKafkaDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineNewRelicDestination.class.equals(String.class) + (ObservabilityPipelineKafkaDestination.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineNewRelicDestination.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineKafkaDestination.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineNewRelicDestination) tmp).unparsed) { + if (!((ObservabilityPipelineKafkaDestination) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineNewRelicDestination'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineKafkaDestination'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineNewRelicDestination'", + "Input data does not match schema 'ObservabilityPipelineKafkaDestination'", e); } - // deserialize ObservabilityPipelineSentinelOneDestination + // deserialize MicrosoftSentinelDestination try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSentinelOneDestination.class.equals(Integer.class) - || ObservabilityPipelineSentinelOneDestination.class.equals(Long.class) - || ObservabilityPipelineSentinelOneDestination.class.equals(Float.class) - || ObservabilityPipelineSentinelOneDestination.class.equals(Double.class) - || ObservabilityPipelineSentinelOneDestination.class.equals(Boolean.class) - || ObservabilityPipelineSentinelOneDestination.class.equals(String.class)) { + if (MicrosoftSentinelDestination.class.equals(Integer.class) + || MicrosoftSentinelDestination.class.equals(Long.class) + || MicrosoftSentinelDestination.class.equals(Float.class) + || MicrosoftSentinelDestination.class.equals(Double.class) + || MicrosoftSentinelDestination.class.equals(Boolean.class) + || MicrosoftSentinelDestination.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSentinelOneDestination.class.equals(Integer.class) - || ObservabilityPipelineSentinelOneDestination.class.equals(Long.class)) + ((MicrosoftSentinelDestination.class.equals(Integer.class) + || MicrosoftSentinelDestination.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSentinelOneDestination.class.equals(Float.class) - || ObservabilityPipelineSentinelOneDestination.class.equals(Double.class)) + ((MicrosoftSentinelDestination.class.equals(Float.class) + || MicrosoftSentinelDestination.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSentinelOneDestination.class.equals(Boolean.class) + (MicrosoftSentinelDestination.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSentinelOneDestination.class.equals(String.class) + (MicrosoftSentinelDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(MicrosoftSentinelDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+          if (!((MicrosoftSentinelDestination) tmp).unparsed) {
+            deserialized = tmp;
+            match++;
+          }
+          log.log(Level.FINER, "Input data matches schema 'MicrosoftSentinelDestination'");
+        }
+      } catch (Exception e) {
+        // deserialization failed, continue
+        log.log(Level.FINER, "Input data does not match schema 'MicrosoftSentinelDestination'", e);
+      }
+
+      // deserialize ObservabilityPipelineNewRelicDestination
+      try {
+        boolean attemptParsing = true;
+        // ensure that we respect type coercion as set on the client ObjectMapper
+        if (ObservabilityPipelineNewRelicDestination.class.equals(Integer.class)
+            || ObservabilityPipelineNewRelicDestination.class.equals(Long.class)
+            || ObservabilityPipelineNewRelicDestination.class.equals(Float.class)
+            || ObservabilityPipelineNewRelicDestination.class.equals(Double.class)
+            || ObservabilityPipelineNewRelicDestination.class.equals(Boolean.class)
+            || ObservabilityPipelineNewRelicDestination.class.equals(String.class)) {
+          attemptParsing = typeCoercion;
+          if (!attemptParsing) {
+            attemptParsing |=
+                ((ObservabilityPipelineNewRelicDestination.class.equals(Integer.class)
+                        || ObservabilityPipelineNewRelicDestination.class.equals(Long.class))
+                    && token == JsonToken.VALUE_NUMBER_INT);
+            attemptParsing |=
+                ((ObservabilityPipelineNewRelicDestination.class.equals(Float.class)
+                        || ObservabilityPipelineNewRelicDestination.class.equals(Double.class))
+                    && (token == JsonToken.VALUE_NUMBER_FLOAT
+                        || token == JsonToken.VALUE_NUMBER_INT));
+            attemptParsing |=
+                (ObservabilityPipelineNewRelicDestination.class.equals(Boolean.class)
+                    && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+            attemptParsing |=
+                (ObservabilityPipelineNewRelicDestination.class.equals(String.class)
+                    && token == JsonToken.VALUE_STRING);
           }
         }
         if (attemptParsing) {
           tmp =
               tree.traverse(jp.getCodec())
-                  .readValueAs(ObservabilityPipelineSentinelOneDestination.class);
+                  .readValueAs(ObservabilityPipelineNewRelicDestination.class);
           // TODO: there is no validation against JSON schema constraints
           // (min, max, enum, pattern...), this does not perform a strict JSON
           // validation, which means the 'match' count may be higher than it should be.
-          if (!((ObservabilityPipelineSentinelOneDestination) tmp).unparsed) {
+          if (!((ObservabilityPipelineNewRelicDestination) tmp).unparsed) {
             deserialized = tmp;
             match++;
           }
           log.log(
-              Level.FINER,
-              "Input data matches schema 'ObservabilityPipelineSentinelOneDestination'");
+              Level.FINER, "Input data matches schema 'ObservabilityPipelineNewRelicDestination'");
         }
       } catch (Exception e) {
         // deserialization failed, continue
         log.log(
             Level.FINER,
-            "Input data does not match schema 'ObservabilityPipelineSentinelOneDestination'",
+            "Input data does not match schema 'ObservabilityPipelineNewRelicDestination'",
             e);
       }
 
@@ -801,57 +912,106 @@ public ObservabilityPipelineConfigDestinationItem deserialize(
             e);
       }
 
-      // deserialize ObservabilityPipelineAmazonOpenSearchDestination
+      // deserialize ObservabilityPipelineRsyslogDestination
       try {
         boolean attemptParsing = true;
         // ensure that we respect type coercion as set on the client ObjectMapper
-        if (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Integer.class)
-            || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Long.class)
-            || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Float.class)
-            || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Double.class)
-            || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Boolean.class)
-            || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(String.class)) {
+        if (ObservabilityPipelineRsyslogDestination.class.equals(Integer.class)
+            || ObservabilityPipelineRsyslogDestination.class.equals(Long.class)
+            || ObservabilityPipelineRsyslogDestination.class.equals(Float.class)
+            || ObservabilityPipelineRsyslogDestination.class.equals(Double.class)
+            || ObservabilityPipelineRsyslogDestination.class.equals(Boolean.class)
+            || ObservabilityPipelineRsyslogDestination.class.equals(String.class)) {
           attemptParsing = typeCoercion;
           if (!attemptParsing) {
             attemptParsing |=
-                ((ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Integer.class)
-                        || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(
-                            Long.class))
+                ((ObservabilityPipelineRsyslogDestination.class.equals(Integer.class)
+                        || ObservabilityPipelineRsyslogDestination.class.equals(Long.class))
                     && token == JsonToken.VALUE_NUMBER_INT);
             attemptParsing |=
-                ((ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Float.class)
-                        || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(
-                            Double.class))
+                ((ObservabilityPipelineRsyslogDestination.class.equals(Float.class)
+                        || ObservabilityPipelineRsyslogDestination.class.equals(Double.class))
                     && (token == JsonToken.VALUE_NUMBER_FLOAT
                         || token == JsonToken.VALUE_NUMBER_INT));
             attemptParsing |=
-                (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Boolean.class)
+                (ObservabilityPipelineRsyslogDestination.class.equals(Boolean.class)
                     && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
             attemptParsing |=
-                (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(String.class)
+                (ObservabilityPipelineRsyslogDestination.class.equals(String.class)
                     && token == JsonToken.VALUE_STRING);
           }
         }
         if (attemptParsing) {
           tmp =
               tree.traverse(jp.getCodec())
-                  .readValueAs(ObservabilityPipelineAmazonOpenSearchDestination.class);
+                  .readValueAs(ObservabilityPipelineRsyslogDestination.class);
           // TODO: there is no validation against JSON schema constraints
           // (min, max, enum, pattern...), this does not perform a strict JSON
           // validation, which means the 'match' count may be higher than it should be.
-          if (!((ObservabilityPipelineAmazonOpenSearchDestination) tmp).unparsed) {
+          if (!((ObservabilityPipelineRsyslogDestination) tmp).unparsed) {
+            deserialized = tmp;
+            match++;
+          }
+          log.log(
+              Level.FINER, "Input data matches schema 'ObservabilityPipelineRsyslogDestination'");
+        }
+      } catch (Exception e) {
+        // deserialization failed, continue
+        log.log(
+            Level.FINER,
+            "Input data does not match schema 'ObservabilityPipelineRsyslogDestination'",
+            e);
+      }
+
+      // deserialize ObservabilityPipelineSentinelOneDestination
+      try {
+        boolean attemptParsing = true;
+        // ensure that we respect type coercion as set on the client ObjectMapper
+        if (ObservabilityPipelineSentinelOneDestination.class.equals(Integer.class)
+            || ObservabilityPipelineSentinelOneDestination.class.equals(Long.class)
+            || ObservabilityPipelineSentinelOneDestination.class.equals(Float.class)
+            || ObservabilityPipelineSentinelOneDestination.class.equals(Double.class)
+            || ObservabilityPipelineSentinelOneDestination.class.equals(Boolean.class)
+            || ObservabilityPipelineSentinelOneDestination.class.equals(String.class)) {
+          attemptParsing = typeCoercion;
+          if (!attemptParsing) {
+            attemptParsing |=
+                ((ObservabilityPipelineSentinelOneDestination.class.equals(Integer.class)
+                        || ObservabilityPipelineSentinelOneDestination.class.equals(Long.class))
+                    && token == JsonToken.VALUE_NUMBER_INT);
+            attemptParsing |=
+                ((ObservabilityPipelineSentinelOneDestination.class.equals(Float.class)
+                        || ObservabilityPipelineSentinelOneDestination.class.equals(Double.class))
+                    && (token == JsonToken.VALUE_NUMBER_FLOAT
+                        || token == JsonToken.VALUE_NUMBER_INT));
+            attemptParsing |=
+                (ObservabilityPipelineSentinelOneDestination.class.equals(Boolean.class)
+                    && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+            attemptParsing |=
+                (ObservabilityPipelineSentinelOneDestination.class.equals(String.class)
+                    && token == JsonToken.VALUE_STRING);
+          }
+        }
+        if (attemptParsing) {
+          tmp =
+              tree.traverse(jp.getCodec())
+                  .readValueAs(ObservabilityPipelineSentinelOneDestination.class);
+          // TODO: there is no validation against JSON schema constraints
+          // (min, max, enum, pattern...), this does not perform a strict JSON
+          // validation, which means the 'match' count may be higher than it should be.
+          if (!((ObservabilityPipelineSentinelOneDestination) tmp).unparsed) {
             deserialized = tmp;
             match++;
           }
           log.log(
               Level.FINER,
-              "Input data matches schema 'ObservabilityPipelineAmazonOpenSearchDestination'");
+              "Input data matches schema 'ObservabilityPipelineSentinelOneDestination'");
         }
       } catch (Exception e) {
         // deserialization failed, continue
         log.log(
             Level.FINER,
-            "Input data does not match schema 'ObservabilityPipelineAmazonOpenSearchDestination'",
+            "Input data does not match schema 'ObservabilityPipelineSentinelOneDestination'",
             e);
       }
 
@@ -906,164 +1066,209 @@ public ObservabilityPipelineConfigDestinationItem deserialize(
             e);
       }
 
-      // deserialize ObservabilityPipelineAmazonSecurityLakeDestination
+      // deserialize ObservabilityPipelineSplunkHecDestination
       try {
         boolean attemptParsing = true;
         // ensure that we respect type coercion as set on the client ObjectMapper
-        if (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Integer.class)
-            || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Long.class)
-            || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Float.class)
-            || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Double.class)
-            || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Boolean.class)
-            || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(String.class)) {
+        if (ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class)
+            || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class)
+            || ObservabilityPipelineSplunkHecDestination.class.equals(Float.class)
+            || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class)
+            || ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class)
+            || ObservabilityPipelineSplunkHecDestination.class.equals(String.class)) {
           attemptParsing = typeCoercion;
           if (!attemptParsing) {
            attemptParsing |=
-                ((ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Integer.class)
-                        || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(
-                            Long.class))
+                ((ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class)
+                        || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class))
                     && token == JsonToken.VALUE_NUMBER_INT);
            attemptParsing |=
-                ((ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Float.class)
-                        || ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(
-                            Double.class))
+                ((ObservabilityPipelineSplunkHecDestination.class.equals(Float.class)
+                        || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class))
                     && (token == JsonToken.VALUE_NUMBER_FLOAT
                         || token == JsonToken.VALUE_NUMBER_INT));
            attemptParsing |=
-                (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(Boolean.class)
+                (ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class)
                     && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
            attemptParsing |=
-                (ObservabilityPipelineAmazonSecurityLakeDestination.class.equals(String.class)
+                (ObservabilityPipelineSplunkHecDestination.class.equals(String.class)
                     && token == JsonToken.VALUE_STRING);
           }
         }
         if (attemptParsing) {
           tmp =
               tree.traverse(jp.getCodec())
-                  .readValueAs(ObservabilityPipelineAmazonSecurityLakeDestination.class);
+                  .readValueAs(ObservabilityPipelineSplunkHecDestination.class);
           // TODO: there is no validation against JSON schema constraints
           // (min, max, enum, pattern...), this does not perform a strict JSON
           // validation, which means the 'match' count may be higher than it should be.
-          if (!((ObservabilityPipelineAmazonSecurityLakeDestination) tmp).unparsed) {
+          if (!((ObservabilityPipelineSplunkHecDestination) tmp).unparsed) {
            deserialized = tmp;
            match++;
          }
          log.log(
-              Level.FINER,
-              "Input data matches schema 'ObservabilityPipelineAmazonSecurityLakeDestination'");
+              Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecDestination'");
        }
      } catch (Exception e) {
        // deserialization failed, continue
        log.log(
            Level.FINER,
-            "Input data does not match schema 'ObservabilityPipelineAmazonSecurityLakeDestination'",
+            "Input data does not match schema 'ObservabilityPipelineSplunkHecDestination'",
            e);
      }
 
-      // deserialize ObservabilityPipelineCrowdStrikeNextGenSiemDestination
+      // deserialize ObservabilityPipelineSumoLogicDestination
      try {
        boolean attemptParsing = true;
        // ensure that we respect type coercion as set on the client ObjectMapper
-      if (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Integer.class)
-          || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Long.class)
-          || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Float.class)
-          || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Double.class)
-          || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Boolean.class)
-          || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(String.class)) {
+      if (ObservabilityPipelineSumoLogicDestination.class.equals(Integer.class)
+          || ObservabilityPipelineSumoLogicDestination.class.equals(Long.class)
+          || ObservabilityPipelineSumoLogicDestination.class.equals(Float.class)
+          || ObservabilityPipelineSumoLogicDestination.class.equals(Double.class)
+          || ObservabilityPipelineSumoLogicDestination.class.equals(Boolean.class)
+          || ObservabilityPipelineSumoLogicDestination.class.equals(String.class)) {
          attemptParsing = typeCoercion;
          if (!attemptParsing) {
            attemptParsing |=
-                ((ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Integer.class)
-                        || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(
-                            Long.class))
+                ((ObservabilityPipelineSumoLogicDestination.class.equals(Integer.class)
+                        || ObservabilityPipelineSumoLogicDestination.class.equals(Long.class))
                     && token == JsonToken.VALUE_NUMBER_INT);
            attemptParsing |=
-                ((ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Float.class)
-                        || ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(
-                            Double.class))
+                ((ObservabilityPipelineSumoLogicDestination.class.equals(Float.class)
+                        || ObservabilityPipelineSumoLogicDestination.class.equals(Double.class))
                     && (token == JsonToken.VALUE_NUMBER_FLOAT
                         || token == JsonToken.VALUE_NUMBER_INT));
            attemptParsing |=
-                (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(Boolean.class)
+                (ObservabilityPipelineSumoLogicDestination.class.equals(Boolean.class)
                     && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
            attemptParsing |=
-                (ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class.equals(String.class)
+                (ObservabilityPipelineSumoLogicDestination.class.equals(String.class)
                     && token == JsonToken.VALUE_STRING);
          }
        }
        if (attemptParsing) {
          tmp =
              tree.traverse(jp.getCodec())
-                  .readValueAs(ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class);
+                  .readValueAs(ObservabilityPipelineSumoLogicDestination.class);
          // TODO: there is no validation against JSON schema constraints
          // (min, max, enum, pattern...), this does not perform a strict JSON
          // validation, which means the 'match' count may be higher than it should be.
-          if (!((ObservabilityPipelineCrowdStrikeNextGenSiemDestination) tmp).unparsed) {
+          if (!((ObservabilityPipelineSumoLogicDestination) tmp).unparsed) {
            deserialized = tmp;
            match++;
          }
          log.log(
-              Level.FINER,
-              "Input data matches schema 'ObservabilityPipelineCrowdStrikeNextGenSiemDestination'");
+              Level.FINER, "Input data matches schema 'ObservabilityPipelineSumoLogicDestination'");
        }
      } catch (Exception e) {
        // deserialization failed, continue
        log.log(
            Level.FINER,
-            "Input data does not match schema"
-                + " 'ObservabilityPipelineCrowdStrikeNextGenSiemDestination'",
+            "Input data does not match schema 'ObservabilityPipelineSumoLogicDestination'",
            e);
      }
 
-      // deserialize ObservabilityPipelineGooglePubSubDestination
+      // deserialize ObservabilityPipelineSyslogNgDestination
      try {
        boolean attemptParsing = true;
        // ensure that we respect type coercion as set on the client ObjectMapper
-      if (ObservabilityPipelineGooglePubSubDestination.class.equals(Integer.class)
-          || ObservabilityPipelineGooglePubSubDestination.class.equals(Long.class)
-          || ObservabilityPipelineGooglePubSubDestination.class.equals(Float.class)
-          || ObservabilityPipelineGooglePubSubDestination.class.equals(Double.class)
-          || ObservabilityPipelineGooglePubSubDestination.class.equals(Boolean.class)
-          || ObservabilityPipelineGooglePubSubDestination.class.equals(String.class)) {
+      if (ObservabilityPipelineSyslogNgDestination.class.equals(Integer.class)
+          || ObservabilityPipelineSyslogNgDestination.class.equals(Long.class)
+          || ObservabilityPipelineSyslogNgDestination.class.equals(Float.class)
+          || ObservabilityPipelineSyslogNgDestination.class.equals(Double.class)
+          || ObservabilityPipelineSyslogNgDestination.class.equals(Boolean.class)
+          || ObservabilityPipelineSyslogNgDestination.class.equals(String.class)) {
          attemptParsing = typeCoercion;
          if (!attemptParsing) {
            attemptParsing |=
-                ((ObservabilityPipelineGooglePubSubDestination.class.equals(Integer.class)
-                        || ObservabilityPipelineGooglePubSubDestination.class.equals(Long.class))
+                ((ObservabilityPipelineSyslogNgDestination.class.equals(Integer.class)
+                        || ObservabilityPipelineSyslogNgDestination.class.equals(Long.class))
                     && token == JsonToken.VALUE_NUMBER_INT);
            attemptParsing |=
-                ((ObservabilityPipelineGooglePubSubDestination.class.equals(Float.class)
-                        || ObservabilityPipelineGooglePubSubDestination.class.equals(Double.class))
+                ((ObservabilityPipelineSyslogNgDestination.class.equals(Float.class)
+                        || ObservabilityPipelineSyslogNgDestination.class.equals(Double.class))
                     && (token == JsonToken.VALUE_NUMBER_FLOAT
                         || token == JsonToken.VALUE_NUMBER_INT));
            attemptParsing |=
-                (ObservabilityPipelineGooglePubSubDestination.class.equals(Boolean.class)
+                (ObservabilityPipelineSyslogNgDestination.class.equals(Boolean.class)
                     && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
            attemptParsing |=
-                (ObservabilityPipelineGooglePubSubDestination.class.equals(String.class)
+                (ObservabilityPipelineSyslogNgDestination.class.equals(String.class)
                     && token == JsonToken.VALUE_STRING);
          }
        }
        if (attemptParsing) {
          tmp =
              tree.traverse(jp.getCodec())
-                  .readValueAs(ObservabilityPipelineGooglePubSubDestination.class);
+                  .readValueAs(ObservabilityPipelineSyslogNgDestination.class);
          // TODO: there is no validation against JSON schema constraints
          // (min, max, enum, pattern...), this does not perform a strict JSON
          // validation, which means the 'match' count may be higher than it should be.
-          if (!((ObservabilityPipelineGooglePubSubDestination) tmp).unparsed) {
+          if (!((ObservabilityPipelineSyslogNgDestination) tmp).unparsed) {
+            deserialized = tmp;
+            match++;
+          }
+          log.log(
+              Level.FINER, "Input data matches schema 'ObservabilityPipelineSyslogNgDestination'");
+        }
+      } catch (Exception e) {
+        // deserialization failed, continue
+        log.log(
+            Level.FINER,
+            "Input data does not match schema 'ObservabilityPipelineSyslogNgDestination'",
+            e);
+      }
+
+      // deserialize ObservabilityPipelineDatadogMetricsDestination
+      try {
+        boolean attemptParsing = true;
+        // ensure that we respect type coercion as set on the client ObjectMapper
+        if (ObservabilityPipelineDatadogMetricsDestination.class.equals(Integer.class)
+            || ObservabilityPipelineDatadogMetricsDestination.class.equals(Long.class)
+            || ObservabilityPipelineDatadogMetricsDestination.class.equals(Float.class)
+            || ObservabilityPipelineDatadogMetricsDestination.class.equals(Double.class)
+            || ObservabilityPipelineDatadogMetricsDestination.class.equals(Boolean.class)
+            || ObservabilityPipelineDatadogMetricsDestination.class.equals(String.class)) {
+          attemptParsing = typeCoercion;
+          if (!attemptParsing) {
+            attemptParsing |=
+                ((ObservabilityPipelineDatadogMetricsDestination.class.equals(Integer.class)
+                        || ObservabilityPipelineDatadogMetricsDestination.class.equals(Long.class))
+                    && token == JsonToken.VALUE_NUMBER_INT);
+            attemptParsing |=
+                ((ObservabilityPipelineDatadogMetricsDestination.class.equals(Float.class)
+                        || ObservabilityPipelineDatadogMetricsDestination.class.equals(
+                            Double.class))
+                    && (token == JsonToken.VALUE_NUMBER_FLOAT
+                        || token == JsonToken.VALUE_NUMBER_INT));
+            attemptParsing |=
+                (ObservabilityPipelineDatadogMetricsDestination.class.equals(Boolean.class)
+                    && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+            attemptParsing |=
+                (ObservabilityPipelineDatadogMetricsDestination.class.equals(String.class)
+                    && token == JsonToken.VALUE_STRING);
+          }
+        }
+        if (attemptParsing) {
+          tmp =
+              tree.traverse(jp.getCodec())
+                  .readValueAs(ObservabilityPipelineDatadogMetricsDestination.class);
+          // TODO: there is no validation against JSON schema constraints
+          // (min, max, enum, pattern...), this does not perform a strict JSON
+          // validation, which means the 'match' count may be higher than it should be.
+          if (!((ObservabilityPipelineDatadogMetricsDestination) tmp).unparsed) {
             deserialized = tmp;
             match++;
           }
           log.log(
               Level.FINER,
-              "Input data matches schema 'ObservabilityPipelineGooglePubSubDestination'");
+              "Input data matches schema 'ObservabilityPipelineDatadogMetricsDestination'");
         }
       } catch (Exception e) {
         // deserialization failed, continue
         log.log(
             Level.FINER,
-            "Input data does not match schema 'ObservabilityPipelineGooglePubSubDestination'",
+            "Input data does not match schema 'ObservabilityPipelineDatadogMetricsDestination'",
             e);
       }
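
Note: the hunks above only reorder the generated per-schema attempts and add the new destination types; the first-match bookkeeping ('deserialized', 'match') is unchanged. A minimal sketch of how this oneOf deserializer is typically exercised, assuming a hypothetical JSON payload and a bare Jackson ObjectMapper (real client code goes through ApiClient's configured mapper instead):

    import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class DestinationItemSketch {
      public static void main(String[] args) throws Exception {
        // Hypothetical payload; the "type" discriminator should leave only the
        // matching child schema without unparsed fields, and that parse wins.
        String json = "{\"id\":\"sink-1\",\"type\":\"datadog_metrics\",\"inputs\":[\"proc-1\"]}";
        ObservabilityPipelineConfigDestinationItem item =
            new ObjectMapper().readValue(json, ObservabilityPipelineConfigDestinationItem.class);
        System.out.println(item.getActualInstance().getClass().getSimpleName());
      }
    }
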
@@ -1098,7 +1303,13 @@ public ObservabilityPipelineConfigDestinationItem() {
     super("oneOf", Boolean.FALSE);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineDatadogLogsDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineHttpClientDestination o) {
+    super("oneOf", Boolean.FALSE);
+    setActualInstance(o);
+  }
+
+  public ObservabilityPipelineConfigDestinationItem(
+      ObservabilityPipelineAmazonOpenSearchDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
@@ -1109,59 +1320,67 @@ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineAmazonS3D
   }
 
   public ObservabilityPipelineConfigDestinationItem(
-      ObservabilityPipelineGoogleCloudStorageDestination o) {
+      ObservabilityPipelineAmazonSecurityLakeDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSplunkHecDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(AzureStorageDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSumoLogicDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineCloudPremDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
   public ObservabilityPipelineConfigDestinationItem(
-      ObservabilityPipelineElasticsearchDestination o) {
+      ObservabilityPipelineCrowdStrikeNextGenSiemDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineRsyslogDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineDatadogLogsDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSyslogNgDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(
+      ObservabilityPipelineElasticsearchDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(AzureStorageDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(
+      ObservabilityPipelineGoogleChronicleDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(MicrosoftSentinelDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(
+      ObservabilityPipelineGoogleCloudStorageDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
   public ObservabilityPipelineConfigDestinationItem(
-      ObservabilityPipelineGoogleChronicleDestination o) {
+      ObservabilityPipelineGooglePubSubDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineNewRelicDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineKafkaDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSentinelOneDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(MicrosoftSentinelDestination o) {
+    super("oneOf", Boolean.FALSE);
+    setActualInstance(o);
+  }
+
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineNewRelicDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
@@ -1171,89 +1390,108 @@ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineOpenSearc
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(
-      ObservabilityPipelineAmazonOpenSearchDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineRsyslogDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSocketDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSentinelOneDestination o) {
    super("oneOf", Boolean.FALSE);
    setActualInstance(o);
  }
 
-  public ObservabilityPipelineConfigDestinationItem(
-      ObservabilityPipelineAmazonSecurityLakeDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSocketDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(
-      ObservabilityPipelineCrowdStrikeNextGenSiemDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSplunkHecDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
-  public ObservabilityPipelineConfigDestinationItem(
-      ObservabilityPipelineGooglePubSubDestination o) {
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSumoLogicDestination o) {
+    super("oneOf", Boolean.FALSE);
+    setActualInstance(o);
+  }
+
+  public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSyslogNgDestination o) {
+    super("oneOf", Boolean.FALSE);
+    setActualInstance(o);
+  }
+
+  public ObservabilityPipelineConfigDestinationItem(
+      ObservabilityPipelineDatadogMetricsDestination o) {
     super("oneOf", Boolean.FALSE);
     setActualInstance(o);
   }
 
   static {
     schemas.put(
-        "ObservabilityPipelineDatadogLogsDestination",
-        new GenericType<ObservabilityPipelineDatadogLogsDestination>() {});
+        "ObservabilityPipelineHttpClientDestination",
+        new GenericType<ObservabilityPipelineHttpClientDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineAmazonOpenSearchDestination",
+        new GenericType<ObservabilityPipelineAmazonOpenSearchDestination>() {});
     schemas.put(
         "ObservabilityPipelineAmazonS3Destination",
         new GenericType<ObservabilityPipelineAmazonS3Destination>() {});
     schemas.put(
-        "ObservabilityPipelineGoogleCloudStorageDestination",
-        new GenericType<ObservabilityPipelineGoogleCloudStorageDestination>() {});
+        "ObservabilityPipelineAmazonSecurityLakeDestination",
+        new GenericType<ObservabilityPipelineAmazonSecurityLakeDestination>() {});
+    schemas.put("AzureStorageDestination", new GenericType<AzureStorageDestination>() {});
     schemas.put(
-        "ObservabilityPipelineSplunkHecDestination",
-        new GenericType<ObservabilityPipelineSplunkHecDestination>() {});
+        "ObservabilityPipelineCloudPremDestination",
+        new GenericType<ObservabilityPipelineCloudPremDestination>() {});
    schemas.put(
-        "ObservabilityPipelineSumoLogicDestination",
-        new GenericType<ObservabilityPipelineSumoLogicDestination>() {});
+        "ObservabilityPipelineCrowdStrikeNextGenSiemDestination",
+        new GenericType<ObservabilityPipelineCrowdStrikeNextGenSiemDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineDatadogLogsDestination",
+        new GenericType<ObservabilityPipelineDatadogLogsDestination>() {});
     schemas.put(
         "ObservabilityPipelineElasticsearchDestination",
         new GenericType<ObservabilityPipelineElasticsearchDestination>() {});
-    schemas.put(
-        "ObservabilityPipelineRsyslogDestination",
-        new GenericType<ObservabilityPipelineRsyslogDestination>() {});
-    schemas.put(
-        "ObservabilityPipelineSyslogNgDestination",
-        new GenericType<ObservabilityPipelineSyslogNgDestination>() {});
-    schemas.put("AzureStorageDestination", new GenericType<AzureStorageDestination>() {});
-    schemas.put("MicrosoftSentinelDestination", new GenericType<MicrosoftSentinelDestination>() {});
     schemas.put(
         "ObservabilityPipelineGoogleChronicleDestination",
         new GenericType<ObservabilityPipelineGoogleChronicleDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineGoogleCloudStorageDestination",
+        new GenericType<ObservabilityPipelineGoogleCloudStorageDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineGooglePubSubDestination",
+        new GenericType<ObservabilityPipelineGooglePubSubDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineKafkaDestination",
+        new GenericType<ObservabilityPipelineKafkaDestination>() {});
+    schemas.put("MicrosoftSentinelDestination", new GenericType<MicrosoftSentinelDestination>() {});
     schemas.put(
         "ObservabilityPipelineNewRelicDestination",
         new GenericType<ObservabilityPipelineNewRelicDestination>() {});
-    schemas.put(
-        "ObservabilityPipelineSentinelOneDestination",
-        new GenericType<ObservabilityPipelineSentinelOneDestination>() {});
     schemas.put(
         "ObservabilityPipelineOpenSearchDestination",
         new GenericType<ObservabilityPipelineOpenSearchDestination>() {});
     schemas.put(
-        "ObservabilityPipelineAmazonOpenSearchDestination",
-        new GenericType<ObservabilityPipelineAmazonOpenSearchDestination>() {});
+        "ObservabilityPipelineRsyslogDestination",
+        new GenericType<ObservabilityPipelineRsyslogDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineSentinelOneDestination",
+        new GenericType<ObservabilityPipelineSentinelOneDestination>() {});
     schemas.put(
         "ObservabilityPipelineSocketDestination",
         new GenericType<ObservabilityPipelineSocketDestination>() {});
     schemas.put(
-        "ObservabilityPipelineAmazonSecurityLakeDestination",
-        new GenericType<ObservabilityPipelineAmazonSecurityLakeDestination>() {});
+        "ObservabilityPipelineSplunkHecDestination",
+        new GenericType<ObservabilityPipelineSplunkHecDestination>() {});
     schemas.put(
-        "ObservabilityPipelineCrowdStrikeNextGenSiemDestination",
-        new GenericType<ObservabilityPipelineCrowdStrikeNextGenSiemDestination>() {});
+        "ObservabilityPipelineSumoLogicDestination",
+        new GenericType<ObservabilityPipelineSumoLogicDestination>() {});
     schemas.put(
-        "ObservabilityPipelineGooglePubSubDestination",
-        new GenericType<ObservabilityPipelineGooglePubSubDestination>() {});
+        "ObservabilityPipelineSyslogNgDestination",
+        new GenericType<ObservabilityPipelineSyslogNgDestination>() {});
+    schemas.put(
+        "ObservabilityPipelineDatadogMetricsDestination",
+        new GenericType<ObservabilityPipelineDatadogMetricsDestination>() {});
     JSON.registerDescendants(
         ObservabilityPipelineConfigDestinationItem.class, Collections.unmodifiableMap(schemas));
   }
@@ -1265,17 +1503,20 @@ public Map<String, GenericType> getSchemas() {
 
   /**
    * Set the instance that matches the oneOf child schema, check the instance parameter is valid
-   * against the oneOf child schemas: ObservabilityPipelineDatadogLogsDestination,
-   * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination,
-   * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
-   * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination,
-   * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination,
-   * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination,
-   * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination,
-   * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineAmazonOpenSearchDestination,
-   * ObservabilityPipelineSocketDestination, ObservabilityPipelineAmazonSecurityLakeDestination,
+   * against the oneOf child schemas: ObservabilityPipelineHttpClientDestination,
+   * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineAmazonS3Destination,
+   * ObservabilityPipelineAmazonSecurityLakeDestination, AzureStorageDestination,
+   * ObservabilityPipelineCloudPremDestination,
    * ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
-   * ObservabilityPipelineGooglePubSubDestination
+   * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineElasticsearchDestination,
+   * ObservabilityPipelineGoogleChronicleDestination,
+   * ObservabilityPipelineGoogleCloudStorageDestination,
+   * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination,
+   * MicrosoftSentinelDestination, ObservabilityPipelineNewRelicDestination,
+   * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineRsyslogDestination,
+   * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineSocketDestination,
+   * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
+   * ObservabilityPipelineSyslogNgDestination, ObservabilityPipelineDatadogMetricsDestination
    *
    * <p>It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a
    * composed schema (allOf, anyOf, oneOf).
@@ -1283,7 +1524,14 @@ public Map<String, GenericType> getSchemas() {
   @Override
   public void setActualInstance(Object instance) {
     if (JSON.isInstanceOf(
-        ObservabilityPipelineDatadogLogsDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineHttpClientDestination.class, instance, new HashSet<Class<?>>())) {
+      super.setActualInstance(instance);
+      return;
+    }
+    if (JSON.isInstanceOf(
+        ObservabilityPipelineAmazonOpenSearchDestination.class,
+        instance,
+        new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
@@ -1293,47 +1541,61 @@ public void setActualInstance(Object instance) {
       return;
     }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineGoogleCloudStorageDestination.class,
+        ObservabilityPipelineAmazonSecurityLakeDestination.class,
         instance,
         new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
+    if (JSON.isInstanceOf(AzureStorageDestination.class, instance, new HashSet<Class<?>>())) {
+      super.setActualInstance(instance);
+      return;
+    }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineSplunkHecDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineCloudPremDestination.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineSumoLogicDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class,
+        instance,
+        new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineElasticsearchDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineDatadogLogsDestination.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineRsyslogDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineElasticsearchDestination.class, instance, new HashSet<Class<?>>())) {
      super.setActualInstance(instance);
      return;
    }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineSyslogNgDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineGoogleChronicleDestination.class, instance, new HashSet<Class<?>>())) {
      super.setActualInstance(instance);
      return;
    }
-    if (JSON.isInstanceOf(AzureStorageDestination.class, instance, new HashSet<Class<?>>())) {
+    if (JSON.isInstanceOf(
+        ObservabilityPipelineGoogleCloudStorageDestination.class,
+        instance,
+        new HashSet<Class<?>>())) {
      super.setActualInstance(instance);
      return;
    }
-    if (JSON.isInstanceOf(MicrosoftSentinelDestination.class, instance, new HashSet<Class<?>>())) {
+    if (JSON.isInstanceOf(
+        ObservabilityPipelineGooglePubSubDestination.class, instance, new HashSet<Class<?>>())) {
      super.setActualInstance(instance);
      return;
    }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineGoogleChronicleDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineKafkaDestination.class, instance, new HashSet<Class<?>>())) {
+      super.setActualInstance(instance);
+      return;
+    }
+    if (JSON.isInstanceOf(MicrosoftSentinelDestination.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
@@ -1343,19 +1605,17 @@ public void setActualInstance(Object instance) {
       return;
     }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineSentinelOneDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineOpenSearchDestination.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineOpenSearchDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineRsyslogDestination.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineAmazonOpenSearchDestination.class,
-        instance,
-        new HashSet<Class<?>>())) {
+        ObservabilityPipelineSentinelOneDestination.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
@@ -1365,21 +1625,22 @@ public void setActualInstance(Object instance) {
       return;
     }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineAmazonSecurityLakeDestination.class,
-        instance,
-        new HashSet<Class<?>>())) {
+        ObservabilityPipelineSplunkHecDestination.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineCrowdStrikeNextGenSiemDestination.class,
-        instance,
-        new HashSet<Class<?>>())) {
+        ObservabilityPipelineSumoLogicDestination.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
     if (JSON.isInstanceOf(
-        ObservabilityPipelineGooglePubSubDestination.class, instance, new HashSet<Class<?>>())) {
+        ObservabilityPipelineSyslogNgDestination.class, instance, new HashSet<Class<?>>())) {
+      super.setActualInstance(instance);
+      return;
+    }
+    if (JSON.isInstanceOf(
+        ObservabilityPipelineDatadogMetricsDestination.class, instance, new HashSet<Class<?>>())) {
       super.setActualInstance(instance);
       return;
     }
@@ -1389,52 +1650,58 @@ public void setActualInstance(Object instance) {
       return;
    }
     throw new RuntimeException(
-        "Invalid instance type. Must be ObservabilityPipelineDatadogLogsDestination,"
+        "Invalid instance type. Must be ObservabilityPipelineHttpClientDestination,"
+            + " ObservabilityPipelineAmazonOpenSearchDestination,"
            + " ObservabilityPipelineAmazonS3Destination,"
-            + " ObservabilityPipelineGoogleCloudStorageDestination,"
-            + " ObservabilityPipelineSplunkHecDestination,"
-            + " ObservabilityPipelineSumoLogicDestination,"
+            + " ObservabilityPipelineAmazonSecurityLakeDestination, AzureStorageDestination,"
+            + " ObservabilityPipelineCloudPremDestination,"
+            + " ObservabilityPipelineCrowdStrikeNextGenSiemDestination,"
+            + " ObservabilityPipelineDatadogLogsDestination,"
            + " ObservabilityPipelineElasticsearchDestination,"
-            + " ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination,"
-            + " AzureStorageDestination, MicrosoftSentinelDestination,"
            + " ObservabilityPipelineGoogleChronicleDestination,"
+            + " ObservabilityPipelineGoogleCloudStorageDestination,"
+            + " ObservabilityPipelineGooglePubSubDestination,"
+            + " ObservabilityPipelineKafkaDestination, MicrosoftSentinelDestination,"
            + " ObservabilityPipelineNewRelicDestination,"
-            + " ObservabilityPipelineSentinelOneDestination,"
            + " ObservabilityPipelineOpenSearchDestination,"
-            + " ObservabilityPipelineAmazonOpenSearchDestination,"
-            + " ObservabilityPipelineSocketDestination,"
-            + " ObservabilityPipelineAmazonSecurityLakeDestination,"
-            + " ObservabilityPipelineCrowdStrikeNextGenSiemDestination,"
-            + " ObservabilityPipelineGooglePubSubDestination");
+            + " ObservabilityPipelineRsyslogDestination,"
+            + " ObservabilityPipelineSentinelOneDestination,"
+            + " ObservabilityPipelineSocketDestination, ObservabilityPipelineSplunkHecDestination,"
+            + " ObservabilityPipelineSumoLogicDestination,"
+            + " ObservabilityPipelineSyslogNgDestination,"
+            + " ObservabilityPipelineDatadogMetricsDestination");
  }
 
   /**
    * Get the actual instance, which can be the following:
-   * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineAmazonS3Destination,
-   * ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination,
-   * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineElasticsearchDestination,
-   * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination,
-   * AzureStorageDestination, MicrosoftSentinelDestination,
-   * ObservabilityPipelineGoogleChronicleDestination, ObservabilityPipelineNewRelicDestination,
-   * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination,
-   * ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination,
-   * ObservabilityPipelineAmazonSecurityLakeDestination,
+   * ObservabilityPipelineHttpClientDestination, ObservabilityPipelineAmazonOpenSearchDestination,
+   * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineAmazonSecurityLakeDestination,
+   * AzureStorageDestination, ObservabilityPipelineCloudPremDestination,
    * ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
-   * ObservabilityPipelineGooglePubSubDestination
+   * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineElasticsearchDestination,
+   * ObservabilityPipelineGoogleChronicleDestination,
+   * ObservabilityPipelineGoogleCloudStorageDestination,
+   * ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination,
+   * MicrosoftSentinelDestination, ObservabilityPipelineNewRelicDestination,
+   * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineRsyslogDestination,
+   * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineSocketDestination,
+   * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
+   * ObservabilityPipelineSyslogNgDestination, ObservabilityPipelineDatadogMetricsDestination
    *
-   * @return The actual instance (ObservabilityPipelineDatadogLogsDestination,
-   *     ObservabilityPipelineAmazonS3Destination,
+   * @return The actual instance (ObservabilityPipelineHttpClientDestination,
+   *     ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineAmazonS3Destination,
+   *     ObservabilityPipelineAmazonSecurityLakeDestination, AzureStorageDestination,
+   *     ObservabilityPipelineCloudPremDestination,
+   *     ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
+   *     ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineElasticsearchDestination,
+   *     ObservabilityPipelineGoogleChronicleDestination,
    *     ObservabilityPipelineGoogleCloudStorageDestination,
+   *     ObservabilityPipelineGooglePubSubDestination, ObservabilityPipelineKafkaDestination,
+   *     MicrosoftSentinelDestination, ObservabilityPipelineNewRelicDestination,
+   *     ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineRsyslogDestination,
+   *     ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineSocketDestination,
    *     ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination,
-   *     ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination,
-   *     ObservabilityPipelineSyslogNgDestination, AzureStorageDestination,
-   *     MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination,
-   *     ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination,
-   *     ObservabilityPipelineOpenSearchDestination,
-   *     ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineSocketDestination,
-   *     ObservabilityPipelineAmazonSecurityLakeDestination,
-   *     ObservabilityPipelineCrowdStrikeNextGenSiemDestination,
-   *     ObservabilityPipelineGooglePubSubDestination)
+   *     ObservabilityPipelineSyslogNgDestination, ObservabilityPipelineDatadogMetricsDestination)
    */
   @Override
   public Object getActualInstance() {
@@ -1442,16 +1709,29 @@ public Object getActualInstance() {
   }
 
   /**
-   * Get the actual instance of `ObservabilityPipelineDatadogLogsDestination`. If the actual
-   * instance is not `ObservabilityPipelineDatadogLogsDestination`, the ClassCastException will be
-   * thrown.
+   * Get the actual instance of `ObservabilityPipelineHttpClientDestination`. If the actual instance
+   * is not `ObservabilityPipelineHttpClientDestination`, the ClassCastException will be thrown.
    *
-   * @return The actual instance of `ObservabilityPipelineDatadogLogsDestination`
-   * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogLogsDestination`
+   * @return The actual instance of `ObservabilityPipelineHttpClientDestination`
+   * @throws ClassCastException if the instance is not `ObservabilityPipelineHttpClientDestination`
    */
-  public ObservabilityPipelineDatadogLogsDestination
-      getObservabilityPipelineDatadogLogsDestination() throws ClassCastException {
-    return (ObservabilityPipelineDatadogLogsDestination) super.getActualInstance();
+  public ObservabilityPipelineHttpClientDestination getObservabilityPipelineHttpClientDestination()
+      throws ClassCastException {
+    return (ObservabilityPipelineHttpClientDestination) super.getActualInstance();
+  }
+
+  /**
+   * Get the actual instance of `ObservabilityPipelineAmazonOpenSearchDestination`. If the actual
+   * instance is not `ObservabilityPipelineAmazonOpenSearchDestination`, the ClassCastException will
+   * be thrown.
+   *
+   * @return The actual instance of `ObservabilityPipelineAmazonOpenSearchDestination`
+   * @throws ClassCastException if the instance is not
+   *     `ObservabilityPipelineAmazonOpenSearchDestination`
+   */
+  public ObservabilityPipelineAmazonOpenSearchDestination
+      getObservabilityPipelineAmazonOpenSearchDestination() throws ClassCastException {
+    return (ObservabilityPipelineAmazonOpenSearchDestination) super.getActualInstance();
   }
 
   /**
@@ -1467,41 +1747,67 @@ public ObservabilityPipelineAmazonS3Destination getObservabilityPipelineAmazonS3
   }
 
   /**
-   * Get the actual instance of `ObservabilityPipelineGoogleCloudStorageDestination`. If the actual
-   * instance is not `ObservabilityPipelineGoogleCloudStorageDestination`, the ClassCastException
+   * Get the actual instance of `ObservabilityPipelineAmazonSecurityLakeDestination`. If the actual
+   * instance is not `ObservabilityPipelineAmazonSecurityLakeDestination`, the ClassCastException
    * will be thrown.
    *
-   * @return The actual instance of `ObservabilityPipelineGoogleCloudStorageDestination`
+   * @return The actual instance of `ObservabilityPipelineAmazonSecurityLakeDestination`
    * @throws ClassCastException if the instance is not
-   *     `ObservabilityPipelineGoogleCloudStorageDestination`
+   *     `ObservabilityPipelineAmazonSecurityLakeDestination`
    */
-  public ObservabilityPipelineGoogleCloudStorageDestination
-      getObservabilityPipelineGoogleCloudStorageDestination() throws ClassCastException {
-    return (ObservabilityPipelineGoogleCloudStorageDestination) super.getActualInstance();
+  public ObservabilityPipelineAmazonSecurityLakeDestination
+      getObservabilityPipelineAmazonSecurityLakeDestination() throws ClassCastException {
+    return (ObservabilityPipelineAmazonSecurityLakeDestination) super.getActualInstance();
  }
 
   /**
-   * Get the actual instance of `ObservabilityPipelineSplunkHecDestination`. If the actual instance
-   * is not `ObservabilityPipelineSplunkHecDestination`, the ClassCastException will be thrown.
+   * Get the actual instance of `AzureStorageDestination`. If the actual instance is not
+   * `AzureStorageDestination`, the ClassCastException will be thrown.
    *
-   * @return The actual instance of `ObservabilityPipelineSplunkHecDestination`
-   * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecDestination`
+   * @return The actual instance of `AzureStorageDestination`
+   * @throws ClassCastException if the instance is not `AzureStorageDestination`
    */
-  public ObservabilityPipelineSplunkHecDestination getObservabilityPipelineSplunkHecDestination()
-      throws ClassCastException {
-    return (ObservabilityPipelineSplunkHecDestination) super.getActualInstance();
+  public AzureStorageDestination getAzureStorageDestination() throws ClassCastException {
+    return (AzureStorageDestination) super.getActualInstance();
  }
 
   /**
-   * Get the actual instance of `ObservabilityPipelineSumoLogicDestination`. If the actual instance
-   * is not `ObservabilityPipelineSumoLogicDestination`, the ClassCastException will be thrown.
+   * Get the actual instance of `ObservabilityPipelineCloudPremDestination`. If the actual instance
+   * is not `ObservabilityPipelineCloudPremDestination`, the ClassCastException will be thrown.
    *
-   * @return The actual instance of `ObservabilityPipelineSumoLogicDestination`
-   * @throws ClassCastException if the instance is not `ObservabilityPipelineSumoLogicDestination`
+   * @return The actual instance of `ObservabilityPipelineCloudPremDestination`
+   * @throws ClassCastException if the instance is not `ObservabilityPipelineCloudPremDestination`
   */
-  public ObservabilityPipelineSumoLogicDestination getObservabilityPipelineSumoLogicDestination()
+  public ObservabilityPipelineCloudPremDestination getObservabilityPipelineCloudPremDestination()
      throws ClassCastException {
-    return (ObservabilityPipelineSumoLogicDestination) super.getActualInstance();
+    return (ObservabilityPipelineCloudPremDestination) super.getActualInstance();
+  }
+
+  /**
+   * Get the actual instance of `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`. If the
+   * actual instance is not `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`, the
+   * ClassCastException will be thrown.
+   *
+   * @return The actual instance of `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`
+   * @throws ClassCastException if the instance is not
+   *     `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`
+   */
+  public ObservabilityPipelineCrowdStrikeNextGenSiemDestination
+      getObservabilityPipelineCrowdStrikeNextGenSiemDestination() throws ClassCastException {
+    return (ObservabilityPipelineCrowdStrikeNextGenSiemDestination) super.getActualInstance();
+  }
+
+  /**
+   * Get the actual instance of `ObservabilityPipelineDatadogLogsDestination`. If the actual
+   * instance is not `ObservabilityPipelineDatadogLogsDestination`, the ClassCastException will be
+   * thrown.
+   *
+   * @return The actual instance of `ObservabilityPipelineDatadogLogsDestination`
+   * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogLogsDestination`
+   */
+  public ObservabilityPipelineDatadogLogsDestination
+      getObservabilityPipelineDatadogLogsDestination() throws ClassCastException {
+    return (ObservabilityPipelineDatadogLogsDestination) super.getActualInstance();
   }
 
   /**
@@ -1519,38 +1825,57 @@ public ObservabilityPipelineSumoLogicDestination getObservabilityPipelineSumoLog
   }
 
   /**
-   * Get the actual instance of `ObservabilityPipelineRsyslogDestination`. If the actual instance is
-   * not `ObservabilityPipelineRsyslogDestination`, the ClassCastException will be thrown.
+   * Get the actual instance of `ObservabilityPipelineGoogleChronicleDestination`. If the actual
+   * instance is not `ObservabilityPipelineGoogleChronicleDestination`, the ClassCastException will
+   * be thrown.
    *
-   * @return The actual instance of `ObservabilityPipelineRsyslogDestination`
-   * @throws ClassCastException if the instance is not `ObservabilityPipelineRsyslogDestination`
   */
-  public ObservabilityPipelineRsyslogDestination getObservabilityPipelineRsyslogDestination()
-      throws ClassCastException {
-    return (ObservabilityPipelineRsyslogDestination) super.getActualInstance();
+   * @return The actual instance of `ObservabilityPipelineGoogleChronicleDestination`
+   * @throws ClassCastException if the instance is not
+   *     `ObservabilityPipelineGoogleChronicleDestination`
+   */
+  public ObservabilityPipelineGoogleChronicleDestination
+      getObservabilityPipelineGoogleChronicleDestination() throws ClassCastException {
+    return (ObservabilityPipelineGoogleChronicleDestination) super.getActualInstance();
  }
 
   /**
-   * Get the actual instance of `ObservabilityPipelineSyslogNgDestination`. If the actual instance
-   * is not `ObservabilityPipelineSyslogNgDestination`, the ClassCastException will be thrown.
+   * Get the actual instance of `ObservabilityPipelineGoogleCloudStorageDestination`. If the actual
+   * instance is not `ObservabilityPipelineGoogleCloudStorageDestination`, the ClassCastException
+   * will be thrown.
    *
-   * @return The actual instance of `ObservabilityPipelineSyslogNgDestination`
-   * @throws ClassCastException if the instance is not `ObservabilityPipelineSyslogNgDestination`
+   * @return The actual instance of `ObservabilityPipelineGoogleCloudStorageDestination`
+   * @throws ClassCastException if the instance is not
+   *     `ObservabilityPipelineGoogleCloudStorageDestination`
    */
-  public ObservabilityPipelineSyslogNgDestination getObservabilityPipelineSyslogNgDestination()
-      throws ClassCastException {
-    return (ObservabilityPipelineSyslogNgDestination) super.getActualInstance();
+  public ObservabilityPipelineGoogleCloudStorageDestination
+      getObservabilityPipelineGoogleCloudStorageDestination() throws ClassCastException {
+    return (ObservabilityPipelineGoogleCloudStorageDestination) super.getActualInstance();
  }
 
   /**
-   * Get the actual instance of `AzureStorageDestination`. If the actual instance is not
-   * `AzureStorageDestination`, the ClassCastException will be thrown.
+   * Get the actual instance of `ObservabilityPipelineGooglePubSubDestination`. If the actual
+   * instance is not `ObservabilityPipelineGooglePubSubDestination`, the ClassCastException will be
+   * thrown.
    *
-   * @return The actual instance of `AzureStorageDestination`
-   * @throws ClassCastException if the instance is not `AzureStorageDestination`
+   * @return The actual instance of `ObservabilityPipelineGooglePubSubDestination`
+   * @throws ClassCastException if the instance is not
+   *     `ObservabilityPipelineGooglePubSubDestination`
   */
-  public AzureStorageDestination getAzureStorageDestination() throws ClassCastException {
-    return (AzureStorageDestination) super.getActualInstance();
+  public ObservabilityPipelineGooglePubSubDestination
+      getObservabilityPipelineGooglePubSubDestination() throws ClassCastException {
+    return (ObservabilityPipelineGooglePubSubDestination) super.getActualInstance();
+  }
+
+  /**
+   * Get the actual instance of `ObservabilityPipelineKafkaDestination`. If the actual instance is
+   * not `ObservabilityPipelineKafkaDestination`, the ClassCastException will be thrown.
+   *
+   * @return The actual instance of `ObservabilityPipelineKafkaDestination`
+   * @throws ClassCastException if the instance is not `ObservabilityPipelineKafkaDestination`
+   */
+  public ObservabilityPipelineKafkaDestination getObservabilityPipelineKafkaDestination()
+      throws ClassCastException {
+    return (ObservabilityPipelineKafkaDestination) super.getActualInstance();
   }
 
   /**
@@ -1564,20 +1889,6 @@ public MicrosoftSentinelDestination getMicrosoftSentinelDestination() throws Cla
     return (MicrosoftSentinelDestination) super.getActualInstance();
   }
 
-  /**
-   * Get the actual instance of `ObservabilityPipelineGoogleChronicleDestination`. If the actual
-   * instance is not `ObservabilityPipelineGoogleChronicleDestination`, the ClassCastException will
-   * be thrown.
-   *
-   * @return The actual instance of `ObservabilityPipelineGoogleChronicleDestination`
-   * @throws ClassCastException if the instance is not
-   *     `ObservabilityPipelineGoogleChronicleDestination`
-   */
-  public ObservabilityPipelineGoogleChronicleDestination
-      getObservabilityPipelineGoogleChronicleDestination() throws ClassCastException {
-    return (ObservabilityPipelineGoogleChronicleDestination) super.getActualInstance();
-  }
-
   /**
    * Get the actual instance of `ObservabilityPipelineNewRelicDestination`. If the actual instance
    * is not `ObservabilityPipelineNewRelicDestination`, the ClassCastException will be thrown.
@@ -1590,19 +1901,6 @@ public ObservabilityPipelineNewRelicDestination getObservabilityPipelineNewRelic
     return (ObservabilityPipelineNewRelicDestination) super.getActualInstance();
   }
 
-  /**
-   * Get the actual instance of `ObservabilityPipelineSentinelOneDestination`. If the actual
-   * instance is not `ObservabilityPipelineSentinelOneDestination`, the ClassCastException will be
-   * thrown.
-   *
-   * @return The actual instance of `ObservabilityPipelineSentinelOneDestination`
-   * @throws ClassCastException if the instance is not `ObservabilityPipelineSentinelOneDestination`
-   */
-  public ObservabilityPipelineSentinelOneDestination
-      getObservabilityPipelineSentinelOneDestination() throws ClassCastException {
-    return (ObservabilityPipelineSentinelOneDestination) super.getActualInstance();
-  }
-
   /**
    * Get the actual instance of `ObservabilityPipelineOpenSearchDestination`. If the actual instance
    * is not `ObservabilityPipelineOpenSearchDestination`, the ClassCastException will be thrown.
@@ -1616,17 +1914,28 @@ public ObservabilityPipelineOpenSearchDestination getObservabilityPipelineOpenSe
   }
 
   /**
-   * Get the actual instance of `ObservabilityPipelineAmazonOpenSearchDestination`. If the actual
-   * instance is not `ObservabilityPipelineAmazonOpenSearchDestination`, the ClassCastException will
-   * be thrown.
+   * Get the actual instance of `ObservabilityPipelineRsyslogDestination`. If the actual instance is
+   * not `ObservabilityPipelineRsyslogDestination`, the ClassCastException will be thrown.
    *
-   * @return The actual instance of `ObservabilityPipelineAmazonOpenSearchDestination`
-   * @throws ClassCastException if the instance is not
-   *     `ObservabilityPipelineAmazonOpenSearchDestination`
+   * @return The actual instance of `ObservabilityPipelineRsyslogDestination`
+   * @throws ClassCastException if the instance is not `ObservabilityPipelineRsyslogDestination`
   */
-  public ObservabilityPipelineAmazonOpenSearchDestination
-      getObservabilityPipelineAmazonOpenSearchDestination() throws ClassCastException {
-    return (ObservabilityPipelineAmazonOpenSearchDestination) super.getActualInstance();
+  public ObservabilityPipelineRsyslogDestination getObservabilityPipelineRsyslogDestination()
+      throws ClassCastException {
+    return (ObservabilityPipelineRsyslogDestination) super.getActualInstance();
+  }
+
+  /**
+   * Get the actual instance of `ObservabilityPipelineSentinelOneDestination`. If the actual
+   * instance is not `ObservabilityPipelineSentinelOneDestination`, the ClassCastException will be
+   * thrown.
+   *
+   * @return The actual instance of `ObservabilityPipelineSentinelOneDestination`
+   * @throws ClassCastException if the instance is not `ObservabilityPipelineSentinelOneDestination`
+   */
+  public ObservabilityPipelineSentinelOneDestination
+      getObservabilityPipelineSentinelOneDestination() throws ClassCastException {
+    return (ObservabilityPipelineSentinelOneDestination) super.getActualInstance();
   }
 
   /**
@@ -1642,44 +1951,52 @@ public ObservabilityPipelineSocketDestination getObservabilityPipelineSocketDest
   }
 
   /**
-   * Get the actual instance of `ObservabilityPipelineAmazonSecurityLakeDestination`. If the actual
-   * instance is not `ObservabilityPipelineAmazonSecurityLakeDestination`, the ClassCastException
-   * will be thrown.
+   * Get the actual instance of `ObservabilityPipelineSplunkHecDestination`. If the actual instance
+   * is not `ObservabilityPipelineSplunkHecDestination`, the ClassCastException will be thrown.
    *
-   * @return The actual instance of `ObservabilityPipelineAmazonSecurityLakeDestination`
-   * @throws ClassCastException if the instance is not
-   *     `ObservabilityPipelineAmazonSecurityLakeDestination`
+   * @return The actual instance of `ObservabilityPipelineSplunkHecDestination`
+   * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecDestination`
  */
-  public ObservabilityPipelineAmazonSecurityLakeDestination
-      getObservabilityPipelineAmazonSecurityLakeDestination() throws ClassCastException {
-    return (ObservabilityPipelineAmazonSecurityLakeDestination) super.getActualInstance();
+  public ObservabilityPipelineSplunkHecDestination getObservabilityPipelineSplunkHecDestination()
+      throws ClassCastException {
+    return (ObservabilityPipelineSplunkHecDestination) super.getActualInstance();
  }
 
   /**
-   * Get the actual instance of `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`. If the
-   * actual instance is not `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`, the
-   * ClassCastException will be thrown.
+   * Get the actual instance of `ObservabilityPipelineSumoLogicDestination`. If the actual instance
+   * is not `ObservabilityPipelineSumoLogicDestination`, the ClassCastException will be thrown.
    *
-   * @return The actual instance of `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`
-   * @throws ClassCastException if the instance is not
-   *     `ObservabilityPipelineCrowdStrikeNextGenSiemDestination`
+   * @return The actual instance of `ObservabilityPipelineSumoLogicDestination`
+   * @throws ClassCastException if the instance is not `ObservabilityPipelineSumoLogicDestination`
   */
-  public ObservabilityPipelineCrowdStrikeNextGenSiemDestination
-      getObservabilityPipelineCrowdStrikeNextGenSiemDestination() throws ClassCastException {
-    return (ObservabilityPipelineCrowdStrikeNextGenSiemDestination) super.getActualInstance();
+  public ObservabilityPipelineSumoLogicDestination getObservabilityPipelineSumoLogicDestination()
+      throws ClassCastException {
+    return (ObservabilityPipelineSumoLogicDestination) super.getActualInstance();
  }
 
   /**
-   * Get the actual instance of `ObservabilityPipelineGooglePubSubDestination`. If the actual
-   * instance is not `ObservabilityPipelineGooglePubSubDestination`, the ClassCastException will be
-   * thrown.
+   * Get the actual instance of `ObservabilityPipelineSyslogNgDestination`. If the actual instance
+   * is not `ObservabilityPipelineSyslogNgDestination`, the ClassCastException will be thrown.
    *
-   * @return The actual instance of `ObservabilityPipelineGooglePubSubDestination`
+   * @return The actual instance of `ObservabilityPipelineSyslogNgDestination`
+   * @throws ClassCastException if the instance is not `ObservabilityPipelineSyslogNgDestination`
+   */
+  public ObservabilityPipelineSyslogNgDestination getObservabilityPipelineSyslogNgDestination()
+      throws ClassCastException {
+    return (ObservabilityPipelineSyslogNgDestination) super.getActualInstance();
+  }
+
+  /**
+   * Get the actual instance of `ObservabilityPipelineDatadogMetricsDestination`. If the actual
+   * instance is not `ObservabilityPipelineDatadogMetricsDestination`, the ClassCastException will
+   * be thrown.
+   *
+   * @return The actual instance of `ObservabilityPipelineDatadogMetricsDestination`
   * @throws ClassCastException if the instance is not
-   *     `ObservabilityPipelineGooglePubSubDestination`
+   *     `ObservabilityPipelineDatadogMetricsDestination`
    */
-  public ObservabilityPipelineGooglePubSubDestination
-      getObservabilityPipelineGooglePubSubDestination() throws ClassCastException {
-    return (ObservabilityPipelineGooglePubSubDestination) super.getActualInstance();
+  public ObservabilityPipelineDatadogMetricsDestination
+      getObservabilityPipelineDatadogMetricsDestination() throws ClassCastException {
+    return (ObservabilityPipelineDatadogMetricsDestination) super.getActualInstance();
  }
 }
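
Note: with the constructor and accessor reshuffle above, wrapping and unwrapping a concrete destination still goes through the same oneOf container. A minimal sketch (required fields of the destination, such as its id, inputs, and type, are elided here and would be set in real code):

    ObservabilityPipelineDatadogMetricsDestination metrics =
        new ObservabilityPipelineDatadogMetricsDestination();
    ObservabilityPipelineConfigDestinationItem item =
        new ObservabilityPipelineConfigDestinationItem(metrics);

    // getActualInstance() returns Object; the typed accessor casts and throws
    // ClassCastException when a different destination is wrapped.
    if (item.getActualInstance() instanceof ObservabilityPipelineDatadogMetricsDestination) {
      ObservabilityPipelineDatadogMetricsDestination unwrapped =
          item.getObservabilityPipelineDatadogMetricsDestination();
    }
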
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigPipelineType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigPipelineType.java
new file mode 100644
index 00000000000..7020de23586
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigPipelineType.java
@@ -0,0 +1,65 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The type of data being ingested. Defaults to logs if not specified. */
+@JsonSerialize(
+    using =
+        ObservabilityPipelineConfigPipelineType.ObservabilityPipelineConfigPipelineTypeSerializer
+            .class)
+public class ObservabilityPipelineConfigPipelineType extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues =
+      new HashSet<String>(Arrays.asList("logs", "metrics"));
+
+  public static final ObservabilityPipelineConfigPipelineType LOGS =
+      new ObservabilityPipelineConfigPipelineType("logs");
+  public static final ObservabilityPipelineConfigPipelineType METRICS =
+      new ObservabilityPipelineConfigPipelineType("metrics");
+
+  ObservabilityPipelineConfigPipelineType(String value) {
+    super(value, allowedValues);
+  }
+
+  public static class ObservabilityPipelineConfigPipelineTypeSerializer
+      extends StdSerializer<ObservabilityPipelineConfigPipelineType> {
+    public ObservabilityPipelineConfigPipelineTypeSerializer(
+        Class<ObservabilityPipelineConfigPipelineType> t) {
+      super(t);
+    }
+
+    public ObservabilityPipelineConfigPipelineTypeSerializer() {
+      this(null);
+    }
+
+    @Override
+    public void serialize(
+        ObservabilityPipelineConfigPipelineType value,
+        JsonGenerator jgen,
+        SerializerProvider provider)
+        throws IOException, JsonProcessingException {
+      jgen.writeObject(value.value);
+    }
+  }
+
+  @JsonCreator
+  public static ObservabilityPipelineConfigPipelineType fromValue(String value) {
+    return new ObservabilityPipelineConfigPipelineType(value);
+  }
+}
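
Note: the new enum follows the client's ModelEnum pattern: named constants for the documented values plus a permissive fromValue factory. A short usage sketch (the handling of strings outside "logs"/"metrics" is inherited from ModelEnum, as with the client's other enums):

    // Named constant for a documented value.
    ObservabilityPipelineConfigPipelineType metrics =
        ObservabilityPipelineConfigPipelineType.METRICS;

    // fromValue accepts arbitrary strings so newer API values do not break older clients.
    ObservabilityPipelineConfigPipelineType fromWire =
        ObservabilityPipelineConfigPipelineType.fromValue("logs");
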
ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class)) + ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) - || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class)) + ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) + (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineParseJSONProcessor.class.equals(String.class) + (ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineParseJSONProcessor.class); + .readValueAs(ObservabilityPipelineAddEnvVarsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineParseJSONProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineAddEnvVarsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineParseJSONProcessor'"); - } - } catch (Exception e) { - // deserialization failed, continue - log.log( - Level.FINER, - "Input data does not match schema 'ObservabilityPipelineParseJSONProcessor'", - e); - } - - // deserialize ObservabilityPipelineQuotaProcessor - try { - boolean attemptParsing = true; - // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Long.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Float.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Double.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) - || ObservabilityPipelineQuotaProcessor.class.equals(String.class)) { - attemptParsing = typeCoercion; - if (!attemptParsing) { - attemptParsing |= - ((ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Long.class)) - && token == JsonToken.VALUE_NUMBER_INT); - attemptParsing |= - ((ObservabilityPipelineQuotaProcessor.class.equals(Float.class) - || ObservabilityPipelineQuotaProcessor.class.equals(Double.class)) - && (token == JsonToken.VALUE_NUMBER_FLOAT - || token == JsonToken.VALUE_NUMBER_INT)); - attemptParsing |= - (ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) - && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); - attemptParsing |= - (ObservabilityPipelineQuotaProcessor.class.equals(String.class) - && token == JsonToken.VALUE_STRING); - } - } - if (attemptParsing) { - tmp = 
tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineQuotaProcessor.class); - // TODO: there is no validation against JSON schema constraints - // (min, max, enum, pattern...), this does not perform a strict JSON - // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineQuotaProcessor) tmp).unparsed) { - deserialized = tmp; - match++; - } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineQuotaProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineAddEnvVarsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineQuotaProcessor'", + "Input data does not match schema 'ObservabilityPipelineAddEnvVarsProcessor'", e); } @@ -289,313 +241,307 @@ public ObservabilityPipelineConfigProcessorItem deserialize( e); } - // deserialize ObservabilityPipelineRemoveFieldsProcessor + // deserialize ObservabilityPipelineAddHostnameProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineAddHostnameProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Long.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Float.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Double.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Boolean.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineAddHostnameProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineAddHostnameProcessor.class.equals(Float.class) + || ObservabilityPipelineAddHostnameProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineAddHostnameProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class) + (ObservabilityPipelineAddHostnameProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineRemoveFieldsProcessor.class); + .readValueAs(ObservabilityPipelineAddHostnameProcessor.class); // TODO: there is no validation against JSON schema constraints // 
(min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. - if (!((ObservabilityPipelineRemoveFieldsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineAddHostnameProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineRemoveFieldsProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineAddHostnameProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineRemoveFieldsProcessor'", + "Input data does not match schema 'ObservabilityPipelineAddHostnameProcessor'", e); } - // deserialize ObservabilityPipelineRenameFieldsProcessor + // deserialize ObservabilityPipelineCustomProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineCustomProcessor.class.equals(Integer.class) + || ObservabilityPipelineCustomProcessor.class.equals(Long.class) + || ObservabilityPipelineCustomProcessor.class.equals(Float.class) + || ObservabilityPipelineCustomProcessor.class.equals(Double.class) + || ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) + || ObservabilityPipelineCustomProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineCustomProcessor.class.equals(Integer.class) + || ObservabilityPipelineCustomProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) - || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineCustomProcessor.class.equals(Float.class) + || ObservabilityPipelineCustomProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class) + (ObservabilityPipelineCustomProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineRenameFieldsProcessor.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineCustomProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
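Every renamed candidate block in this method follows the same attempt-and-count pattern, so the selection step they all feed is worth one sketch. A minimal sketch, assuming the usual tail of this generated deserializer (the final check sits outside this hunk); `match`, `deserialized`, `log`, and `Level` are the locals and imports already used above:

    // After all candidate schemas have been attempted:
    ObservabilityPipelineConfigProcessorItem ret = new ObservabilityPipelineConfigProcessorItem();
    if (match == 1) {
      // Exactly one candidate parsed cleanly and was not flagged `unparsed`.
      ret.setActualInstance(deserialized);
    } else {
      // Zero or several matches: the payload cannot be attributed to a single
      // child schema, so the generated code falls back to an unparsed value.
      log.log(Level.FINER, String.format("Schemas matched: %d; input kept as unparsed", match));
    }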
- if (!((ObservabilityPipelineRenameFieldsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineCustomProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineRenameFieldsProcessor'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineCustomProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineRenameFieldsProcessor'", + "Input data does not match schema 'ObservabilityPipelineCustomProcessor'", e); } - // deserialize ObservabilityPipelineGenerateMetricsProcessor + // deserialize ObservabilityPipelineDatadogTagsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) - || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) + || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class) + (ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineGenerateMetricsProcessor.class); + .readValueAs(ObservabilityPipelineDatadogTagsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
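The wrapper-class guard at the top of each block (`X.class.equals(Integer.class) || ... || X.class.equals(String.class)`) only matters when a oneOf child schema is itself a primitive type; for POJO candidates such as these processors the chain is always false, so `attemptParsing` keeps its initial value of `true` and the token checks are skipped entirely. The invariant in isolation, using the candidate from the block above:

    // For an object schema none of the wrapper-class comparisons can hold,
    // so the guarded token checks never run and parsing is always attempted:
    boolean primitiveLike =
        ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class)
            || ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class);
    assert !primitiveLike;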
- if (!((ObservabilityPipelineGenerateMetricsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineDatadogTagsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineGenerateMetricsProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineDatadogTagsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineGenerateMetricsProcessor'", + "Input data does not match schema 'ObservabilityPipelineDatadogTagsProcessor'", e); } - // deserialize ObservabilityPipelineSampleProcessor + // deserialize ObservabilityPipelineDedupeProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSampleProcessor.class.equals(Integer.class) - || ObservabilityPipelineSampleProcessor.class.equals(Long.class) - || ObservabilityPipelineSampleProcessor.class.equals(Float.class) - || ObservabilityPipelineSampleProcessor.class.equals(Double.class) - || ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) - || ObservabilityPipelineSampleProcessor.class.equals(String.class)) { + if (ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Long.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Float.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Double.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) + || ObservabilityPipelineDedupeProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSampleProcessor.class.equals(Integer.class) - || ObservabilityPipelineSampleProcessor.class.equals(Long.class)) + ((ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSampleProcessor.class.equals(Float.class) - || ObservabilityPipelineSampleProcessor.class.equals(Double.class)) + ((ObservabilityPipelineDedupeProcessor.class.equals(Float.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) + (ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSampleProcessor.class.equals(String.class) + (ObservabilityPipelineDedupeProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSampleProcessor.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineDedupeProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineSampleProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineDedupeProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSampleProcessor'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineDedupeProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSampleProcessor'", + "Input data does not match schema 'ObservabilityPipelineDedupeProcessor'", e); } - // deserialize ObservabilityPipelineParseGrokProcessor + // deserialize ObservabilityPipelineEnrichmentTableProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(String.class)) { + if (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class)) + ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) - || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class)) + ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) + (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineParseGrokProcessor.class.equals(String.class) + (ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineParseGrokProcessor.class); + .readValueAs(ObservabilityPipelineEnrichmentTableProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineParseGrokProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineEnrichmentTableProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineParseGrokProcessor'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineEnrichmentTableProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineParseGrokProcessor'", + "Input data does not match schema 'ObservabilityPipelineEnrichmentTableProcessor'", e); } - // deserialize ObservabilityPipelineSensitiveDataScannerProcessor + // deserialize ObservabilityPipelineGenerateMetricsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Long.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Double.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)) { + if (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( - Long.class)) + ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) - || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( - Double.class)) + ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class) + (ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessor.class); + .readValueAs(ObservabilityPipelineGenerateMetricsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than 
it should be. - if (!((ObservabilityPipelineSensitiveDataScannerProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineGenerateMetricsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( Level.FINER, - "Input data matches schema 'ObservabilityPipelineSensitiveDataScannerProcessor'"); + "Input data matches schema 'ObservabilityPipelineGenerateMetricsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSensitiveDataScannerProcessor'", + "Input data does not match schema 'ObservabilityPipelineGenerateMetricsProcessor'", e); } @@ -650,155 +596,204 @@ public ObservabilityPipelineConfigProcessorItem deserialize( e); } - // deserialize ObservabilityPipelineAddEnvVarsProcessor + // deserialize ObservabilityPipelineParseGrokProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) - || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class) + (ObservabilityPipelineParseGrokProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineAddEnvVarsProcessor.class); + .readValueAs(ObservabilityPipelineParseGrokProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
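Stepping back to the `ObservabilityPipelineConfigPipelineType` enum introduced earlier in this patch: it follows the client's `ModelEnum` convention, where `fromValue` accepts any string rather than rejecting values outside `allowedValues` (an assumption about the `ModelEnum` base class, which is not part of this patch; unknown values are typically carried through and flagged rather than rejected, for forward compatibility). A short usage sketch using only the members shown in that file:

    ObservabilityPipelineConfigPipelineType logs = ObservabilityPipelineConfigPipelineType.LOGS;
    ObservabilityPipelineConfigPipelineType metrics =
        ObservabilityPipelineConfigPipelineType.fromValue("metrics"); // equivalent to METRICS
    // A value outside allowedValues still constructs; the serializer above
    // writes the raw string back out via jgen.writeObject(value.value):
    ObservabilityPipelineConfigPipelineType future =
        ObservabilityPipelineConfigPipelineType.fromValue("traces");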
- if (!((ObservabilityPipelineAddEnvVarsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineParseGrokProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineAddEnvVarsProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseGrokProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineAddEnvVarsProcessor'", + "Input data does not match schema 'ObservabilityPipelineParseGrokProcessor'", e); } - // deserialize ObservabilityPipelineDedupeProcessor + // deserialize ObservabilityPipelineParseJSONProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Long.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Float.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Double.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) - || ObservabilityPipelineDedupeProcessor.class.equals(String.class)) { + if (ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Long.class)) + ((ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineDedupeProcessor.class.equals(Float.class) - || ObservabilityPipelineDedupeProcessor.class.equals(Double.class)) + ((ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) + (ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineDedupeProcessor.class.equals(String.class) + (ObservabilityPipelineParseJSONProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineDedupeProcessor.class); + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineParseJSONProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineDedupeProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineParseJSONProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineDedupeProcessor'"); + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseJSONProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineDedupeProcessor'", + "Input data does not match schema 'ObservabilityPipelineParseJSONProcessor'", e); } - // deserialize ObservabilityPipelineEnrichmentTableProcessor + // deserialize ObservabilityPipelineParseXMLProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)) { + if (ObservabilityPipelineParseXMLProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Long.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Float.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Double.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class)) + ((ObservabilityPipelineParseXMLProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) - || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class)) + ((ObservabilityPipelineParseXMLProcessor.class.equals(Float.class) + || ObservabilityPipelineParseXMLProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) + (ObservabilityPipelineParseXMLProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class) + (ObservabilityPipelineParseXMLProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineEnrichmentTableProcessor.class); + .readValueAs(ObservabilityPipelineParseXMLProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineEnrichmentTableProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineParseXMLProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineEnrichmentTableProcessor'"); + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseXMLProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineEnrichmentTableProcessor'", + "Input data does not match schema 'ObservabilityPipelineParseXMLProcessor'", + e); + } + + // deserialize ObservabilityPipelineQuotaProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Long.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Float.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Double.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) + || ObservabilityPipelineQuotaProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineQuotaProcessor.class.equals(Float.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineQuotaProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineQuotaProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineQuotaProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineQuotaProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineQuotaProcessor'", e); } @@ -851,154 +846,363 @@ public ObservabilityPipelineConfigProcessorItem deserialize( e); } - // deserialize ObservabilityPipelineThrottleProcessor + // deserialize ObservabilityPipelineRemoveFieldsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Long.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Float.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Double.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) - || ObservabilityPipelineThrottleProcessor.class.equals(String.class)) { + if (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Long.class)) + ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineThrottleProcessor.class.equals(Float.class) - || ObservabilityPipelineThrottleProcessor.class.equals(Double.class)) + ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineThrottleProcessor.class.equals(String.class) + (ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineThrottleProcessor.class); + .readValueAs(ObservabilityPipelineRemoveFieldsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
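Note how each candidate, including the `ObservabilityPipelineQuotaProcessor` block just above, re-reads a buffered tree rather than the live parser: `tree.traverse(jp.getCodec())` opens a fresh Jackson `JsonParser` over the subtree captured earlier in the method (outside this hunk), which is why a failed attempt on one schema leaves the input intact for the next. The replay step in isolation:

    // Replay the buffered subtree through a new parser bound to the same codec:
    JsonParser candidateParser = tree.traverse(jp.getCodec());
    ObservabilityPipelineQuotaProcessor parsed =
        candidateParser.readValueAs(ObservabilityPipelineQuotaProcessor.class);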
- if (!((ObservabilityPipelineThrottleProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineRemoveFieldsProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineThrottleProcessor'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineRemoveFieldsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineThrottleProcessor'", + "Input data does not match schema 'ObservabilityPipelineRemoveFieldsProcessor'", e); } - // deserialize ObservabilityPipelineCustomProcessor + // deserialize ObservabilityPipelineRenameFieldsProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineCustomProcessor.class.equals(Integer.class) - || ObservabilityPipelineCustomProcessor.class.equals(Long.class) - || ObservabilityPipelineCustomProcessor.class.equals(Float.class) - || ObservabilityPipelineCustomProcessor.class.equals(Double.class) - || ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) - || ObservabilityPipelineCustomProcessor.class.equals(String.class)) { + if (ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineCustomProcessor.class.equals(Integer.class) - || ObservabilityPipelineCustomProcessor.class.equals(Long.class)) + ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineCustomProcessor.class.equals(Float.class) - || ObservabilityPipelineCustomProcessor.class.equals(Double.class)) + ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineCustomProcessor.class.equals(Boolean.class) + (ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineCustomProcessor.class.equals(String.class) + (ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineCustomProcessor.class); + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineRenameFieldsProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineCustomProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineRenameFieldsProcessor) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineCustomProcessor'"); + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineRenameFieldsProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineCustomProcessor'", + "Input data does not match schema 'ObservabilityPipelineRenameFieldsProcessor'", e); } - // deserialize ObservabilityPipelineDatadogTagsProcessor + // deserialize ObservabilityPipelineSampleProcessor try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class)) { + if (ObservabilityPipelineSampleProcessor.class.equals(Integer.class) + || ObservabilityPipelineSampleProcessor.class.equals(Long.class) + || ObservabilityPipelineSampleProcessor.class.equals(Float.class) + || ObservabilityPipelineSampleProcessor.class.equals(Double.class) + || ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSampleProcessor.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Integer.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Long.class)) + ((ObservabilityPipelineSampleProcessor.class.equals(Integer.class) + || ObservabilityPipelineSampleProcessor.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineDatadogTagsProcessor.class.equals(Float.class) - || ObservabilityPipelineDatadogTagsProcessor.class.equals(Double.class)) + ((ObservabilityPipelineSampleProcessor.class.equals(Float.class) + || ObservabilityPipelineSampleProcessor.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineDatadogTagsProcessor.class.equals(Boolean.class) + (ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineDatadogTagsProcessor.class.equals(String.class) + (ObservabilityPipelineSampleProcessor.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineDatadogTagsProcessor.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSampleProcessor.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineDatadogTagsProcessor) tmp).unparsed) { + if (!((ObservabilityPipelineSampleProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSampleProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSampleProcessor'", + e); + } + + // deserialize ObservabilityPipelineSensitiveDataScannerProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( + Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( + Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSensitiveDataScannerProcessor) tmp).unparsed) { deserialized = tmp; match++; } log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineDatadogTagsProcessor'"); + Level.FINER, + "Input data matches schema 'ObservabilityPipelineSensitiveDataScannerProcessor'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineDatadogTagsProcessor'", + "Input data does not match schema 'ObservabilityPipelineSensitiveDataScannerProcessor'", + e); + } + + // deserialize ObservabilityPipelineSplitArrayProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSplitArrayProcessor.class.equals(Integer.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Long.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Float.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Double.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSplitArrayProcessor.class.equals(Integer.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSplitArrayProcessor.class.equals(Float.class) + || ObservabilityPipelineSplitArrayProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSplitArrayProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSplitArrayProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSplitArrayProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSplitArrayProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineSplitArrayProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSplitArrayProcessor'", + e); + } + + // deserialize ObservabilityPipelineThrottleProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Long.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Float.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Double.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + || ObservabilityPipelineThrottleProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineThrottleProcessor.class.equals(Float.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineThrottleProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineThrottleProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineThrottleProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineThrottleProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineThrottleProcessor'", + e); + } + + // deserialize ObservabilityPipelineMetricTagsProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineMetricTagsProcessor.class.equals(Integer.class) + || ObservabilityPipelineMetricTagsProcessor.class.equals(Long.class) + || ObservabilityPipelineMetricTagsProcessor.class.equals(Float.class) + || ObservabilityPipelineMetricTagsProcessor.class.equals(Double.class) + || ObservabilityPipelineMetricTagsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineMetricTagsProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineMetricTagsProcessor.class.equals(Integer.class) + || ObservabilityPipelineMetricTagsProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineMetricTagsProcessor.class.equals(Float.class) + || ObservabilityPipelineMetricTagsProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineMetricTagsProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineMetricTagsProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineMetricTagsProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineMetricTagsProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineMetricTagsProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineMetricTagsProcessor'", e); } @@ -1037,27 +1241,37 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineFilterProce setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseJSONProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddEnvVarsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineQuotaProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddFieldsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddFieldsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddHostnameProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRemoveFieldsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineCustomProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameFieldsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDatadogTagsProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDedupeProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineEnrichmentTableProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } @@ -1067,7 +1281,7 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineGenerateMet setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSampleProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineOcsfMapperProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } @@ -1077,48 +1291,58 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseGrokPr setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem( - ObservabilityPipelineSensitiveDataScannerProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseJSONProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineOcsfMapperProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseXMLProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddEnvVarsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineQuotaProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDedupeProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineReduceProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public 
ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineEnrichmentTableProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRemoveFieldsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineReduceProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameFieldsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineThrottleProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSampleProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineCustomProcessor o) { + public ObservabilityPipelineConfigProcessorItem( + ObservabilityPipelineSensitiveDataScannerProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDatadogTagsProcessor o) { + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSplitArrayProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineThrottleProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineMetricTagsProcessor o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } @@ -1127,57 +1351,69 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDatadogTags schemas.put( "ObservabilityPipelineFilterProcessor", new GenericType() {}); + schemas.put( + "ObservabilityPipelineAddEnvVarsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineAddFieldsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineAddHostnameProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineCustomProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineDatadogTagsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineDedupeProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineEnrichmentTableProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineGenerateMetricsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineOcsfMapperProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineParseGrokProcessor", + new GenericType() {}); schemas.put( "ObservabilityPipelineParseJSONProcessor", new GenericType() {}); + schemas.put( + "ObservabilityPipelineParseXMLProcessor", + new GenericType() {}); schemas.put( "ObservabilityPipelineQuotaProcessor", new GenericType() {}); schemas.put( - "ObservabilityPipelineAddFieldsProcessor", - new GenericType() {}); + "ObservabilityPipelineReduceProcessor", + new GenericType() {}); schemas.put( "ObservabilityPipelineRemoveFieldsProcessor", new GenericType() {}); schemas.put( "ObservabilityPipelineRenameFieldsProcessor", new GenericType() {}); - schemas.put( - "ObservabilityPipelineGenerateMetricsProcessor", - new GenericType() {}); schemas.put( "ObservabilityPipelineSampleProcessor", new GenericType() {}); - schemas.put( - "ObservabilityPipelineParseGrokProcessor", - new GenericType() {}); schemas.put( "ObservabilityPipelineSensitiveDataScannerProcessor", new GenericType() {}); schemas.put( - "ObservabilityPipelineOcsfMapperProcessor", - new 
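The reordered block above keeps the generator's one-constructor-per-variant contract: each new processor type (AddHostname, ParseXML, SplitArray, MetricTags) gets its own wrapper constructor, and each constructor delegates to setActualInstance for validation. A minimal caller-side sketch, assuming the generated no-argument model constructor (standard for this client) and eliding the concrete fields, which vary per schema:

    ObservabilityPipelineFilterProcessor filter = new ObservabilityPipelineFilterProcessor();
    // ... populate filter per the ObservabilityPipelineFilterProcessor model ...
    ObservabilityPipelineConfigProcessorItem item =
        new ObservabilityPipelineConfigProcessorItem(filter);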
GenericType() {}); - schemas.put( - "ObservabilityPipelineAddEnvVarsProcessor", - new GenericType() {}); - schemas.put( - "ObservabilityPipelineDedupeProcessor", - new GenericType() {}); - schemas.put( - "ObservabilityPipelineEnrichmentTableProcessor", - new GenericType() {}); - schemas.put( - "ObservabilityPipelineReduceProcessor", - new GenericType() {}); + "ObservabilityPipelineSplitArrayProcessor", + new GenericType() {}); schemas.put( "ObservabilityPipelineThrottleProcessor", new GenericType() {}); schemas.put( - "ObservabilityPipelineCustomProcessor", - new GenericType() {}); - schemas.put( - "ObservabilityPipelineDatadogTagsProcessor", - new GenericType() {}); + "ObservabilityPipelineMetricTagsProcessor", + new GenericType() {}); JSON.registerDescendants( ObservabilityPipelineConfigProcessorItem.class, Collections.unmodifiableMap(schemas)); } @@ -1190,15 +1426,17 @@ public Map getSchemas() { /** * Set the instance that matches the oneOf child schema, check the instance parameter is valid * against the oneOf child schemas: ObservabilityPipelineFilterProcessor, - * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, - * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, - * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, - * ObservabilityPipelineSensitiveDataScannerProcessor, ObservabilityPipelineOcsfMapperProcessor, - * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineDedupeProcessor, - * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineReduceProcessor, - * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineCustomProcessor, - * ObservabilityPipelineDatadogTagsProcessor + * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineAddFieldsProcessor, + * ObservabilityPipelineAddHostnameProcessor, ObservabilityPipelineCustomProcessor, + * ObservabilityPipelineDatadogTagsProcessor, ObservabilityPipelineDedupeProcessor, + * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor, + * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor, + * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, + * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, + * ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, + * ObservabilityPipelineMetricTagsProcessor * *
   * <p>
It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -1206,94 +1444,114 @@ public Map getSchemas() { @Override public void setActualInstance(Object instance) { if (JSON.isInstanceOf( - ObservabilityPipelineFilterProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineFilterProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineAddEnvVarsProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineAddFieldsProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineAddHostnameProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineCustomProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineParseJSONProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineDatadogTagsProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineQuotaProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineDedupeProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineAddFieldsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineEnrichmentTableProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineRemoveFieldsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineGenerateMetricsProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineRenameFieldsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineOcsfMapperProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineGenerateMetricsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineParseGrokProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineSampleProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineParseJSONProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineParseGrokProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineParseXMLProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineSensitiveDataScannerProcessor.class, - instance, - new HashSet>())) { + ObservabilityPipelineQuotaProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineOcsfMapperProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineReduceProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineAddEnvVarsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineRemoveFieldsProcessor.class, instance, new HashSet>())) { 
super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineDedupeProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineRenameFieldsProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineEnrichmentTableProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineSampleProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineReduceProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineSensitiveDataScannerProcessor.class, + instance, + new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineThrottleProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineSplitArrayProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineCustomProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineThrottleProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineDatadogTagsProcessor.class, instance, new HashSet>())) { + ObservabilityPipelineMetricTagsProcessor.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } @@ -1304,42 +1562,48 @@ public void setActualInstance(Object instance) { } throw new RuntimeException( "Invalid instance type. Must be ObservabilityPipelineFilterProcessor," - + " ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor," - + " ObservabilityPipelineAddFieldsProcessor," - + " ObservabilityPipelineRemoveFieldsProcessor," - + " ObservabilityPipelineRenameFieldsProcessor," + + " ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineAddFieldsProcessor," + + " ObservabilityPipelineAddHostnameProcessor, ObservabilityPipelineCustomProcessor," + + " ObservabilityPipelineDatadogTagsProcessor, ObservabilityPipelineDedupeProcessor," + + " ObservabilityPipelineEnrichmentTableProcessor," + " ObservabilityPipelineGenerateMetricsProcessor," - + " ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor," + + " ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor," + + " ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor," + + " ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor," + + " ObservabilityPipelineRemoveFieldsProcessor," + + " ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineSampleProcessor," + " ObservabilityPipelineSensitiveDataScannerProcessor," - + " ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineAddEnvVarsProcessor," - + " ObservabilityPipelineDedupeProcessor," - + " ObservabilityPipelineEnrichmentTableProcessor," - + " ObservabilityPipelineReduceProcessor, ObservabilityPipelineThrottleProcessor," - + " ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor"); + + " ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor," + + " ObservabilityPipelineMetricTagsProcessor"); } /** * Get the actual instance, which can be the following: ObservabilityPipelineFilterProcessor, - * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, - * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor, 
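setActualInstance is the single validation choke point: anything that is not one of the enumerated processor schemas falls through every JSON.isInstanceOf check and triggers the RuntimeException assembled above, whose message now lists the alphabetized schema set ending with ObservabilityPipelineMetricTagsProcessor. A defensive-use sketch (hypothetical caller code, assuming the generated default constructor; not part of the generated surface):

    ObservabilityPipelineConfigProcessorItem item =
        new ObservabilityPipelineConfigProcessorItem();
    try {
      item.setActualInstance(new Object()); // not a oneOf child schema
    } catch (RuntimeException e) {
      // e.getMessage() enumerates every accepted processor schema
    }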
ObservabilityPipelineGenerateMetricsProcessor, - * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, - * ObservabilityPipelineSensitiveDataScannerProcessor, ObservabilityPipelineOcsfMapperProcessor, - * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineDedupeProcessor, - * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineReduceProcessor, - * ObservabilityPipelineThrottleProcessor, ObservabilityPipelineCustomProcessor, - * ObservabilityPipelineDatadogTagsProcessor + * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineAddFieldsProcessor, + * ObservabilityPipelineAddHostnameProcessor, ObservabilityPipelineCustomProcessor, + * ObservabilityPipelineDatadogTagsProcessor, ObservabilityPipelineDedupeProcessor, + * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineParseXMLProcessor, + * ObservabilityPipelineQuotaProcessor, ObservabilityPipelineReduceProcessor, + * ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, + * ObservabilityPipelineSampleProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, + * ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, + * ObservabilityPipelineMetricTagsProcessor * * @return The actual instance (ObservabilityPipelineFilterProcessor, - * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, - * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, - * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineAddFieldsProcessor, + * ObservabilityPipelineAddHostnameProcessor, ObservabilityPipelineCustomProcessor, + * ObservabilityPipelineDatadogTagsProcessor, ObservabilityPipelineDedupeProcessor, + * ObservabilityPipelineEnrichmentTableProcessor, + * ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineOcsfMapperProcessor, + * ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor, + * ObservabilityPipelineParseXMLProcessor, ObservabilityPipelineQuotaProcessor, + * ObservabilityPipelineReduceProcessor, ObservabilityPipelineRemoveFieldsProcessor, + * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineSampleProcessor, * ObservabilityPipelineSensitiveDataScannerProcessor, - * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineAddEnvVarsProcessor, - * ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor, - * ObservabilityPipelineReduceProcessor, ObservabilityPipelineThrottleProcessor, - * ObservabilityPipelineCustomProcessor, ObservabilityPipelineDatadogTagsProcessor) + * ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, + * ObservabilityPipelineMetricTagsProcessor) */ @Override public Object getActualInstance() { @@ -1359,27 +1623,15 @@ public ObservabilityPipelineFilterProcessor getObservabilityPipelineFilterProces } /** - * Get the actual instance of `ObservabilityPipelineParseJSONProcessor`. If the actual instance is - * not `ObservabilityPipelineParseJSONProcessor`, the ClassCastException will be thrown. 
- * - * @return The actual instance of `ObservabilityPipelineParseJSONProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineParseJSONProcessor` - */ - public ObservabilityPipelineParseJSONProcessor getObservabilityPipelineParseJSONProcessor() - throws ClassCastException { - return (ObservabilityPipelineParseJSONProcessor) super.getActualInstance(); - } - - /** - * Get the actual instance of `ObservabilityPipelineQuotaProcessor`. If the actual instance is not - * `ObservabilityPipelineQuotaProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineAddEnvVarsProcessor`. If the actual instance + * is not `ObservabilityPipelineAddEnvVarsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineQuotaProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineQuotaProcessor` + * @return The actual instance of `ObservabilityPipelineAddEnvVarsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAddEnvVarsProcessor` */ - public ObservabilityPipelineQuotaProcessor getObservabilityPipelineQuotaProcessor() + public ObservabilityPipelineAddEnvVarsProcessor getObservabilityPipelineAddEnvVarsProcessor() throws ClassCastException { - return (ObservabilityPipelineQuotaProcessor) super.getActualInstance(); + return (ObservabilityPipelineAddEnvVarsProcessor) super.getActualInstance(); } /** @@ -1395,27 +1647,65 @@ public ObservabilityPipelineAddFieldsProcessor getObservabilityPipelineAddFields } /** - * Get the actual instance of `ObservabilityPipelineRemoveFieldsProcessor`. If the actual instance - * is not `ObservabilityPipelineRemoveFieldsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineAddHostnameProcessor`. If the actual instance + * is not `ObservabilityPipelineAddHostnameProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineRemoveFieldsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineRemoveFieldsProcessor` + * @return The actual instance of `ObservabilityPipelineAddHostnameProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAddHostnameProcessor` */ - public ObservabilityPipelineRemoveFieldsProcessor getObservabilityPipelineRemoveFieldsProcessor() + public ObservabilityPipelineAddHostnameProcessor getObservabilityPipelineAddHostnameProcessor() throws ClassCastException { - return (ObservabilityPipelineRemoveFieldsProcessor) super.getActualInstance(); + return (ObservabilityPipelineAddHostnameProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineRenameFieldsProcessor`. If the actual instance - * is not `ObservabilityPipelineRenameFieldsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineCustomProcessor`. If the actual instance is + * not `ObservabilityPipelineCustomProcessor`, the ClassCastException will be thrown. 
* - * @return The actual instance of `ObservabilityPipelineRenameFieldsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineRenameFieldsProcessor` + * @return The actual instance of `ObservabilityPipelineCustomProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineCustomProcessor` */ - public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRenameFieldsProcessor() + public ObservabilityPipelineCustomProcessor getObservabilityPipelineCustomProcessor() throws ClassCastException { - return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance(); + return (ObservabilityPipelineCustomProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineDatadogTagsProcessor`. If the actual instance + * is not `ObservabilityPipelineDatadogTagsProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineDatadogTagsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogTagsProcessor` + */ + public ObservabilityPipelineDatadogTagsProcessor getObservabilityPipelineDatadogTagsProcessor() + throws ClassCastException { + return (ObservabilityPipelineDatadogTagsProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineDedupeProcessor`. If the actual instance is + * not `ObservabilityPipelineDedupeProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineDedupeProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineDedupeProcessor` + */ + public ObservabilityPipelineDedupeProcessor getObservabilityPipelineDedupeProcessor() + throws ClassCastException { + return (ObservabilityPipelineDedupeProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineEnrichmentTableProcessor`. If the actual + * instance is not `ObservabilityPipelineEnrichmentTableProcessor`, the ClassCastException will be + * thrown. + * + * @return The actual instance of `ObservabilityPipelineEnrichmentTableProcessor` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineEnrichmentTableProcessor` + */ + public ObservabilityPipelineEnrichmentTableProcessor + getObservabilityPipelineEnrichmentTableProcessor() throws ClassCastException { + return (ObservabilityPipelineEnrichmentTableProcessor) super.getActualInstance(); } /** @@ -1433,15 +1723,15 @@ public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRename } /** - * Get the actual instance of `ObservabilityPipelineSampleProcessor`. If the actual instance is - * not `ObservabilityPipelineSampleProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineOcsfMapperProcessor`. If the actual instance + * is not `ObservabilityPipelineOcsfMapperProcessor`, the ClassCastException will be thrown. 
* - * @return The actual instance of `ObservabilityPipelineSampleProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineSampleProcessor` + * @return The actual instance of `ObservabilityPipelineOcsfMapperProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineOcsfMapperProcessor` */ - public ObservabilityPipelineSampleProcessor getObservabilityPipelineSampleProcessor() + public ObservabilityPipelineOcsfMapperProcessor getObservabilityPipelineOcsfMapperProcessor() throws ClassCastException { - return (ObservabilityPipelineSampleProcessor) super.getActualInstance(); + return (ObservabilityPipelineOcsfMapperProcessor) super.getActualInstance(); } /** @@ -1457,79 +1747,113 @@ public ObservabilityPipelineParseGrokProcessor getObservabilityPipelineParseGrok } /** - * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`. If the actual - * instance is not `ObservabilityPipelineSensitiveDataScannerProcessor`, the ClassCastException - * will be thrown. + * Get the actual instance of `ObservabilityPipelineParseJSONProcessor`. If the actual instance is + * not `ObservabilityPipelineParseJSONProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor` - * @throws ClassCastException if the instance is not - * `ObservabilityPipelineSensitiveDataScannerProcessor` + * @return The actual instance of `ObservabilityPipelineParseJSONProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineParseJSONProcessor` */ - public ObservabilityPipelineSensitiveDataScannerProcessor - getObservabilityPipelineSensitiveDataScannerProcessor() throws ClassCastException { - return (ObservabilityPipelineSensitiveDataScannerProcessor) super.getActualInstance(); + public ObservabilityPipelineParseJSONProcessor getObservabilityPipelineParseJSONProcessor() + throws ClassCastException { + return (ObservabilityPipelineParseJSONProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineOcsfMapperProcessor`. If the actual instance - * is not `ObservabilityPipelineOcsfMapperProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineParseXMLProcessor`. If the actual instance is + * not `ObservabilityPipelineParseXMLProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineOcsfMapperProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineOcsfMapperProcessor` + * @return The actual instance of `ObservabilityPipelineParseXMLProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineParseXMLProcessor` */ - public ObservabilityPipelineOcsfMapperProcessor getObservabilityPipelineOcsfMapperProcessor() + public ObservabilityPipelineParseXMLProcessor getObservabilityPipelineParseXMLProcessor() throws ClassCastException { - return (ObservabilityPipelineOcsfMapperProcessor) super.getActualInstance(); + return (ObservabilityPipelineParseXMLProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineAddEnvVarsProcessor`. If the actual instance - * is not `ObservabilityPipelineAddEnvVarsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineQuotaProcessor`. If the actual instance is not + * `ObservabilityPipelineQuotaProcessor`, the ClassCastException will be thrown. 
* - * @return The actual instance of `ObservabilityPipelineAddEnvVarsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineAddEnvVarsProcessor` + * @return The actual instance of `ObservabilityPipelineQuotaProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineQuotaProcessor` */ - public ObservabilityPipelineAddEnvVarsProcessor getObservabilityPipelineAddEnvVarsProcessor() + public ObservabilityPipelineQuotaProcessor getObservabilityPipelineQuotaProcessor() throws ClassCastException { - return (ObservabilityPipelineAddEnvVarsProcessor) super.getActualInstance(); + return (ObservabilityPipelineQuotaProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineDedupeProcessor`. If the actual instance is - * not `ObservabilityPipelineDedupeProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineReduceProcessor`. If the actual instance is + * not `ObservabilityPipelineReduceProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineDedupeProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineDedupeProcessor` + * @return The actual instance of `ObservabilityPipelineReduceProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineReduceProcessor` */ - public ObservabilityPipelineDedupeProcessor getObservabilityPipelineDedupeProcessor() + public ObservabilityPipelineReduceProcessor getObservabilityPipelineReduceProcessor() throws ClassCastException { - return (ObservabilityPipelineDedupeProcessor) super.getActualInstance(); + return (ObservabilityPipelineReduceProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineEnrichmentTableProcessor`. If the actual - * instance is not `ObservabilityPipelineEnrichmentTableProcessor`, the ClassCastException will be - * thrown. + * Get the actual instance of `ObservabilityPipelineRemoveFieldsProcessor`. If the actual instance + * is not `ObservabilityPipelineRemoveFieldsProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineEnrichmentTableProcessor` + * @return The actual instance of `ObservabilityPipelineRemoveFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineRemoveFieldsProcessor` + */ + public ObservabilityPipelineRemoveFieldsProcessor getObservabilityPipelineRemoveFieldsProcessor() + throws ClassCastException { + return (ObservabilityPipelineRemoveFieldsProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineRenameFieldsProcessor`. If the actual instance + * is not `ObservabilityPipelineRenameFieldsProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineRenameFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineRenameFieldsProcessor` + */ + public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRenameFieldsProcessor() + throws ClassCastException { + return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSampleProcessor`. If the actual instance is + * not `ObservabilityPipelineSampleProcessor`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineSampleProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSampleProcessor` + */ + public ObservabilityPipelineSampleProcessor getObservabilityPipelineSampleProcessor() + throws ClassCastException { + return (ObservabilityPipelineSampleProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`. If the actual + * instance is not `ObservabilityPipelineSensitiveDataScannerProcessor`, the ClassCastException + * will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor` * @throws ClassCastException if the instance is not - * `ObservabilityPipelineEnrichmentTableProcessor` + * `ObservabilityPipelineSensitiveDataScannerProcessor` */ - public ObservabilityPipelineEnrichmentTableProcessor - getObservabilityPipelineEnrichmentTableProcessor() throws ClassCastException { - return (ObservabilityPipelineEnrichmentTableProcessor) super.getActualInstance(); + public ObservabilityPipelineSensitiveDataScannerProcessor + getObservabilityPipelineSensitiveDataScannerProcessor() throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessor) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineReduceProcessor`. If the actual instance is - * not `ObservabilityPipelineReduceProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineSplitArrayProcessor`. If the actual instance + * is not `ObservabilityPipelineSplitArrayProcessor`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineReduceProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineReduceProcessor` + * @return The actual instance of `ObservabilityPipelineSplitArrayProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplitArrayProcessor` */ - public ObservabilityPipelineReduceProcessor getObservabilityPipelineReduceProcessor() + public ObservabilityPipelineSplitArrayProcessor getObservabilityPipelineSplitArrayProcessor() throws ClassCastException { - return (ObservabilityPipelineReduceProcessor) super.getActualInstance(); + return (ObservabilityPipelineSplitArrayProcessor) super.getActualInstance(); } /** @@ -1545,26 +1869,14 @@ public ObservabilityPipelineThrottleProcessor getObservabilityPipelineThrottlePr } /** - * Get the actual instance of `ObservabilityPipelineCustomProcessor`. If the actual instance is - * not `ObservabilityPipelineCustomProcessor`, the ClassCastException will be thrown. - * - * @return The actual instance of `ObservabilityPipelineCustomProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineCustomProcessor` - */ - public ObservabilityPipelineCustomProcessor getObservabilityPipelineCustomProcessor() - throws ClassCastException { - return (ObservabilityPipelineCustomProcessor) super.getActualInstance(); - } - - /** - * Get the actual instance of `ObservabilityPipelineDatadogTagsProcessor`. If the actual instance - * is not `ObservabilityPipelineDatadogTagsProcessor`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineMetricTagsProcessor`. If the actual instance + * is not `ObservabilityPipelineMetricTagsProcessor`, the ClassCastException will be thrown. 
* - * @return The actual instance of `ObservabilityPipelineDatadogTagsProcessor` - * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogTagsProcessor` + * @return The actual instance of `ObservabilityPipelineMetricTagsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineMetricTagsProcessor` */ - public ObservabilityPipelineDatadogTagsProcessor getObservabilityPipelineDatadogTagsProcessor() + public ObservabilityPipelineMetricTagsProcessor getObservabilityPipelineMetricTagsProcessor() throws ClassCastException { - return (ObservabilityPipelineDatadogTagsProcessor) super.getActualInstance(); + return (ObservabilityPipelineMetricTagsProcessor) super.getActualInstance(); } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java index 58c17babac0..15957348145 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java @@ -89,52 +89,6 @@ public ObservabilityPipelineConfigSourceItem deserialize( boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); int match = 0; JsonToken token = tree.traverse(jp.getCodec()).nextToken(); - // deserialize ObservabilityPipelineKafkaSource - try { - boolean attemptParsing = true; - // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineKafkaSource.class.equals(Integer.class) - || ObservabilityPipelineKafkaSource.class.equals(Long.class) - || ObservabilityPipelineKafkaSource.class.equals(Float.class) - || ObservabilityPipelineKafkaSource.class.equals(Double.class) - || ObservabilityPipelineKafkaSource.class.equals(Boolean.class) - || ObservabilityPipelineKafkaSource.class.equals(String.class)) { - attemptParsing = typeCoercion; - if (!attemptParsing) { - attemptParsing |= - ((ObservabilityPipelineKafkaSource.class.equals(Integer.class) - || ObservabilityPipelineKafkaSource.class.equals(Long.class)) - && token == JsonToken.VALUE_NUMBER_INT); - attemptParsing |= - ((ObservabilityPipelineKafkaSource.class.equals(Float.class) - || ObservabilityPipelineKafkaSource.class.equals(Double.class)) - && (token == JsonToken.VALUE_NUMBER_FLOAT - || token == JsonToken.VALUE_NUMBER_INT)); - attemptParsing |= - (ObservabilityPipelineKafkaSource.class.equals(Boolean.class) - && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); - attemptParsing |= - (ObservabilityPipelineKafkaSource.class.equals(String.class) - && token == JsonToken.VALUE_STRING); - } - } - if (attemptParsing) { - tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineKafkaSource.class); - // TODO: there is no validation against JSON schema constraints - // (min, max, enum, pattern...), this does not perform a strict JSON - // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineKafkaSource) tmp).unparsed) { - deserialized = tmp; - match++; - } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineKafkaSource'"); - } - } catch (Exception e) { - // deserialization failed, continue - log.log( - Level.FINER, "Input data does not match schema 'ObservabilityPipelineKafkaSource'", e); - } - // deserialize ObservabilityPipelineDatadogAgentSource try { boolean attemptParsing = true; @@ -186,149 +140,152 @@ public ObservabilityPipelineConfigSourceItem deserialize( e); } - // deserialize ObservabilityPipelineSplunkTcpSource + // deserialize ObservabilityPipelineAmazonDataFirehoseSource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class) - || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class) - || ObservabilityPipelineSplunkTcpSource.class.equals(Float.class) - || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class) - || ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class) - || ObservabilityPipelineSplunkTcpSource.class.equals(String.class)) { + if (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Integer.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Long.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Float.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Double.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Boolean.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class) - || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class)) + ((ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Integer.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSplunkTcpSource.class.equals(Float.class) - || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class)) + ((ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Float.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class) + (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSplunkTcpSource.class.equals(String.class) + (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkTcpSource.class); + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAmazonDataFirehoseSource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
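The removal above and the re-addition further down are pure reordering: every attempt re-traverses the same buffered tree, counts a match only when the candidate parses with unparsed == false, and the winner is presumably picked from the final match count in the method tail outside this hunk, so moving the Kafka block to its alphabetical slot cannot change the outcome. A round-trip sketch, assuming the deserializer is bound to the class via @JsonDeserialize as in this generated client, with snake_case field names taken from the schema (payload shape illustrative):

    import com.datadog.api.client.v2.model.ObservabilityPipelineConfigSourceItem;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class SourceItemRoundTrip {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json =
            "{\"id\":\"kafka-in\",\"type\":\"kafka\",\"group_id\":\"cg\",\"topics\":[\"logs\"]}";
        ObservabilityPipelineConfigSourceItem item =
            mapper.readValue(json, ObservabilityPipelineConfigSourceItem.class);
        // expected: ObservabilityPipelineKafkaSource
        System.out.println(item.getActualInstance().getClass().getSimpleName());
      }
    }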
- if (!((ObservabilityPipelineSplunkTcpSource) tmp).unparsed) { + if (!((ObservabilityPipelineAmazonDataFirehoseSource) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkTcpSource'"); + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineAmazonDataFirehoseSource'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSplunkTcpSource'", + "Input data does not match schema 'ObservabilityPipelineAmazonDataFirehoseSource'", e); } - // deserialize ObservabilityPipelineSplunkHecSource + // deserialize ObservabilityPipelineAmazonS3Source try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSplunkHecSource.class.equals(Integer.class) - || ObservabilityPipelineSplunkHecSource.class.equals(Long.class) - || ObservabilityPipelineSplunkHecSource.class.equals(Float.class) - || ObservabilityPipelineSplunkHecSource.class.equals(Double.class) - || ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class) - || ObservabilityPipelineSplunkHecSource.class.equals(String.class)) { + if (ObservabilityPipelineAmazonS3Source.class.equals(Integer.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Long.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Float.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Double.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class) + || ObservabilityPipelineAmazonS3Source.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSplunkHecSource.class.equals(Integer.class) - || ObservabilityPipelineSplunkHecSource.class.equals(Long.class)) + ((ObservabilityPipelineAmazonS3Source.class.equals(Integer.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSplunkHecSource.class.equals(Float.class) - || ObservabilityPipelineSplunkHecSource.class.equals(Double.class)) + ((ObservabilityPipelineAmazonS3Source.class.equals(Float.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class) + (ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSplunkHecSource.class.equals(String.class) + (ObservabilityPipelineAmazonS3Source.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkHecSource.class); + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineAmazonS3Source.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineSplunkHecSource) tmp).unparsed) { + if (!((ObservabilityPipelineAmazonS3Source) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecSource'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineAmazonS3Source'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSplunkHecSource'", + "Input data does not match schema 'ObservabilityPipelineAmazonS3Source'", e); } - // deserialize ObservabilityPipelineAmazonS3Source + // deserialize ObservabilityPipelineFluentBitSource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineAmazonS3Source.class.equals(Integer.class) - || ObservabilityPipelineAmazonS3Source.class.equals(Long.class) - || ObservabilityPipelineAmazonS3Source.class.equals(Float.class) - || ObservabilityPipelineAmazonS3Source.class.equals(Double.class) - || ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class) - || ObservabilityPipelineAmazonS3Source.class.equals(String.class)) { + if (ObservabilityPipelineFluentBitSource.class.equals(Integer.class) + || ObservabilityPipelineFluentBitSource.class.equals(Long.class) + || ObservabilityPipelineFluentBitSource.class.equals(Float.class) + || ObservabilityPipelineFluentBitSource.class.equals(Double.class) + || ObservabilityPipelineFluentBitSource.class.equals(Boolean.class) + || ObservabilityPipelineFluentBitSource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineAmazonS3Source.class.equals(Integer.class) - || ObservabilityPipelineAmazonS3Source.class.equals(Long.class)) + ((ObservabilityPipelineFluentBitSource.class.equals(Integer.class) + || ObservabilityPipelineFluentBitSource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineAmazonS3Source.class.equals(Float.class) - || ObservabilityPipelineAmazonS3Source.class.equals(Double.class)) + ((ObservabilityPipelineFluentBitSource.class.equals(Float.class) + || ObservabilityPipelineFluentBitSource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class) + (ObservabilityPipelineFluentBitSource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineAmazonS3Source.class.equals(String.class) + (ObservabilityPipelineFluentBitSource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineAmazonS3Source.class); + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFluentBitSource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineAmazonS3Source) tmp).unparsed) { + if (!((ObservabilityPipelineFluentBitSource) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineAmazonS3Source'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFluentBitSource'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineAmazonS3Source'", + "Input data does not match schema 'ObservabilityPipelineFluentBitSource'", e); } @@ -380,150 +337,246 @@ public ObservabilityPipelineConfigSourceItem deserialize( e); } - // deserialize ObservabilityPipelineFluentBitSource + // deserialize ObservabilityPipelineGooglePubSubSource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineFluentBitSource.class.equals(Integer.class) - || ObservabilityPipelineFluentBitSource.class.equals(Long.class) - || ObservabilityPipelineFluentBitSource.class.equals(Float.class) - || ObservabilityPipelineFluentBitSource.class.equals(Double.class) - || ObservabilityPipelineFluentBitSource.class.equals(Boolean.class) - || ObservabilityPipelineFluentBitSource.class.equals(String.class)) { + if (ObservabilityPipelineGooglePubSubSource.class.equals(Integer.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Long.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Float.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Double.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Boolean.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineFluentBitSource.class.equals(Integer.class) - || ObservabilityPipelineFluentBitSource.class.equals(Long.class)) + ((ObservabilityPipelineGooglePubSubSource.class.equals(Integer.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineFluentBitSource.class.equals(Float.class) - || ObservabilityPipelineFluentBitSource.class.equals(Double.class)) + ((ObservabilityPipelineGooglePubSubSource.class.equals(Float.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineFluentBitSource.class.equals(Boolean.class) + (ObservabilityPipelineGooglePubSubSource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineFluentBitSource.class.equals(String.class) + (ObservabilityPipelineGooglePubSubSource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFluentBitSource.class); + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGooglePubSubSource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
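The Class.equals(Integer.class)/Long/Float/Double/Boolean/String ladder repeated in each block is the generator's guard for oneOf branches that are bare JSON scalars: for such branches, parsing is attempted only when the client mapper allows scalar coercion or the current token already has the matching shape. Every source schema here is an object, so the ladder is constant-false and attemptParsing stays true; the guard only becomes load-bearing with a mapper configured like this (standard Jackson API, shown for reference):

    import com.fasterxml.jackson.databind.MapperFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.json.JsonMapper;

    // MapperFeature.ALLOW_COERCION_OF_SCALARS is what the deserializer reads
    // via ctxt.isEnabled(...) into its local typeCoercion flag.
    ObjectMapper mapper = JsonMapper.builder()
        .enable(MapperFeature.ALLOW_COERCION_OF_SCALARS)
        .build();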
- if (!((ObservabilityPipelineFluentBitSource) tmp).unparsed) { + if (!((ObservabilityPipelineGooglePubSubSource) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFluentBitSource'"); + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineGooglePubSubSource'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineFluentBitSource'", + "Input data does not match schema 'ObservabilityPipelineGooglePubSubSource'", e); } - // deserialize ObservabilityPipelineHttpServerSource + // deserialize ObservabilityPipelineHttpClientSource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineHttpServerSource.class.equals(Integer.class) - || ObservabilityPipelineHttpServerSource.class.equals(Long.class) - || ObservabilityPipelineHttpServerSource.class.equals(Float.class) - || ObservabilityPipelineHttpServerSource.class.equals(Double.class) - || ObservabilityPipelineHttpServerSource.class.equals(Boolean.class) - || ObservabilityPipelineHttpServerSource.class.equals(String.class)) { + if (ObservabilityPipelineHttpClientSource.class.equals(Integer.class) + || ObservabilityPipelineHttpClientSource.class.equals(Long.class) + || ObservabilityPipelineHttpClientSource.class.equals(Float.class) + || ObservabilityPipelineHttpClientSource.class.equals(Double.class) + || ObservabilityPipelineHttpClientSource.class.equals(Boolean.class) + || ObservabilityPipelineHttpClientSource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineHttpServerSource.class.equals(Integer.class) - || ObservabilityPipelineHttpServerSource.class.equals(Long.class)) + ((ObservabilityPipelineHttpClientSource.class.equals(Integer.class) + || ObservabilityPipelineHttpClientSource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineHttpServerSource.class.equals(Float.class) - || ObservabilityPipelineHttpServerSource.class.equals(Double.class)) + ((ObservabilityPipelineHttpClientSource.class.equals(Float.class) + || ObservabilityPipelineHttpClientSource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineHttpServerSource.class.equals(Boolean.class) + (ObservabilityPipelineHttpClientSource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineHttpServerSource.class.equals(String.class) + (ObservabilityPipelineHttpClientSource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineHttpServerSource.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineHttpClientSource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineHttpServerSource) tmp).unparsed) { + if (!((ObservabilityPipelineHttpClientSource) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineHttpServerSource'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineHttpClientSource'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineHttpServerSource'", + "Input data does not match schema 'ObservabilityPipelineHttpClientSource'", e); } - // deserialize ObservabilityPipelineSumoLogicSource + // deserialize ObservabilityPipelineHttpServerSource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSumoLogicSource.class.equals(Integer.class) - || ObservabilityPipelineSumoLogicSource.class.equals(Long.class) - || ObservabilityPipelineSumoLogicSource.class.equals(Float.class) - || ObservabilityPipelineSumoLogicSource.class.equals(Double.class) - || ObservabilityPipelineSumoLogicSource.class.equals(Boolean.class) - || ObservabilityPipelineSumoLogicSource.class.equals(String.class)) { + if (ObservabilityPipelineHttpServerSource.class.equals(Integer.class) + || ObservabilityPipelineHttpServerSource.class.equals(Long.class) + || ObservabilityPipelineHttpServerSource.class.equals(Float.class) + || ObservabilityPipelineHttpServerSource.class.equals(Double.class) + || ObservabilityPipelineHttpServerSource.class.equals(Boolean.class) + || ObservabilityPipelineHttpServerSource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSumoLogicSource.class.equals(Integer.class) - || ObservabilityPipelineSumoLogicSource.class.equals(Long.class)) + ((ObservabilityPipelineHttpServerSource.class.equals(Integer.class) + || ObservabilityPipelineHttpServerSource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSumoLogicSource.class.equals(Float.class) - || ObservabilityPipelineSumoLogicSource.class.equals(Double.class)) + ((ObservabilityPipelineHttpServerSource.class.equals(Float.class) + || ObservabilityPipelineHttpServerSource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSumoLogicSource.class.equals(Boolean.class) + (ObservabilityPipelineHttpServerSource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSumoLogicSource.class.equals(String.class) + (ObservabilityPipelineHttpServerSource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSumoLogicSource.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineHttpServerSource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
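The HttpServer-to-HttpClient swap above stays unambiguous only because the type enum doubles as a de facto discriminator: a payload whose type string belongs to the other schema fails that candidate's enum parse, flips its unparsed flag, and leaves its match count at zero. A sketch of the two payloads; the literal type strings are assumptions read off the schema names and should match the enum values in openapi.yaml:

    // Hypothetical payloads, other required fields elided.
    String client = "{\"id\":\"in-1\",\"type\":\"http_client\"}";
    String server = "{\"id\":\"in-1\",\"type\":\"http_server\"}";
    // Each parses cleanly as exactly one of the two models; the other
    // candidate's type enum rejects it and sets unparsed.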
- if (!((ObservabilityPipelineSumoLogicSource) tmp).unparsed) { + if (!((ObservabilityPipelineHttpServerSource) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSumoLogicSource'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineHttpServerSource'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSumoLogicSource'", + "Input data does not match schema 'ObservabilityPipelineHttpServerSource'", + e); + } + + // deserialize ObservabilityPipelineKafkaSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineKafkaSource.class.equals(Integer.class) + || ObservabilityPipelineKafkaSource.class.equals(Long.class) + || ObservabilityPipelineKafkaSource.class.equals(Float.class) + || ObservabilityPipelineKafkaSource.class.equals(Double.class) + || ObservabilityPipelineKafkaSource.class.equals(Boolean.class) + || ObservabilityPipelineKafkaSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineKafkaSource.class.equals(Integer.class) + || ObservabilityPipelineKafkaSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineKafkaSource.class.equals(Float.class) + || ObservabilityPipelineKafkaSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineKafkaSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineKafkaSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineKafkaSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineKafkaSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineKafkaSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, "Input data does not match schema 'ObservabilityPipelineKafkaSource'", e); + } + + // deserialize ObservabilityPipelineLogstashSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineLogstashSource.class.equals(Integer.class) + || ObservabilityPipelineLogstashSource.class.equals(Long.class) + || ObservabilityPipelineLogstashSource.class.equals(Float.class) + || ObservabilityPipelineLogstashSource.class.equals(Double.class) + || ObservabilityPipelineLogstashSource.class.equals(Boolean.class) + || ObservabilityPipelineLogstashSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineLogstashSource.class.equals(Integer.class) + || ObservabilityPipelineLogstashSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineLogstashSource.class.equals(Float.class) + || ObservabilityPipelineLogstashSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineLogstashSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineLogstashSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineLogstashSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineLogstashSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineLogstashSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineLogstashSource'", e); } @@ -575,298 +628,296 @@ public ObservabilityPipelineConfigSourceItem deserialize( e); } - // deserialize ObservabilityPipelineSyslogNgSource + // deserialize ObservabilityPipelineSocketSource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSyslogNgSource.class.equals(Integer.class) - || ObservabilityPipelineSyslogNgSource.class.equals(Long.class) - || ObservabilityPipelineSyslogNgSource.class.equals(Float.class) - || ObservabilityPipelineSyslogNgSource.class.equals(Double.class) - || ObservabilityPipelineSyslogNgSource.class.equals(Boolean.class) - || ObservabilityPipelineSyslogNgSource.class.equals(String.class)) { + if (ObservabilityPipelineSocketSource.class.equals(Integer.class) + || ObservabilityPipelineSocketSource.class.equals(Long.class) + || ObservabilityPipelineSocketSource.class.equals(Float.class) + || ObservabilityPipelineSocketSource.class.equals(Double.class) + || ObservabilityPipelineSocketSource.class.equals(Boolean.class) + || ObservabilityPipelineSocketSource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSyslogNgSource.class.equals(Integer.class) - || ObservabilityPipelineSyslogNgSource.class.equals(Long.class)) + ((ObservabilityPipelineSocketSource.class.equals(Integer.class) + || ObservabilityPipelineSocketSource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSyslogNgSource.class.equals(Float.class) - || ObservabilityPipelineSyslogNgSource.class.equals(Double.class)) + ((ObservabilityPipelineSocketSource.class.equals(Float.class) + || ObservabilityPipelineSocketSource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSyslogNgSource.class.equals(Boolean.class) + (ObservabilityPipelineSocketSource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSyslogNgSource.class.equals(String.class) + (ObservabilityPipelineSocketSource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSyslogNgSource.class); + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSocketSource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineSyslogNgSource) tmp).unparsed) { + if (!((ObservabilityPipelineSocketSource) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSyslogNgSource'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSocketSource'"); } } catch (Exception e) { // deserialization failed, continue log.log( - Level.FINER, - "Input data does not match schema 'ObservabilityPipelineSyslogNgSource'", - e); + Level.FINER, "Input data does not match schema 'ObservabilityPipelineSocketSource'", e); } - // deserialize ObservabilityPipelineAmazonDataFirehoseSource + // deserialize ObservabilityPipelineSplunkHecSource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Integer.class) - || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Long.class) - || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Float.class) - || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Double.class) - || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Boolean.class) - || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(String.class)) { + if (ObservabilityPipelineSplunkHecSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Long.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Float.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Double.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class) + || ObservabilityPipelineSplunkHecSource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Integer.class) - || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Long.class)) + ((ObservabilityPipelineSplunkHecSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Float.class) - || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Double.class)) + ((ObservabilityPipelineSplunkHecSource.class.equals(Float.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Boolean.class) + (ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(String.class) + (ObservabilityPipelineSplunkHecSource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineAmazonDataFirehoseSource.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkHecSource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
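// Descriptive note: the type-coercion guard above can only fire when the
// candidate schema is a primitive wrapper (Integer, Long, Float, Double,
// Boolean, String); for generated model classes such as these every equals()
// check is false, so attemptParsing stays true and the parse is always tried.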
- if (!((ObservabilityPipelineAmazonDataFirehoseSource) tmp).unparsed) { + if (!((ObservabilityPipelineSplunkHecSource) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, - "Input data matches schema 'ObservabilityPipelineAmazonDataFirehoseSource'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecSource'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineAmazonDataFirehoseSource'", + "Input data does not match schema 'ObservabilityPipelineSplunkHecSource'", e); } - // deserialize ObservabilityPipelineGooglePubSubSource + // deserialize ObservabilityPipelineSplunkTcpSource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineGooglePubSubSource.class.equals(Integer.class) - || ObservabilityPipelineGooglePubSubSource.class.equals(Long.class) - || ObservabilityPipelineGooglePubSubSource.class.equals(Float.class) - || ObservabilityPipelineGooglePubSubSource.class.equals(Double.class) - || ObservabilityPipelineGooglePubSubSource.class.equals(Boolean.class) - || ObservabilityPipelineGooglePubSubSource.class.equals(String.class)) { + if (ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Float.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineGooglePubSubSource.class.equals(Integer.class) - || ObservabilityPipelineGooglePubSubSource.class.equals(Long.class)) + ((ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineGooglePubSubSource.class.equals(Float.class) - || ObservabilityPipelineGooglePubSubSource.class.equals(Double.class)) + ((ObservabilityPipelineSplunkTcpSource.class.equals(Float.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineGooglePubSubSource.class.equals(Boolean.class) + (ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineGooglePubSubSource.class.equals(String.class) + (ObservabilityPipelineSplunkTcpSource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()) - .readValueAs(ObservabilityPipelineGooglePubSubSource.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkTcpSource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineGooglePubSubSource) tmp).unparsed) { + if (!((ObservabilityPipelineSplunkTcpSource) tmp).unparsed) { deserialized = tmp; match++; } - log.log( - Level.FINER, "Input data matches schema 'ObservabilityPipelineGooglePubSubSource'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkTcpSource'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineGooglePubSubSource'", + "Input data does not match schema 'ObservabilityPipelineSplunkTcpSource'", e); } - // deserialize ObservabilityPipelineHttpClientSource + // deserialize ObservabilityPipelineSumoLogicSource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineHttpClientSource.class.equals(Integer.class) - || ObservabilityPipelineHttpClientSource.class.equals(Long.class) - || ObservabilityPipelineHttpClientSource.class.equals(Float.class) - || ObservabilityPipelineHttpClientSource.class.equals(Double.class) - || ObservabilityPipelineHttpClientSource.class.equals(Boolean.class) - || ObservabilityPipelineHttpClientSource.class.equals(String.class)) { + if (ObservabilityPipelineSumoLogicSource.class.equals(Integer.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Long.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Float.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Double.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Boolean.class) + || ObservabilityPipelineSumoLogicSource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineHttpClientSource.class.equals(Integer.class) - || ObservabilityPipelineHttpClientSource.class.equals(Long.class)) + ((ObservabilityPipelineSumoLogicSource.class.equals(Integer.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineHttpClientSource.class.equals(Float.class) - || ObservabilityPipelineHttpClientSource.class.equals(Double.class)) + ((ObservabilityPipelineSumoLogicSource.class.equals(Float.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineHttpClientSource.class.equals(Boolean.class) + (ObservabilityPipelineSumoLogicSource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineHttpClientSource.class.equals(String.class) + (ObservabilityPipelineSumoLogicSource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { tmp = - tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineHttpClientSource.class); + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSumoLogicSource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineHttpClientSource) tmp).unparsed) { + if (!((ObservabilityPipelineSumoLogicSource) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineHttpClientSource'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSumoLogicSource'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineHttpClientSource'", + "Input data does not match schema 'ObservabilityPipelineSumoLogicSource'", e); } - // deserialize ObservabilityPipelineLogstashSource + // deserialize ObservabilityPipelineSyslogNgSource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineLogstashSource.class.equals(Integer.class) - || ObservabilityPipelineLogstashSource.class.equals(Long.class) - || ObservabilityPipelineLogstashSource.class.equals(Float.class) - || ObservabilityPipelineLogstashSource.class.equals(Double.class) - || ObservabilityPipelineLogstashSource.class.equals(Boolean.class) - || ObservabilityPipelineLogstashSource.class.equals(String.class)) { + if (ObservabilityPipelineSyslogNgSource.class.equals(Integer.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Long.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Float.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Double.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Boolean.class) + || ObservabilityPipelineSyslogNgSource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineLogstashSource.class.equals(Integer.class) - || ObservabilityPipelineLogstashSource.class.equals(Long.class)) + ((ObservabilityPipelineSyslogNgSource.class.equals(Integer.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineLogstashSource.class.equals(Float.class) - || ObservabilityPipelineLogstashSource.class.equals(Double.class)) + ((ObservabilityPipelineSyslogNgSource.class.equals(Float.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineLogstashSource.class.equals(Boolean.class) + (ObservabilityPipelineSyslogNgSource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineLogstashSource.class.equals(String.class) + (ObservabilityPipelineSyslogNgSource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineLogstashSource.class); + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSyslogNgSource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- if (!((ObservabilityPipelineLogstashSource) tmp).unparsed) { + if (!((ObservabilityPipelineSyslogNgSource) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineLogstashSource'"); + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSyslogNgSource'"); } } catch (Exception e) { // deserialization failed, continue log.log( Level.FINER, - "Input data does not match schema 'ObservabilityPipelineLogstashSource'", + "Input data does not match schema 'ObservabilityPipelineSyslogNgSource'", e); } - // deserialize ObservabilityPipelineSocketSource + // deserialize ObservabilityPipelineOpentelemetrySource try { boolean attemptParsing = true; // ensure that we respect type coercion as set on the client ObjectMapper - if (ObservabilityPipelineSocketSource.class.equals(Integer.class) - || ObservabilityPipelineSocketSource.class.equals(Long.class) - || ObservabilityPipelineSocketSource.class.equals(Float.class) - || ObservabilityPipelineSocketSource.class.equals(Double.class) - || ObservabilityPipelineSocketSource.class.equals(Boolean.class) - || ObservabilityPipelineSocketSource.class.equals(String.class)) { + if (ObservabilityPipelineOpentelemetrySource.class.equals(Integer.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Long.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Float.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Double.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Boolean.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(String.class)) { attemptParsing = typeCoercion; if (!attemptParsing) { attemptParsing |= - ((ObservabilityPipelineSocketSource.class.equals(Integer.class) - || ObservabilityPipelineSocketSource.class.equals(Long.class)) + ((ObservabilityPipelineOpentelemetrySource.class.equals(Integer.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Long.class)) && token == JsonToken.VALUE_NUMBER_INT); attemptParsing |= - ((ObservabilityPipelineSocketSource.class.equals(Float.class) - || ObservabilityPipelineSocketSource.class.equals(Double.class)) + ((ObservabilityPipelineOpentelemetrySource.class.equals(Float.class) + || ObservabilityPipelineOpentelemetrySource.class.equals(Double.class)) && (token == JsonToken.VALUE_NUMBER_FLOAT || token == JsonToken.VALUE_NUMBER_INT)); attemptParsing |= - (ObservabilityPipelineSocketSource.class.equals(Boolean.class) + (ObservabilityPipelineOpentelemetrySource.class.equals(Boolean.class) && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); attemptParsing |= - (ObservabilityPipelineSocketSource.class.equals(String.class) + (ObservabilityPipelineOpentelemetrySource.class.equals(String.class) && token == JsonToken.VALUE_STRING); } } if (attemptParsing) { - tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSocketSource.class); + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineOpentelemetrySource.class); // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
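// Illustrative caller-side sketch (hypothetical code, not part of the
// generated diff): a concrete source is wrapped via the matching oneOf
// constructor and read back through the typed accessor, which throws
// ClassCastException if a different branch is stored:
//   ObservabilityPipelineKafkaSource kafka = new ObservabilityPipelineKafkaSource();
//   ObservabilityPipelineConfigSourceItem item =
//       new ObservabilityPipelineConfigSourceItem(kafka);
//   ObservabilityPipelineKafkaSource back = item.getObservabilityPipelineKafkaSource();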
- if (!((ObservabilityPipelineSocketSource) tmp).unparsed) { + if (!((ObservabilityPipelineOpentelemetrySource) tmp).unparsed) { deserialized = tmp; match++; } - log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSocketSource'"); + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineOpentelemetrySource'"); } } catch (Exception e) { // deserialization failed, continue log.log( - Level.FINER, "Input data does not match schema 'ObservabilityPipelineSocketSource'", e); + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineOpentelemetrySource'", + e); } ObservabilityPipelineConfigSourceItem ret = new ObservabilityPipelineConfigSourceItem(); @@ -899,37 +950,37 @@ public ObservabilityPipelineConfigSourceItem() { super("oneOf", Boolean.FALSE); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineKafkaSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineDatadogAgentSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineDatadogAgentSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonDataFirehoseSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkTcpSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonS3Source o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkHecSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineFluentBitSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonS3Source o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineFluentdSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineFluentdSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineGooglePubSubSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineFluentBitSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineHttpClientSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } @@ -939,7 +990,12 @@ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineHttpServerSour setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSumoLogicSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineKafkaSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineLogstashSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } @@ -949,84 +1005,87 @@ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineRsyslogSource setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSyslogNgSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSocketSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonDataFirehoseSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkHecSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - 
public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineGooglePubSubSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkTcpSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineHttpClientSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSumoLogicSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineLogstashSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSyslogNgSource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } - public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSocketSource o) { + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineOpentelemetrySource o) { super("oneOf", Boolean.FALSE); setActualInstance(o); } static { - schemas.put( - "ObservabilityPipelineKafkaSource", new GenericType<ObservabilityPipelineKafkaSource>() {}); schemas.put( "ObservabilityPipelineDatadogAgentSource", new GenericType<ObservabilityPipelineDatadogAgentSource>() {}); schemas.put( - "ObservabilityPipelineSplunkTcpSource", - new GenericType<ObservabilityPipelineSplunkTcpSource>() {}); - schemas.put( - "ObservabilityPipelineSplunkHecSource", - new GenericType<ObservabilityPipelineSplunkHecSource>() {}); + "ObservabilityPipelineAmazonDataFirehoseSource", + new GenericType<ObservabilityPipelineAmazonDataFirehoseSource>() {}); schemas.put( "ObservabilityPipelineAmazonS3Source", new GenericType<ObservabilityPipelineAmazonS3Source>() {}); + schemas.put( + "ObservabilityPipelineFluentBitSource", + new GenericType<ObservabilityPipelineFluentBitSource>() {}); schemas.put( "ObservabilityPipelineFluentdSource", new GenericType<ObservabilityPipelineFluentdSource>() {}); schemas.put( - "ObservabilityPipelineFluentBitSource", - new GenericType<ObservabilityPipelineFluentBitSource>() {}); + "ObservabilityPipelineGooglePubSubSource", + new GenericType<ObservabilityPipelineGooglePubSubSource>() {}); + schemas.put( + "ObservabilityPipelineHttpClientSource", + new GenericType<ObservabilityPipelineHttpClientSource>() {}); schemas.put( "ObservabilityPipelineHttpServerSource", new GenericType<ObservabilityPipelineHttpServerSource>() {}); schemas.put( - "ObservabilityPipelineSumoLogicSource", - new GenericType<ObservabilityPipelineSumoLogicSource>() {}); + "ObservabilityPipelineKafkaSource", new GenericType<ObservabilityPipelineKafkaSource>() {}); + schemas.put( + "ObservabilityPipelineLogstashSource", + new GenericType<ObservabilityPipelineLogstashSource>() {}); schemas.put( "ObservabilityPipelineRsyslogSource", new GenericType<ObservabilityPipelineRsyslogSource>() {}); schemas.put( - "ObservabilityPipelineSyslogNgSource", - new GenericType<ObservabilityPipelineSyslogNgSource>() {}); + "ObservabilityPipelineSocketSource", + new GenericType<ObservabilityPipelineSocketSource>() {}); schemas.put( - "ObservabilityPipelineAmazonDataFirehoseSource", - new GenericType<ObservabilityPipelineAmazonDataFirehoseSource>() {}); + "ObservabilityPipelineSplunkHecSource", + new GenericType<ObservabilityPipelineSplunkHecSource>() {}); schemas.put( - "ObservabilityPipelineGooglePubSubSource", - new GenericType<ObservabilityPipelineGooglePubSubSource>() {}); + "ObservabilityPipelineSplunkTcpSource", + new GenericType<ObservabilityPipelineSplunkTcpSource>() {}); schemas.put( - "ObservabilityPipelineHttpClientSource", - new GenericType<ObservabilityPipelineHttpClientSource>() {}); + "ObservabilityPipelineSumoLogicSource", + new GenericType<ObservabilityPipelineSumoLogicSource>() {}); schemas.put( - "ObservabilityPipelineLogstashSource", - new GenericType<ObservabilityPipelineLogstashSource>() {}); + "ObservabilityPipelineSyslogNgSource", + new GenericType<ObservabilityPipelineSyslogNgSource>() {}); schemas.put( - "ObservabilityPipelineSocketSource", - new GenericType<ObservabilityPipelineSocketSource>() {}); + "ObservabilityPipelineOpentelemetrySource", + new GenericType<ObservabilityPipelineOpentelemetrySource>() {}); JSON.registerDescendants( ObservabilityPipelineConfigSourceItem.class, Collections.unmodifiableMap(schemas)); } @@ -1038,15 +1097,15 @@ public Map<String, GenericType> getSchemas() { /** * Set the instance that matches the oneOf child schema, check the instance parameter is valid - * against the oneOf child schemas: ObservabilityPipelineKafkaSource, - * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource, - * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source,
- * ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource, - * ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource, - * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource, - * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource, - * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource, - * ObservabilityPipelineSocketSource + * against the oneOf child schemas: ObservabilityPipelineDatadogAgentSource, + * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source, + * ObservabilityPipelineFluentBitSource, ObservabilityPipelineFluentdSource, + * ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource, + * ObservabilityPipelineHttpServerSource, ObservabilityPipelineKafkaSource, + * ObservabilityPipelineLogstashSource, ObservabilityPipelineRsyslogSource, + * ObservabilityPipelineSocketSource, ObservabilityPipelineSplunkHecSource, + * ObservabilityPipelineSplunkTcpSource, ObservabilityPipelineSumoLogicSource, + * ObservabilityPipelineSyslogNgSource, ObservabilityPipelineOpentelemetrySource * *
<p>
It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -1054,37 +1113,37 @@ public Map<String, GenericType> getSchemas() { @Override public void setActualInstance(Object instance) { if (JSON.isInstanceOf( - ObservabilityPipelineKafkaSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineDatadogAgentSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineDatadogAgentSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineAmazonDataFirehoseSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineSplunkTcpSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineAmazonS3Source.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineSplunkHecSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineFluentBitSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineAmazonS3Source.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineFluentdSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineFluentdSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineGooglePubSubSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineFluentBitSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineHttpClientSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } @@ -1094,7 +1153,12 @@ public void setActualInstance(Object instance) { return; } if (JSON.isInstanceOf( - ObservabilityPipelineSumoLogicSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineKafkaSource.class, instance, new HashSet<Class<?>>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineLogstashSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } @@ -1104,32 +1168,32 @@ public void setActualInstance(Object instance) { return; } if (JSON.isInstanceOf( - ObservabilityPipelineSyslogNgSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineSocketSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineAmazonDataFirehoseSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineSplunkHecSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineGooglePubSubSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineSplunkTcpSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineHttpClientSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineSumoLogicSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineLogstashSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineSyslogNgSource.class, instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } if (JSON.isInstanceOf( - ObservabilityPipelineSocketSource.class, instance, new HashSet<Class<?>>())) { + ObservabilityPipelineOpentelemetrySource.class,
instance, new HashSet<Class<?>>())) { super.setActualInstance(instance); return; } @@ -1139,55 +1203,43 @@ public void setActualInstance(Object instance) { return; } throw new RuntimeException( - "Invalid instance type. Must be ObservabilityPipelineKafkaSource," - + " ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource," - + " ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source," - + " ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource," - + " ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource," - + " ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource," - + " ObservabilityPipelineAmazonDataFirehoseSource," + "Invalid instance type. Must be ObservabilityPipelineDatadogAgentSource," + + " ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source," + + " ObservabilityPipelineFluentBitSource, ObservabilityPipelineFluentdSource," + " ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource," - + " ObservabilityPipelineLogstashSource, ObservabilityPipelineSocketSource"); + + " ObservabilityPipelineHttpServerSource, ObservabilityPipelineKafkaSource," + + " ObservabilityPipelineLogstashSource, ObservabilityPipelineRsyslogSource," + + " ObservabilityPipelineSocketSource, ObservabilityPipelineSplunkHecSource," + + " ObservabilityPipelineSplunkTcpSource, ObservabilityPipelineSumoLogicSource," + + " ObservabilityPipelineSyslogNgSource, ObservabilityPipelineOpentelemetrySource"); } /** - * Get the actual instance, which can be the following: ObservabilityPipelineKafkaSource, - * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource, - * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source, - * ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource, - * ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource, - * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource, - * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource, - * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource, - * ObservabilityPipelineSocketSource + * Get the actual instance, which can be the following: ObservabilityPipelineDatadogAgentSource, + * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source, + * ObservabilityPipelineFluentBitSource, ObservabilityPipelineFluentdSource, + * ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource, + * ObservabilityPipelineHttpServerSource, ObservabilityPipelineKafkaSource, + * ObservabilityPipelineLogstashSource, ObservabilityPipelineRsyslogSource, + * ObservabilityPipelineSocketSource, ObservabilityPipelineSplunkHecSource, + * ObservabilityPipelineSplunkTcpSource, ObservabilityPipelineSumoLogicSource, + * ObservabilityPipelineSyslogNgSource, ObservabilityPipelineOpentelemetrySource * - * @return The actual instance (ObservabilityPipelineKafkaSource, - * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource, - * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source, - * ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource, - * ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource, - * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource, - * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource, - *
ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource, - * ObservabilityPipelineSocketSource) + * @return The actual instance (ObservabilityPipelineDatadogAgentSource, + * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source, + * ObservabilityPipelineFluentBitSource, ObservabilityPipelineFluentdSource, + * ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource, + * ObservabilityPipelineHttpServerSource, ObservabilityPipelineKafkaSource, + * ObservabilityPipelineLogstashSource, ObservabilityPipelineRsyslogSource, + * ObservabilityPipelineSocketSource, ObservabilityPipelineSplunkHecSource, + * ObservabilityPipelineSplunkTcpSource, ObservabilityPipelineSumoLogicSource, + * ObservabilityPipelineSyslogNgSource, ObservabilityPipelineOpentelemetrySource) */ @Override public Object getActualInstance() { return super.getActualInstance(); } - /** - * Get the actual instance of `ObservabilityPipelineKafkaSource`. If the actual instance is not - * `ObservabilityPipelineKafkaSource`, the ClassCastException will be thrown. - * - * @return The actual instance of `ObservabilityPipelineKafkaSource` - * @throws ClassCastException if the instance is not `ObservabilityPipelineKafkaSource` - */ - public ObservabilityPipelineKafkaSource getObservabilityPipelineKafkaSource() - throws ClassCastException { - return (ObservabilityPipelineKafkaSource) super.getActualInstance(); - } - /** * Get the actual instance of `ObservabilityPipelineDatadogAgentSource`. If the actual instance is * not `ObservabilityPipelineDatadogAgentSource`, the ClassCastException will be thrown. @@ -1201,27 +1253,17 @@ public ObservabilityPipelineDatadogAgentSource getObservabilityPipelineDatadogAg } /** - * Get the actual instance of `ObservabilityPipelineSplunkTcpSource`. If the actual instance is - * not `ObservabilityPipelineSplunkTcpSource`, the ClassCastException will be thrown. - * - * @return The actual instance of `ObservabilityPipelineSplunkTcpSource` - * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkTcpSource` - */ - public ObservabilityPipelineSplunkTcpSource getObservabilityPipelineSplunkTcpSource() - throws ClassCastException { - return (ObservabilityPipelineSplunkTcpSource) super.getActualInstance(); - } - - /** - * Get the actual instance of `ObservabilityPipelineSplunkHecSource`. If the actual instance is - * not `ObservabilityPipelineSplunkHecSource`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineAmazonDataFirehoseSource`. If the actual + * instance is not `ObservabilityPipelineAmazonDataFirehoseSource`, the ClassCastException will be + * thrown. 
* - * @return The actual instance of `ObservabilityPipelineSplunkHecSource` - * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecSource` + * @return The actual instance of `ObservabilityPipelineAmazonDataFirehoseSource` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineAmazonDataFirehoseSource` */ - public ObservabilityPipelineSplunkHecSource getObservabilityPipelineSplunkHecSource() - throws ClassCastException { - return (ObservabilityPipelineSplunkHecSource) super.getActualInstance(); + public ObservabilityPipelineAmazonDataFirehoseSource + getObservabilityPipelineAmazonDataFirehoseSource() throws ClassCastException { + return (ObservabilityPipelineAmazonDataFirehoseSource) super.getActualInstance(); } /** @@ -1236,6 +1278,18 @@ public ObservabilityPipelineAmazonS3Source getObservabilityPipelineAmazonS3Sourc return (ObservabilityPipelineAmazonS3Source) super.getActualInstance(); } + /** + * Get the actual instance of `ObservabilityPipelineFluentBitSource`. If the actual instance is + * not `ObservabilityPipelineFluentBitSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineFluentBitSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineFluentBitSource` + */ + public ObservabilityPipelineFluentBitSource getObservabilityPipelineFluentBitSource() + throws ClassCastException { + return (ObservabilityPipelineFluentBitSource) super.getActualInstance(); + } + /** * Get the actual instance of `ObservabilityPipelineFluentdSource`. If the actual instance is not * `ObservabilityPipelineFluentdSource`, the ClassCastException will be thrown. @@ -1249,15 +1303,27 @@ public ObservabilityPipelineFluentdSource getObservabilityPipelineFluentdSource( } /** - * Get the actual instance of `ObservabilityPipelineFluentBitSource`. If the actual instance is - * not `ObservabilityPipelineFluentBitSource`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineGooglePubSubSource`. If the actual instance is + * not `ObservabilityPipelineGooglePubSubSource`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineFluentBitSource` - * @throws ClassCastException if the instance is not `ObservabilityPipelineFluentBitSource` + * @return The actual instance of `ObservabilityPipelineGooglePubSubSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineGooglePubSubSource` */ - public ObservabilityPipelineFluentBitSource getObservabilityPipelineFluentBitSource() + public ObservabilityPipelineGooglePubSubSource getObservabilityPipelineGooglePubSubSource() throws ClassCastException { - return (ObservabilityPipelineFluentBitSource) super.getActualInstance(); + return (ObservabilityPipelineGooglePubSubSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineHttpClientSource`. If the actual instance is + * not `ObservabilityPipelineHttpClientSource`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineHttpClientSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineHttpClientSource` + */ + public ObservabilityPipelineHttpClientSource getObservabilityPipelineHttpClientSource() + throws ClassCastException { + return (ObservabilityPipelineHttpClientSource) super.getActualInstance(); } /** @@ -1273,15 +1339,27 @@ public ObservabilityPipelineHttpServerSource getObservabilityPipelineHttpServerS } /** - * Get the actual instance of `ObservabilityPipelineSumoLogicSource`. If the actual instance is - * not `ObservabilityPipelineSumoLogicSource`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineKafkaSource`. If the actual instance is not + * `ObservabilityPipelineKafkaSource`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineSumoLogicSource` - * @throws ClassCastException if the instance is not `ObservabilityPipelineSumoLogicSource` + * @return The actual instance of `ObservabilityPipelineKafkaSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineKafkaSource` */ - public ObservabilityPipelineSumoLogicSource getObservabilityPipelineSumoLogicSource() + public ObservabilityPipelineKafkaSource getObservabilityPipelineKafkaSource() throws ClassCastException { - return (ObservabilityPipelineSumoLogicSource) super.getActualInstance(); + return (ObservabilityPipelineKafkaSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineLogstashSource`. If the actual instance is not + * `ObservabilityPipelineLogstashSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineLogstashSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineLogstashSource` + */ + public ObservabilityPipelineLogstashSource getObservabilityPipelineLogstashSource() + throws ClassCastException { + return (ObservabilityPipelineLogstashSource) super.getActualInstance(); } /** @@ -1297,76 +1375,74 @@ public ObservabilityPipelineRsyslogSource getObservabilityPipelineRsyslogSource( } /** - * Get the actual instance of `ObservabilityPipelineSyslogNgSource`. If the actual instance is not - * `ObservabilityPipelineSyslogNgSource`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineSocketSource`. If the actual instance is not + * `ObservabilityPipelineSocketSource`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineSyslogNgSource` - * @throws ClassCastException if the instance is not `ObservabilityPipelineSyslogNgSource` + * @return The actual instance of `ObservabilityPipelineSocketSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSocketSource` */ - public ObservabilityPipelineSyslogNgSource getObservabilityPipelineSyslogNgSource() + public ObservabilityPipelineSocketSource getObservabilityPipelineSocketSource() throws ClassCastException { - return (ObservabilityPipelineSyslogNgSource) super.getActualInstance(); + return (ObservabilityPipelineSocketSource) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineAmazonDataFirehoseSource`. If the actual - * instance is not `ObservabilityPipelineAmazonDataFirehoseSource`, the ClassCastException will be - * thrown. + * Get the actual instance of `ObservabilityPipelineSplunkHecSource`. 
If the actual instance is + * not `ObservabilityPipelineSplunkHecSource`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineAmazonDataFirehoseSource` - * @throws ClassCastException if the instance is not - * `ObservabilityPipelineAmazonDataFirehoseSource` + * @return The actual instance of `ObservabilityPipelineSplunkHecSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecSource` */ - public ObservabilityPipelineAmazonDataFirehoseSource - getObservabilityPipelineAmazonDataFirehoseSource() throws ClassCastException { - return (ObservabilityPipelineAmazonDataFirehoseSource) super.getActualInstance(); + public ObservabilityPipelineSplunkHecSource getObservabilityPipelineSplunkHecSource() + throws ClassCastException { + return (ObservabilityPipelineSplunkHecSource) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineGooglePubSubSource`. If the actual instance is - * not `ObservabilityPipelineGooglePubSubSource`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineSplunkTcpSource`. If the actual instance is + * not `ObservabilityPipelineSplunkTcpSource`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineGooglePubSubSource` - * @throws ClassCastException if the instance is not `ObservabilityPipelineGooglePubSubSource` + * @return The actual instance of `ObservabilityPipelineSplunkTcpSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkTcpSource` */ - public ObservabilityPipelineGooglePubSubSource getObservabilityPipelineGooglePubSubSource() + public ObservabilityPipelineSplunkTcpSource getObservabilityPipelineSplunkTcpSource() throws ClassCastException { - return (ObservabilityPipelineGooglePubSubSource) super.getActualInstance(); + return (ObservabilityPipelineSplunkTcpSource) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineHttpClientSource`. If the actual instance is - * not `ObservabilityPipelineHttpClientSource`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineSumoLogicSource`. If the actual instance is + * not `ObservabilityPipelineSumoLogicSource`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineHttpClientSource` - * @throws ClassCastException if the instance is not `ObservabilityPipelineHttpClientSource` + * @return The actual instance of `ObservabilityPipelineSumoLogicSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSumoLogicSource` */ - public ObservabilityPipelineHttpClientSource getObservabilityPipelineHttpClientSource() + public ObservabilityPipelineSumoLogicSource getObservabilityPipelineSumoLogicSource() throws ClassCastException { - return (ObservabilityPipelineHttpClientSource) super.getActualInstance(); + return (ObservabilityPipelineSumoLogicSource) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineLogstashSource`. If the actual instance is not - * `ObservabilityPipelineLogstashSource`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineSyslogNgSource`. If the actual instance is not + * `ObservabilityPipelineSyslogNgSource`, the ClassCastException will be thrown. 
* - * @return The actual instance of `ObservabilityPipelineLogstashSource` - * @throws ClassCastException if the instance is not `ObservabilityPipelineLogstashSource` + * @return The actual instance of `ObservabilityPipelineSyslogNgSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSyslogNgSource` */ - public ObservabilityPipelineLogstashSource getObservabilityPipelineLogstashSource() + public ObservabilityPipelineSyslogNgSource getObservabilityPipelineSyslogNgSource() throws ClassCastException { - return (ObservabilityPipelineLogstashSource) super.getActualInstance(); + return (ObservabilityPipelineSyslogNgSource) super.getActualInstance(); } /** - * Get the actual instance of `ObservabilityPipelineSocketSource`. If the actual instance is not - * `ObservabilityPipelineSocketSource`, the ClassCastException will be thrown. + * Get the actual instance of `ObservabilityPipelineOpentelemetrySource`. If the actual instance + * is not `ObservabilityPipelineOpentelemetrySource`, the ClassCastException will be thrown. * - * @return The actual instance of `ObservabilityPipelineSocketSource` - * @throws ClassCastException if the instance is not `ObservabilityPipelineSocketSource` + * @return The actual instance of `ObservabilityPipelineOpentelemetrySource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineOpentelemetrySource` */ - public ObservabilityPipelineSocketSource getObservabilityPipelineSocketSource() + public ObservabilityPipelineOpentelemetrySource getObservabilityPipelineOpentelemetrySource() throws ClassCastException { - return (ObservabilityPipelineSocketSource) super.getActualInstance(); + return (ObservabilityPipelineOpentelemetrySource) super.getActualInstance(); } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCrowdStrikeNextGenSiemDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCrowdStrikeNextGenSiemDestination.java index 5709d5ab340..20fb23651d9 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCrowdStrikeNextGenSiemDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCrowdStrikeNextGenSiemDestination.java @@ -22,6 +22,8 @@ /** * The crowdstrike_next_gen_siem destination forwards logs to CrowdStrike Next Gen * SIEM. + * + *
<p>
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineCrowdStrikeNextGenSiemDestination.JSON_PROPERTY_COMPRESSION, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCustomProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCustomProcessor.java index 1855d3fa5ef..9e6e905d6f8 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCustomProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCustomProcessor.java @@ -23,6 +23,8 @@ * The custom_processor processor transforms events using Vector Remap Language (VRL) scripts with * advanced filtering capabilities. + * + *
<p>
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineCustomProcessor.JSON_PROPERTY_DISPLAY_NAME, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java index f7a44ea620b..18c20a617f4 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The datadog_agent source collects logs from the Datadog Agent. */ +/** + * The datadog_agent source collects logs/metrics from the Datadog Agent. + * + *
<p>
Supported pipeline types: logs, metrics + */ @JsonPropertyOrder({ ObservabilityPipelineDatadogAgentSource.JSON_PROPERTY_ID, ObservabilityPipelineDatadogAgentSource.JSON_PROPERTY_TLS, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java index c408c2da16f..c8f06e398af 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The datadog_logs destination forwards logs to Datadog Log Management. */ +/** + * The datadog_logs destination forwards logs to Datadog Log Management. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineDatadogLogsDestination.JSON_PROPERTY_ID, ObservabilityPipelineDatadogLogsDestination.JSON_PROPERTY_INPUTS, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestination.java new file mode 100644 index 00000000000..17a7a529012 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestination.java @@ -0,0 +1,224 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The datadog_metrics destination forwards metrics to Datadog. + * + *
<p>
Supported pipeline types: <code>metrics</code> + */ @JsonPropertyOrder({ + ObservabilityPipelineDatadogMetricsDestination.JSON_PROPERTY_ID, + ObservabilityPipelineDatadogMetricsDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineDatadogMetricsDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineDatadogMetricsDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List<String> inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineDatadogMetricsDestinationType type = + ObservabilityPipelineDatadogMetricsDestinationType.DATADOG_METRICS; + + public ObservabilityPipelineDatadogMetricsDestination() {} + + @JsonCreator + public ObservabilityPipelineDatadogMetricsDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List<String> inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineDatadogMetricsDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineDatadogMetricsDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineDatadogMetricsDestination inputs(List<String> inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineDatadogMetricsDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List<String> getInputs() { + return inputs; + } + + public void setInputs(List<String> inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineDatadogMetricsDestination type( + ObservabilityPipelineDatadogMetricsDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be <code>datadog_metrics</code>. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineDatadogMetricsDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineDatadogMetricsDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map<String, Object> additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it.
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineDatadogMetricsDestination + */ + @JsonAnySetter + public ObservabilityPipelineDatadogMetricsDestination putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineDatadogMetricsDestination object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineDatadogMetricsDestination observabilityPipelineDatadogMetricsDestination = + (ObservabilityPipelineDatadogMetricsDestination) o; + return Objects.equals(this.id, observabilityPipelineDatadogMetricsDestination.id) + && Objects.equals(this.inputs, observabilityPipelineDatadogMetricsDestination.inputs) + && Objects.equals(this.type, observabilityPipelineDatadogMetricsDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineDatadogMetricsDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineDatadogMetricsDestination {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestinationType.java new file mode 100644 index 00000000000..e4ff1e92bca --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogMetricsDestinationType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
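Constructing the new destination itself is straightforward: the required `id`, `inputs`, and `type` fields flow through the `@JsonCreator` constructor, and `inputs` can be extended later with `addInputsItem`. A minimal sketch; the component IDs are invented for the example.

```java
import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogMetricsDestination;
import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogMetricsDestinationType;

import java.util.Arrays;

public class DatadogMetricsDestinationSketch {
  public static void main(String[] args) {
    ObservabilityPipelineDatadogMetricsDestination destination =
        new ObservabilityPipelineDatadogMetricsDestination(
            "datadog-metrics-destination",          // id: unique component identifier (example)
            Arrays.asList("metric-tags-processor"), // inputs: upstream component IDs (example)
            ObservabilityPipelineDatadogMetricsDestinationType.DATADOG_METRICS);

    // The generated fluent API also allows appending inputs after construction.
    destination.addInputsItem("filter-processor");

    System.out.println(destination);
  }
}
```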
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be datadog_metrics. */ +@JsonSerialize( + using = + ObservabilityPipelineDatadogMetricsDestinationType + .ObservabilityPipelineDatadogMetricsDestinationTypeSerializer.class) +public class ObservabilityPipelineDatadogMetricsDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("datadog_metrics")); + + public static final ObservabilityPipelineDatadogMetricsDestinationType DATADOG_METRICS = + new ObservabilityPipelineDatadogMetricsDestinationType("datadog_metrics"); + + ObservabilityPipelineDatadogMetricsDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineDatadogMetricsDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineDatadogMetricsDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineDatadogMetricsDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineDatadogMetricsDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineDatadogMetricsDestinationType fromValue(String value) { + return new ObservabilityPipelineDatadogMetricsDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogTagsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogTagsProcessor.java index 4bd1a931dc6..8337c9980ea 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogTagsProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogTagsProcessor.java @@ -21,6 +21,8 @@ /** * The datadog_tags processor includes or excludes specific Datadog tags in your logs. + * + *
<p>
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineDatadogTagsProcessor.JSON_PROPERTY_ACTION, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java index 593b6c0ddfb..84a8c6e58e7 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The dedupe processor removes duplicate fields in log events. */ +/** + * The dedupe processor removes duplicate fields in log events. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineDedupeProcessor.JSON_PROPERTY_DISPLAY_NAME, ObservabilityPipelineDedupeProcessor.JSON_PROPERTY_ENABLED, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java index c414b55e50e..af6f8d805d0 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java @@ -19,10 +19,15 @@ import java.util.Map; import java.util.Objects; -/** The elasticsearch destination writes logs to an Elasticsearch cluster. */ +/** + * The elasticsearch destination writes logs to an Elasticsearch cluster. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_API_VERSION, ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_BULK_INDEX, + ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_DATA_STREAM, ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_ID, ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_INPUTS, ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_TYPE @@ -37,6 +42,9 @@ public class ObservabilityPipelineElasticsearchDestination { public static final String JSON_PROPERTY_BULK_INDEX = "bulk_index"; private String bulkIndex; + public static final String JSON_PROPERTY_DATA_STREAM = "data_stream"; + private ObservabilityPipelineElasticsearchDestinationDataStream dataStream; + public static final String JSON_PROPERTY_ID = "id"; private String id; @@ -108,6 +116,29 @@ public void setBulkIndex(String bulkIndex) { this.bulkIndex = bulkIndex; } + public ObservabilityPipelineElasticsearchDestination dataStream( + ObservabilityPipelineElasticsearchDestinationDataStream dataStream) { + this.dataStream = dataStream; + this.unparsed |= dataStream.unparsed; + return this; + } + + /** + * Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + * + * @return dataStream + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DATA_STREAM) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineElasticsearchDestinationDataStream getDataStream() { + return dataStream; + } + + public void setDataStream(ObservabilityPipelineElasticsearchDestinationDataStream dataStream) { + this.dataStream = dataStream; + } + public ObservabilityPipelineElasticsearchDestination id(String id) { this.id = id; return this; @@ -238,6 +269,7 @@ public boolean equals(Object o) { (ObservabilityPipelineElasticsearchDestination) o; return Objects.equals(this.apiVersion, observabilityPipelineElasticsearchDestination.apiVersion) && Objects.equals(this.bulkIndex, observabilityPipelineElasticsearchDestination.bulkIndex) + && Objects.equals(this.dataStream, observabilityPipelineElasticsearchDestination.dataStream) && Objects.equals(this.id, observabilityPipelineElasticsearchDestination.id) && Objects.equals(this.inputs, observabilityPipelineElasticsearchDestination.inputs) && Objects.equals(this.type, observabilityPipelineElasticsearchDestination.type) @@ -248,7 +280,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(apiVersion, bulkIndex, id, inputs, type, additionalProperties); + return Objects.hash(apiVersion, bulkIndex, dataStream, id, inputs, type, additionalProperties); } @Override @@ -257,6 +289,7 @@ public String toString() { sb.append("class ObservabilityPipelineElasticsearchDestination {\n"); sb.append(" apiVersion: ").append(toIndentedString(apiVersion)).append("\n"); sb.append(" bulkIndex: ").append(toIndentedString(bulkIndex)).append("\n"); + sb.append(" dataStream: ").append(toIndentedString(dataStream)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationDataStream.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationDataStream.java new file mode 100644 index 00000000000..013ee28fc95 --- 
/dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationDataStream.java @@ -0,0 +1,203 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. */ +@JsonPropertyOrder({ + ObservabilityPipelineElasticsearchDestinationDataStream.JSON_PROPERTY_DATASET, + ObservabilityPipelineElasticsearchDestinationDataStream.JSON_PROPERTY_DTYPE, + ObservabilityPipelineElasticsearchDestinationDataStream.JSON_PROPERTY_NAMESPACE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineElasticsearchDestinationDataStream { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DATASET = "dataset"; + private String dataset; + + public static final String JSON_PROPERTY_DTYPE = "dtype"; + private String dtype; + + public static final String JSON_PROPERTY_NAMESPACE = "namespace"; + private String namespace; + + public ObservabilityPipelineElasticsearchDestinationDataStream dataset(String dataset) { + this.dataset = dataset; + return this; + } + + /** + * The data stream dataset for your logs. This groups logs by their source or application. + * + * @return dataset + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DATASET) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDataset() { + return dataset; + } + + public void setDataset(String dataset) { + this.dataset = dataset; + } + + public ObservabilityPipelineElasticsearchDestinationDataStream dtype(String dtype) { + this.dtype = dtype; + return this; + } + + /** + * The data stream type for your logs. This determines how logs are categorized within the data + * stream. + * + * @return dtype + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DTYPE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDtype() { + return dtype; + } + + public void setDtype(String dtype) { + this.dtype = dtype; + } + + public ObservabilityPipelineElasticsearchDestinationDataStream namespace(String namespace) { + this.namespace = namespace; + return this; + } + + /** + * The data stream namespace for your logs. This separates logs into different environments or + * domains. + * + * @return namespace + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_NAMESPACE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getNamespace() { + return namespace; + } + + public void setNamespace(String namespace) { + this.namespace = namespace; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. 
+ */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineElasticsearchDestinationDataStream + */ + @JsonAnySetter + public ObservabilityPipelineElasticsearchDestinationDataStream putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineElasticsearchDestinationDataStream object is equal to + * o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineElasticsearchDestinationDataStream + observabilityPipelineElasticsearchDestinationDataStream = + (ObservabilityPipelineElasticsearchDestinationDataStream) o; + return Objects.equals( + this.dataset, observabilityPipelineElasticsearchDestinationDataStream.dataset) + && Objects.equals(this.dtype, observabilityPipelineElasticsearchDestinationDataStream.dtype) + && Objects.equals( + this.namespace, observabilityPipelineElasticsearchDestinationDataStream.namespace) + && Objects.equals( + this.additionalProperties, + observabilityPipelineElasticsearchDestinationDataStream.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(dataset, dtype, namespace, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineElasticsearchDestinationDataStream {\n"); + sb.append(" dataset: ").append(toIndentedString(dataset)).append("\n"); + sb.append(" dtype: ").append(toIndentedString(dtype)).append("\n"); + sb.append(" namespace: ").append(toIndentedString(namespace)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
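Since the `data_stream` object is new, here is a minimal sketch of how it composes with the Elasticsearch destination changes above. All three fields are optional, the values are examples, and Elasticsearch conventionally derives the backing stream name as `<type>-<dataset>-<namespace>`.

```java
import com.datadog.api.client.v2.model.ObservabilityPipelineElasticsearchDestinationDataStream;

public class ElasticsearchDataStreamSketch {
  public static void main(String[] args) {
    // Each fluent setter returns `this`, so the optional fields chain.
    ObservabilityPipelineElasticsearchDestinationDataStream dataStream =
        new ObservabilityPipelineElasticsearchDestinationDataStream()
            .dtype("logs")            // data stream type (example value)
            .dataset("nginx")         // groups logs by source or application (example value)
            .namespace("production"); // separates environments or domains (example value)

    System.out.println(dataStream);
  }
}
```

Attaching it is then a single call on the destination, `destination.dataStream(dataStream)`, which also propagates the `unparsed` flag as the hunk above shows.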
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java index e19f2eb045c..17f3ab5f4f6 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java @@ -18,8 +18,11 @@ import java.util.Objects; /** - * The enrichment_table processor enriches logs using a static CSV file or GeoIP - * database. + * The enrichment_table processor enriches logs using a static CSV file, GeoIP + * database, or reference table. Exactly one of file, geoip, or + * reference_table must be configured. + * + *
<p>
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_DISPLAY_NAME, @@ -28,6 +31,7 @@ ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_GEOIP, ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_ID, ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_REFERENCE_TABLE, ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_TARGET, ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_TYPE }) @@ -53,6 +57,9 @@ public class ObservabilityPipelineEnrichmentTableProcessor { public static final String JSON_PROPERTY_INCLUDE = "include"; private String include; + public static final String JSON_PROPERTY_REFERENCE_TABLE = "reference_table"; + private ObservabilityPipelineEnrichmentTableReferenceTable referenceTable; + public static final String JSON_PROPERTY_TARGET = "target"; private String target; @@ -205,6 +212,29 @@ public void setInclude(String include) { this.include = include; } + public ObservabilityPipelineEnrichmentTableProcessor referenceTable( + ObservabilityPipelineEnrichmentTableReferenceTable referenceTable) { + this.referenceTable = referenceTable; + this.unparsed |= referenceTable.unparsed; + return this; + } + + /** + * Uses a Datadog reference table to enrich logs. + * + * @return referenceTable + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_REFERENCE_TABLE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineEnrichmentTableReferenceTable getReferenceTable() { + return referenceTable; + } + + public void setReferenceTable(ObservabilityPipelineEnrichmentTableReferenceTable referenceTable) { + this.referenceTable = referenceTable; + } + public ObservabilityPipelineEnrichmentTableProcessor target(String target) { this.target = target; return this; @@ -315,6 +345,8 @@ public boolean equals(Object o) { && Objects.equals(this.geoip, observabilityPipelineEnrichmentTableProcessor.geoip) && Objects.equals(this.id, observabilityPipelineEnrichmentTableProcessor.id) && Objects.equals(this.include, observabilityPipelineEnrichmentTableProcessor.include) + && Objects.equals( + this.referenceTable, observabilityPipelineEnrichmentTableProcessor.referenceTable) && Objects.equals(this.target, observabilityPipelineEnrichmentTableProcessor.target) && Objects.equals(this.type, observabilityPipelineEnrichmentTableProcessor.type) && Objects.equals( @@ -325,7 +357,16 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - displayName, enabled, file, geoip, id, include, target, type, additionalProperties); + displayName, + enabled, + file, + geoip, + id, + include, + referenceTable, + target, + type, + additionalProperties); } @Override @@ -338,6 +379,7 @@ public String toString() { sb.append(" geoip: ").append(toIndentedString(geoip)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" referenceTable: ").append(toIndentedString(referenceTable)).append("\n"); sb.append(" target: ").append(toIndentedString(target)).append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); sb.append(" additionalProperties: ") diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableReferenceTable.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableReferenceTable.java 
new file mode 100644 index 00000000000..8577ddcb78c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableReferenceTable.java @@ -0,0 +1,219 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** Uses a Datadog reference table to enrich logs. */ +@JsonPropertyOrder({ + ObservabilityPipelineEnrichmentTableReferenceTable.JSON_PROPERTY_COLUMNS, + ObservabilityPipelineEnrichmentTableReferenceTable.JSON_PROPERTY_KEY_FIELD, + ObservabilityPipelineEnrichmentTableReferenceTable.JSON_PROPERTY_TABLE_ID +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineEnrichmentTableReferenceTable { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_COLUMNS = "columns"; + private List columns = null; + + public static final String JSON_PROPERTY_KEY_FIELD = "key_field"; + private String keyField; + + public static final String JSON_PROPERTY_TABLE_ID = "table_id"; + private String tableId; + + public ObservabilityPipelineEnrichmentTableReferenceTable() {} + + @JsonCreator + public ObservabilityPipelineEnrichmentTableReferenceTable( + @JsonProperty(required = true, value = JSON_PROPERTY_KEY_FIELD) String keyField, + @JsonProperty(required = true, value = JSON_PROPERTY_TABLE_ID) String tableId) { + this.keyField = keyField; + this.tableId = tableId; + } + + public ObservabilityPipelineEnrichmentTableReferenceTable columns(List columns) { + this.columns = columns; + return this; + } + + public ObservabilityPipelineEnrichmentTableReferenceTable addColumnsItem(String columnsItem) { + if (this.columns == null) { + this.columns = new ArrayList<>(); + } + this.columns.add(columnsItem); + return this; + } + + /** + * List of column names to include from the reference table. If not provided, all columns are + * included. + * + * @return columns + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_COLUMNS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getColumns() { + return columns; + } + + public void setColumns(List columns) { + this.columns = columns; + } + + public ObservabilityPipelineEnrichmentTableReferenceTable keyField(String keyField) { + this.keyField = keyField; + return this; + } + + /** + * Path to the field in the log event to match against the reference table. 
+ * + * @return keyField + */ + @JsonProperty(JSON_PROPERTY_KEY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getKeyField() { + return keyField; + } + + public void setKeyField(String keyField) { + this.keyField = keyField; + } + + public ObservabilityPipelineEnrichmentTableReferenceTable tableId(String tableId) { + this.tableId = tableId; + return this; + } + + /** + * The unique identifier of the reference table. + * + * @return tableId + */ + @JsonProperty(JSON_PROPERTY_TABLE_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getTableId() { + return tableId; + } + + public void setTableId(String tableId) { + this.tableId = tableId; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineEnrichmentTableReferenceTable + */ + @JsonAnySetter + public ObservabilityPipelineEnrichmentTableReferenceTable putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineEnrichmentTableReferenceTable object is equal to o. 
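A minimal sketch of the new reference-table enrichment option; per the processor Javadoc above, exactly one of `file`, `geoip`, or `reference_table` may be configured. The key field path and table UUID below are placeholders.

```java
import com.datadog.api.client.v2.model.ObservabilityPipelineEnrichmentTableReferenceTable;

import java.util.Arrays;

public class ReferenceTableSketch {
  public static void main(String[] args) {
    // key_field and table_id are required, so they go through the constructor;
    // columns is optional and, when omitted, every column is included.
    ObservabilityPipelineEnrichmentTableReferenceTable referenceTable =
        new ObservabilityPipelineEnrichmentTableReferenceTable(
                "attributes.usr.id",                        // key_field (placeholder path)
                "00000000-0000-0000-0000-000000000000")     // table_id (placeholder UUID)
            .columns(Arrays.asList("team", "cost_center")); // optional column allowlist (examples)

    System.out.println(referenceTable);
  }
}
```

It then hangs off the processor via the fluent `referenceTable(...)` setter added in this hunk.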
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineEnrichmentTableReferenceTable + observabilityPipelineEnrichmentTableReferenceTable = + (ObservabilityPipelineEnrichmentTableReferenceTable) o; + return Objects.equals(this.columns, observabilityPipelineEnrichmentTableReferenceTable.columns) + && Objects.equals( + this.keyField, observabilityPipelineEnrichmentTableReferenceTable.keyField) + && Objects.equals(this.tableId, observabilityPipelineEnrichmentTableReferenceTable.tableId) + && Objects.equals( + this.additionalProperties, + observabilityPipelineEnrichmentTableReferenceTable.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(columns, keyField, tableId, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineEnrichmentTableReferenceTable {\n"); + sb.append(" columns: ").append(toIndentedString(columns)).append("\n"); + sb.append(" keyField: ").append(toIndentedString(keyField)).append("\n"); + sb.append(" tableId: ").append(toIndentedString(tableId)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java index cfbd5a5b3fa..f433d07dc86 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java @@ -18,8 +18,11 @@ import java.util.Objects; /** - * The filter processor allows conditional processing of logs based on a Datadog search - * query. Logs that match the include query are passed through; others are discarded. + * The filter processor allows conditional processing of logs/metrics based on a + * Datadog search query. Logs/metrics that match the include query are passed through; + * others are discarded. + * + *
<p>
Supported pipeline types: logs, metrics */ @JsonPropertyOrder({ ObservabilityPipelineFilterProcessor.JSON_PROPERTY_DISPLAY_NAME, @@ -132,8 +135,8 @@ public ObservabilityPipelineFilterProcessor include(String include) { } /** - * A Datadog search query used to determine which logs should pass through the filter. Logs that - * match this query continue to downstream components; others are dropped. + * A Datadog search query used to determine which logs/metrics should pass through the filter. + * Logs/metrics that match this query continue to downstream components; others are dropped. * * @return include */ diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java index 5ec1382b4be..3135fdde97a 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The fluent_bit source ingests logs from Fluent Bit. */ +/** + * The fluent_bit source ingests logs from Fluent Bit. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineFluentBitSource.JSON_PROPERTY_ID, ObservabilityPipelineFluentBitSource.JSON_PROPERTY_TLS, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java index 16111fa18ac..7ef3df02faa 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The fluentd source ingests logs from a Fluentd-compatible service. */ +/** + * The fluentd source ingests logs from a Fluentd-compatible service. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineFluentdSource.JSON_PROPERTY_ID, ObservabilityPipelineFluentdSource.JSON_PROPERTY_TLS, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java index aa333b96abb..38659631ce9 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java @@ -23,6 +23,8 @@ * The generate_datadog_metrics processor creates custom metrics from logs and sends * them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log * fields. + * + *
<p>
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_DISPLAY_NAME, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java index 7f5b75c361c..42e408347d2 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The google_chronicle destination sends logs to Google Chronicle. */ +/** + * The google_chronicle destination sends logs to Google Chronicle. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_AUTH, ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_CUSTOMER_ID, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java index 032593f4086..b66adc5727a 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java @@ -22,6 +22,8 @@ /** * The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) * bucket. It requires a bucket name, GCP authentication, and metadata fields. + * + *
<p>
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_ACL, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubDestination.java index e72df907d63..877c5b6c6d4 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The google_pubsub destination publishes logs to a Google Cloud Pub/Sub topic. */ +/** + * The google_pubsub destination publishes logs to a Google Cloud Pub/Sub topic. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineGooglePubSubDestination.JSON_PROPERTY_AUTH, ObservabilityPipelineGooglePubSubDestination.JSON_PROPERTY_ENCODING, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java index 0fb06ee38a7..aa7c288698e 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription. */ +/** + * The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_AUTH, ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_DECODING, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestination.java new file mode 100644 index 00000000000..0a94ae3bc26 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestination.java @@ -0,0 +1,350 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The http_client destination sends data to an HTTP endpoint. + * + *
<p>
Supported pipeline types: logs, metrics + */ +@JsonPropertyOrder({ + ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_AUTH_STRATEGY, + ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_COMPRESSION, + ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_ENCODING, + ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_ID, + ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_TLS, + ObservabilityPipelineHttpClientDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineHttpClientDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTH_STRATEGY = "auth_strategy"; + private ObservabilityPipelineHttpClientDestinationAuthStrategy authStrategy; + + public static final String JSON_PROPERTY_COMPRESSION = "compression"; + private ObservabilityPipelineHttpClientDestinationCompression compression; + + public static final String JSON_PROPERTY_ENCODING = "encoding"; + private ObservabilityPipelineHttpClientDestinationEncoding encoding; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineHttpClientDestinationType type = + ObservabilityPipelineHttpClientDestinationType.HTTP_CLIENT; + + public ObservabilityPipelineHttpClientDestination() {} + + @JsonCreator + public ObservabilityPipelineHttpClientDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ENCODING) + ObservabilityPipelineHttpClientDestinationEncoding encoding, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineHttpClientDestinationType type) { + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineHttpClientDestination authStrategy( + ObservabilityPipelineHttpClientDestinationAuthStrategy authStrategy) { + this.authStrategy = authStrategy; + this.unparsed |= !authStrategy.isValid(); + return this; + } + + /** + * HTTP authentication strategy. + * + * @return authStrategy + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_AUTH_STRATEGY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineHttpClientDestinationAuthStrategy getAuthStrategy() { + return authStrategy; + } + + public void setAuthStrategy(ObservabilityPipelineHttpClientDestinationAuthStrategy authStrategy) { + if (!authStrategy.isValid()) { + this.unparsed = true; + } + this.authStrategy = authStrategy; + } + + public ObservabilityPipelineHttpClientDestination compression( + ObservabilityPipelineHttpClientDestinationCompression compression) { + this.compression = compression; + this.unparsed |= compression.unparsed; + return this; + } + + /** + * Compression configuration for HTTP requests. 
+ * + * @return compression + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_COMPRESSION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineHttpClientDestinationCompression getCompression() { + return compression; + } + + public void setCompression(ObservabilityPipelineHttpClientDestinationCompression compression) { + this.compression = compression; + } + + public ObservabilityPipelineHttpClientDestination encoding( + ObservabilityPipelineHttpClientDestinationEncoding encoding) { + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + return this; + } + + /** + * Encoding format for log events. + * + * @return encoding + */ + @JsonProperty(JSON_PROPERTY_ENCODING) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineHttpClientDestinationEncoding getEncoding() { + return encoding; + } + + public void setEncoding(ObservabilityPipelineHttpClientDestinationEncoding encoding) { + if (!encoding.isValid()) { + this.unparsed = true; + } + this.encoding = encoding; + } + + public ObservabilityPipelineHttpClientDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineHttpClientDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineHttpClientDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineHttpClientDestination tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineHttpClientDestination type( + ObservabilityPipelineHttpClientDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be http_client. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineHttpClientDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineHttpClientDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. 
If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineHttpClientDestination + */ + @JsonAnySetter + public ObservabilityPipelineHttpClientDestination putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineHttpClientDestination object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineHttpClientDestination observabilityPipelineHttpClientDestination = + (ObservabilityPipelineHttpClientDestination) o; + return Objects.equals( + this.authStrategy, observabilityPipelineHttpClientDestination.authStrategy) + && Objects.equals(this.compression, observabilityPipelineHttpClientDestination.compression) + && Objects.equals(this.encoding, observabilityPipelineHttpClientDestination.encoding) + && Objects.equals(this.id, observabilityPipelineHttpClientDestination.id) + && Objects.equals(this.inputs, observabilityPipelineHttpClientDestination.inputs) + && Objects.equals(this.tls, observabilityPipelineHttpClientDestination.tls) + && Objects.equals(this.type, observabilityPipelineHttpClientDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineHttpClientDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + authStrategy, compression, encoding, id, inputs, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineHttpClientDestination {\n"); + sb.append(" authStrategy: ").append(toIndentedString(authStrategy)).append("\n"); + sb.append(" compression: ").append(toIndentedString(compression)).append("\n"); + sb.append(" encoding: ").append(toIndentedString(encoding)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
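Pulling the pieces of the new `http_client` destination together, a sketch with made-up component IDs: `encoding`, `id`, `inputs`, and `type` are required; `auth_strategy`, `compression`, and `tls` are optional and attach through the fluent setters.

```java
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestination;
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestinationAuthStrategy;
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestinationCompression;
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestinationCompressionAlgorithm;
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestinationEncoding;
import com.datadog.api.client.v2.model.ObservabilityPipelineHttpClientDestinationType;

import java.util.Arrays;

public class HttpClientDestinationSketch {
  public static void main(String[] args) {
    ObservabilityPipelineHttpClientDestination destination =
        new ObservabilityPipelineHttpClientDestination(
                ObservabilityPipelineHttpClientDestinationEncoding.JSON,
                "http-client-destination",             // id (example value)
                Arrays.asList("parse-json-processor"), // inputs: upstream IDs (example)
                ObservabilityPipelineHttpClientDestinationType.HTTP_CLIENT)
            // Optional settings chain through the fluent setters:
            .authStrategy(ObservabilityPipelineHttpClientDestinationAuthStrategy.BEARER)
            .compression(
                new ObservabilityPipelineHttpClientDestinationCompression(
                    ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.GZIP));

    System.out.println(destination);
  }
}
```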
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationAuthStrategy.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationAuthStrategy.java new file mode 100644 index 00000000000..25db15c60df --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationAuthStrategy.java @@ -0,0 +1,67 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** HTTP authentication strategy. */ +@JsonSerialize( + using = + ObservabilityPipelineHttpClientDestinationAuthStrategy + .ObservabilityPipelineHttpClientDestinationAuthStrategySerializer.class) +public class ObservabilityPipelineHttpClientDestinationAuthStrategy extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("none", "basic", "bearer")); + + public static final ObservabilityPipelineHttpClientDestinationAuthStrategy NONE = + new ObservabilityPipelineHttpClientDestinationAuthStrategy("none"); + public static final ObservabilityPipelineHttpClientDestinationAuthStrategy BASIC = + new ObservabilityPipelineHttpClientDestinationAuthStrategy("basic"); + public static final ObservabilityPipelineHttpClientDestinationAuthStrategy BEARER = + new ObservabilityPipelineHttpClientDestinationAuthStrategy("bearer"); + + ObservabilityPipelineHttpClientDestinationAuthStrategy(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineHttpClientDestinationAuthStrategySerializer + extends StdSerializer { + public ObservabilityPipelineHttpClientDestinationAuthStrategySerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineHttpClientDestinationAuthStrategySerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineHttpClientDestinationAuthStrategy value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineHttpClientDestinationAuthStrategy fromValue(String value) { + return new ObservabilityPipelineHttpClientDestinationAuthStrategy(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompression.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompression.java new file mode 100644 index 00000000000..839d55c5f8f --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompression.java @@ -0,0 +1,159 @@ +/* + * Unless explicitly stated otherwise all 
files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Compression configuration for HTTP requests. */ +@JsonPropertyOrder({ObservabilityPipelineHttpClientDestinationCompression.JSON_PROPERTY_ALGORITHM}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineHttpClientDestinationCompression { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ALGORITHM = "algorithm"; + private ObservabilityPipelineHttpClientDestinationCompressionAlgorithm algorithm; + + public ObservabilityPipelineHttpClientDestinationCompression() {} + + @JsonCreator + public ObservabilityPipelineHttpClientDestinationCompression( + @JsonProperty(required = true, value = JSON_PROPERTY_ALGORITHM) + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm algorithm) { + this.algorithm = algorithm; + this.unparsed |= !algorithm.isValid(); + } + + public ObservabilityPipelineHttpClientDestinationCompression algorithm( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm algorithm) { + this.algorithm = algorithm; + this.unparsed |= !algorithm.isValid(); + return this; + } + + /** + * Compression algorithm. + * + * @return algorithm + */ + @JsonProperty(JSON_PROPERTY_ALGORITHM) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineHttpClientDestinationCompressionAlgorithm getAlgorithm() { + return algorithm; + } + + public void setAlgorithm( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm algorithm) { + if (!algorithm.isValid()) { + this.unparsed = true; + } + this.algorithm = algorithm; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineHttpClientDestinationCompression + */ + @JsonAnySetter + public ObservabilityPipelineHttpClientDestinationCompression putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineHttpClientDestinationCompression object is equal to o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineHttpClientDestinationCompression + observabilityPipelineHttpClientDestinationCompression = + (ObservabilityPipelineHttpClientDestinationCompression) o; + return Objects.equals( + this.algorithm, observabilityPipelineHttpClientDestinationCompression.algorithm) + && Objects.equals( + this.additionalProperties, + observabilityPipelineHttpClientDestinationCompression.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(algorithm, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineHttpClientDestinationCompression {\n"); + sb.append(" algorithm: ").append(toIndentedString(algorithm)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.java new file mode 100644 index 00000000000..f8cd66fb4f6 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.java @@ -0,0 +1,64 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Compression algorithm. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm + .ObservabilityPipelineHttpClientDestinationCompressionAlgorithmSerializer.class) +public class ObservabilityPipelineHttpClientDestinationCompressionAlgorithm + extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("gzip")); + + public static final ObservabilityPipelineHttpClientDestinationCompressionAlgorithm GZIP = + new ObservabilityPipelineHttpClientDestinationCompressionAlgorithm("gzip"); + + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineHttpClientDestinationCompressionAlgorithmSerializer + extends StdSerializer { + public ObservabilityPipelineHttpClientDestinationCompressionAlgorithmSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineHttpClientDestinationCompressionAlgorithmSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineHttpClientDestinationCompressionAlgorithm fromValue( + String value) { + return new ObservabilityPipelineHttpClientDestinationCompressionAlgorithm(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationEncoding.java new file mode 100644 index 00000000000..21ae289f9dd --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationEncoding.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Encoding format for log events. 
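+ *
+ * <p>String values round-trip through {@code fromValue}, for example:
+ * <pre>{@code
+ * ObservabilityPipelineHttpClientDestinationEncoding enc =
+ *     ObservabilityPipelineHttpClientDestinationEncoding.fromValue("json"); // same value as JSON
+ * }</pre>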
*/ +@JsonSerialize( + using = + ObservabilityPipelineHttpClientDestinationEncoding + .ObservabilityPipelineHttpClientDestinationEncodingSerializer.class) +public class ObservabilityPipelineHttpClientDestinationEncoding extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("json")); + + public static final ObservabilityPipelineHttpClientDestinationEncoding JSON = + new ObservabilityPipelineHttpClientDestinationEncoding("json"); + + ObservabilityPipelineHttpClientDestinationEncoding(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineHttpClientDestinationEncodingSerializer + extends StdSerializer { + public ObservabilityPipelineHttpClientDestinationEncodingSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineHttpClientDestinationEncodingSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineHttpClientDestinationEncoding value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineHttpClientDestinationEncoding fromValue(String value) { + return new ObservabilityPipelineHttpClientDestinationEncoding(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationType.java new file mode 100644 index 00000000000..992b5925d6b --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientDestinationType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be http_client. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineHttpClientDestinationType + .ObservabilityPipelineHttpClientDestinationTypeSerializer.class) +public class ObservabilityPipelineHttpClientDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("http_client")); + + public static final ObservabilityPipelineHttpClientDestinationType HTTP_CLIENT = + new ObservabilityPipelineHttpClientDestinationType("http_client"); + + ObservabilityPipelineHttpClientDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineHttpClientDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineHttpClientDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineHttpClientDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineHttpClientDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineHttpClientDestinationType fromValue(String value) { + return new ObservabilityPipelineHttpClientDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java index 9cabf897e0a..3b1c3a9a1b4 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The http_client source scrapes logs from HTTP endpoints at regular intervals. */ +/** + * The http_client source scrapes logs from HTTP endpoints at regular intervals. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineHttpClientSource.JSON_PROPERTY_AUTH_STRATEGY, ObservabilityPipelineHttpClientSource.JSON_PROPERTY_DECODING, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSourceAuthStrategy.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSourceAuthStrategy.java index 30e2ad6f59a..02647e6a336 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSourceAuthStrategy.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSourceAuthStrategy.java @@ -26,8 +26,10 @@ public class ObservabilityPipelineHttpClientSourceAuthStrategy extends ModelEnum { private static final Set allowedValues = - new HashSet(Arrays.asList("basic", "bearer")); + new HashSet(Arrays.asList("none", "basic", "bearer")); + public static final ObservabilityPipelineHttpClientSourceAuthStrategy NONE = + new ObservabilityPipelineHttpClientSourceAuthStrategy("none"); public static final ObservabilityPipelineHttpClientSourceAuthStrategy BASIC = new ObservabilityPipelineHttpClientSourceAuthStrategy("basic"); public static final ObservabilityPipelineHttpClientSourceAuthStrategy BEARER = diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java index a3b47a71c7e..bcd3e06767e 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The http_server source collects logs over HTTP POST from external services. */ +/** + * The http_server source collects logs over HTTP POST from external services. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineHttpServerSource.JSON_PROPERTY_AUTH_STRATEGY, ObservabilityPipelineHttpServerSource.JSON_PROPERTY_DECODING, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java new file mode 100644 index 00000000000..c79502d5f65 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestination.java @@ -0,0 +1,599 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The kafka destination sends logs to Apache Kafka topics. + * + *
<p>
Supported pipeline types: logs + */ +@JsonPropertyOrder({ + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_COMPRESSION, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_ENCODING, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_HEADERS_KEY, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_ID, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_KEY_FIELD, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_LIBRDKAFKA_OPTIONS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_MESSAGE_TIMEOUT_MS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_RATE_LIMIT_DURATION_SECS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_RATE_LIMIT_NUM, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_SASL, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_SOCKET_TIMEOUT_MS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TLS, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TOPIC, + ObservabilityPipelineKafkaDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineKafkaDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_COMPRESSION = "compression"; + private ObservabilityPipelineKafkaDestinationCompression compression; + + public static final String JSON_PROPERTY_ENCODING = "encoding"; + private ObservabilityPipelineKafkaDestinationEncoding encoding; + + public static final String JSON_PROPERTY_HEADERS_KEY = "headers_key"; + private String headersKey; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_KEY_FIELD = "key_field"; + private String keyField; + + public static final String JSON_PROPERTY_LIBRDKAFKA_OPTIONS = "librdkafka_options"; + private List librdkafkaOptions = null; + + public static final String JSON_PROPERTY_MESSAGE_TIMEOUT_MS = "message_timeout_ms"; + private Long messageTimeoutMs; + + public static final String JSON_PROPERTY_RATE_LIMIT_DURATION_SECS = "rate_limit_duration_secs"; + private Long rateLimitDurationSecs; + + public static final String JSON_PROPERTY_RATE_LIMIT_NUM = "rate_limit_num"; + private Long rateLimitNum; + + public static final String JSON_PROPERTY_SASL = "sasl"; + private ObservabilityPipelineKafkaSasl sasl; + + public static final String JSON_PROPERTY_SOCKET_TIMEOUT_MS = "socket_timeout_ms"; + private Long socketTimeoutMs; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TOPIC = "topic"; + private String topic; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineKafkaDestinationType type = + ObservabilityPipelineKafkaDestinationType.KAFKA; + + public ObservabilityPipelineKafkaDestination() {} + + @JsonCreator + public ObservabilityPipelineKafkaDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ENCODING) + ObservabilityPipelineKafkaDestinationEncoding encoding, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TOPIC) String topic, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + 
ObservabilityPipelineKafkaDestinationType type) { + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + this.id = id; + this.inputs = inputs; + this.topic = topic; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineKafkaDestination compression( + ObservabilityPipelineKafkaDestinationCompression compression) { + this.compression = compression; + this.unparsed |= !compression.isValid(); + return this; + } + + /** + * Compression codec for Kafka messages. + * + * @return compression + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_COMPRESSION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineKafkaDestinationCompression getCompression() { + return compression; + } + + public void setCompression(ObservabilityPipelineKafkaDestinationCompression compression) { + if (!compression.isValid()) { + this.unparsed = true; + } + this.compression = compression; + } + + public ObservabilityPipelineKafkaDestination encoding( + ObservabilityPipelineKafkaDestinationEncoding encoding) { + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + return this; + } + + /** + * Encoding format for log events. + * + * @return encoding + */ + @JsonProperty(JSON_PROPERTY_ENCODING) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineKafkaDestinationEncoding getEncoding() { + return encoding; + } + + public void setEncoding(ObservabilityPipelineKafkaDestinationEncoding encoding) { + if (!encoding.isValid()) { + this.unparsed = true; + } + this.encoding = encoding; + } + + public ObservabilityPipelineKafkaDestination headersKey(String headersKey) { + this.headersKey = headersKey; + return this; + } + + /** + * The field name to use for Kafka message headers. + * + * @return headersKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_HEADERS_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getHeadersKey() { + return headersKey; + } + + public void setHeadersKey(String headersKey) { + this.headersKey = headersKey; + } + + public ObservabilityPipelineKafkaDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineKafkaDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineKafkaDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineKafkaDestination keyField(String keyField) { + this.keyField = keyField; + return this; + } + + /** + * The field name to use as the Kafka message key. 
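+ *
+ * <p>For example, given a destination instance (the field name is illustrative, not from
+ * the spec):
+ * <pre>{@code
+ * destination.keyField("service");
+ * }</pre>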
+ * + * @return keyField + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_KEY_FIELD) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getKeyField() { + return keyField; + } + + public void setKeyField(String keyField) { + this.keyField = keyField; + } + + public ObservabilityPipelineKafkaDestination librdkafkaOptions( + List librdkafkaOptions) { + this.librdkafkaOptions = librdkafkaOptions; + for (ObservabilityPipelineKafkaLibrdkafkaOption item : librdkafkaOptions) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineKafkaDestination addLibrdkafkaOptionsItem( + ObservabilityPipelineKafkaLibrdkafkaOption librdkafkaOptionsItem) { + if (this.librdkafkaOptions == null) { + this.librdkafkaOptions = new ArrayList<>(); + } + this.librdkafkaOptions.add(librdkafkaOptionsItem); + this.unparsed |= librdkafkaOptionsItem.unparsed; + return this; + } + + /** + * Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + * + * @return librdkafkaOptions + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_LIBRDKAFKA_OPTIONS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getLibrdkafkaOptions() { + return librdkafkaOptions; + } + + public void setLibrdkafkaOptions( + List librdkafkaOptions) { + this.librdkafkaOptions = librdkafkaOptions; + } + + public ObservabilityPipelineKafkaDestination messageTimeoutMs(Long messageTimeoutMs) { + this.messageTimeoutMs = messageTimeoutMs; + return this; + } + + /** + * Maximum time in milliseconds to wait for message delivery confirmation. minimum: 1 + * + * @return messageTimeoutMs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_MESSAGE_TIMEOUT_MS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getMessageTimeoutMs() { + return messageTimeoutMs; + } + + public void setMessageTimeoutMs(Long messageTimeoutMs) { + this.messageTimeoutMs = messageTimeoutMs; + } + + public ObservabilityPipelineKafkaDestination rateLimitDurationSecs(Long rateLimitDurationSecs) { + this.rateLimitDurationSecs = rateLimitDurationSecs; + return this; + } + + /** + * Duration in seconds for the rate limit window. minimum: 1 + * + * @return rateLimitDurationSecs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_RATE_LIMIT_DURATION_SECS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getRateLimitDurationSecs() { + return rateLimitDurationSecs; + } + + public void setRateLimitDurationSecs(Long rateLimitDurationSecs) { + this.rateLimitDurationSecs = rateLimitDurationSecs; + } + + public ObservabilityPipelineKafkaDestination rateLimitNum(Long rateLimitNum) { + this.rateLimitNum = rateLimitNum; + return this; + } + + /** + * Maximum number of messages allowed per rate limit duration. minimum: 1 + * + * @return rateLimitNum + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_RATE_LIMIT_NUM) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getRateLimitNum() { + return rateLimitNum; + } + + public void setRateLimitNum(Long rateLimitNum) { + this.rateLimitNum = rateLimitNum; + } + + public ObservabilityPipelineKafkaDestination sasl(ObservabilityPipelineKafkaSasl sasl) { + this.sasl = sasl; + this.unparsed |= sasl.unparsed; + return this; + } + + /** + * Specifies the SASL mechanism for authenticating with a Kafka cluster. 
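+ *
+ * <p>A sketch, assuming the generated no-argument constructor on the SASL model:
+ * <pre>{@code
+ * destination.sasl(
+ *     new ObservabilityPipelineKafkaSasl()
+ *         .mechanism(ObservabilityPipelineKafkaSaslMechanism.PLAIN));
+ * }</pre>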
+ * + * @return sasl + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SASL) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineKafkaSasl getSasl() { + return sasl; + } + + public void setSasl(ObservabilityPipelineKafkaSasl sasl) { + this.sasl = sasl; + } + + public ObservabilityPipelineKafkaDestination socketTimeoutMs(Long socketTimeoutMs) { + this.socketTimeoutMs = socketTimeoutMs; + return this; + } + + /** + * Socket timeout in milliseconds for network requests. minimum: 10 maximum: 300000 + * + * @return socketTimeoutMs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SOCKET_TIMEOUT_MS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getSocketTimeoutMs() { + return socketTimeoutMs; + } + + public void setSocketTimeoutMs(Long socketTimeoutMs) { + this.socketTimeoutMs = socketTimeoutMs; + } + + public ObservabilityPipelineKafkaDestination tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineKafkaDestination topic(String topic) { + this.topic = topic; + return this; + } + + /** + * The Kafka topic name to publish logs to. + * + * @return topic + */ + @JsonProperty(JSON_PROPERTY_TOPIC) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getTopic() { + return topic; + } + + public void setTopic(String topic) { + this.topic = topic; + } + + public ObservabilityPipelineKafkaDestination type( + ObservabilityPipelineKafkaDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be kafka. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineKafkaDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineKafkaDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineKafkaDestination + */ + @JsonAnySetter + public ObservabilityPipelineKafkaDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineKafkaDestination object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineKafkaDestination observabilityPipelineKafkaDestination = + (ObservabilityPipelineKafkaDestination) o; + return Objects.equals(this.compression, observabilityPipelineKafkaDestination.compression) + && Objects.equals(this.encoding, observabilityPipelineKafkaDestination.encoding) + && Objects.equals(this.headersKey, observabilityPipelineKafkaDestination.headersKey) + && Objects.equals(this.id, observabilityPipelineKafkaDestination.id) + && Objects.equals(this.inputs, observabilityPipelineKafkaDestination.inputs) + && Objects.equals(this.keyField, observabilityPipelineKafkaDestination.keyField) + && Objects.equals( + this.librdkafkaOptions, observabilityPipelineKafkaDestination.librdkafkaOptions) + && Objects.equals( + this.messageTimeoutMs, observabilityPipelineKafkaDestination.messageTimeoutMs) + && Objects.equals( + this.rateLimitDurationSecs, observabilityPipelineKafkaDestination.rateLimitDurationSecs) + && Objects.equals(this.rateLimitNum, observabilityPipelineKafkaDestination.rateLimitNum) + && Objects.equals(this.sasl, observabilityPipelineKafkaDestination.sasl) + && Objects.equals( + this.socketTimeoutMs, observabilityPipelineKafkaDestination.socketTimeoutMs) + && Objects.equals(this.tls, observabilityPipelineKafkaDestination.tls) + && Objects.equals(this.topic, observabilityPipelineKafkaDestination.topic) + && Objects.equals(this.type, observabilityPipelineKafkaDestination.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineKafkaDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + compression, + encoding, + headersKey, + id, + inputs, + keyField, + librdkafkaOptions, + messageTimeoutMs, + rateLimitDurationSecs, + rateLimitNum, + sasl, + socketTimeoutMs, + tls, + topic, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineKafkaDestination {\n"); + sb.append(" compression: ").append(toIndentedString(compression)).append("\n"); + sb.append(" encoding: ").append(toIndentedString(encoding)).append("\n"); + sb.append(" headersKey: ").append(toIndentedString(headersKey)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" keyField: ").append(toIndentedString(keyField)).append("\n"); + sb.append(" librdkafkaOptions: ").append(toIndentedString(librdkafkaOptions)).append("\n"); + sb.append(" messageTimeoutMs: ").append(toIndentedString(messageTimeoutMs)).append("\n"); + sb.append(" rateLimitDurationSecs: ") + .append(toIndentedString(rateLimitDurationSecs)) + .append("\n"); + sb.append(" rateLimitNum: ").append(toIndentedString(rateLimitNum)).append("\n"); + sb.append(" sasl: ").append(toIndentedString(sasl)).append("\n"); + sb.append(" socketTimeoutMs: ").append(toIndentedString(socketTimeoutMs)).append("\n"); + sb.append(" tls: 
").append(toIndentedString(tls)).append("\n"); + sb.append(" topic: ").append(toIndentedString(topic)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java new file mode 100644 index 00000000000..0a66e9a713f --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationCompression.java @@ -0,0 +1,71 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Compression codec for Kafka messages. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineKafkaDestinationCompression + .ObservabilityPipelineKafkaDestinationCompressionSerializer.class) +public class ObservabilityPipelineKafkaDestinationCompression extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("none", "gzip", "snappy", "lz4", "zstd")); + + public static final ObservabilityPipelineKafkaDestinationCompression NONE = + new ObservabilityPipelineKafkaDestinationCompression("none"); + public static final ObservabilityPipelineKafkaDestinationCompression GZIP = + new ObservabilityPipelineKafkaDestinationCompression("gzip"); + public static final ObservabilityPipelineKafkaDestinationCompression SNAPPY = + new ObservabilityPipelineKafkaDestinationCompression("snappy"); + public static final ObservabilityPipelineKafkaDestinationCompression LZ4 = + new ObservabilityPipelineKafkaDestinationCompression("lz4"); + public static final ObservabilityPipelineKafkaDestinationCompression ZSTD = + new ObservabilityPipelineKafkaDestinationCompression("zstd"); + + ObservabilityPipelineKafkaDestinationCompression(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineKafkaDestinationCompressionSerializer + extends StdSerializer { + public ObservabilityPipelineKafkaDestinationCompressionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineKafkaDestinationCompressionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineKafkaDestinationCompression value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineKafkaDestinationCompression fromValue(String value) { + return new ObservabilityPipelineKafkaDestinationCompression(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java new file mode 100644 index 00000000000..57abf5f828e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationEncoding.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Encoding format for log events. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineKafkaDestinationEncoding + .ObservabilityPipelineKafkaDestinationEncodingSerializer.class) +public class ObservabilityPipelineKafkaDestinationEncoding extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("json", "raw_message")); + + public static final ObservabilityPipelineKafkaDestinationEncoding JSON = + new ObservabilityPipelineKafkaDestinationEncoding("json"); + public static final ObservabilityPipelineKafkaDestinationEncoding RAW_MESSAGE = + new ObservabilityPipelineKafkaDestinationEncoding("raw_message"); + + ObservabilityPipelineKafkaDestinationEncoding(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineKafkaDestinationEncodingSerializer + extends StdSerializer { + public ObservabilityPipelineKafkaDestinationEncodingSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineKafkaDestinationEncodingSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineKafkaDestinationEncoding value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineKafkaDestinationEncoding fromValue(String value) { + return new ObservabilityPipelineKafkaDestinationEncoding(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java new file mode 100644 index 00000000000..0967bbb11c5 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaDestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be kafka. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineKafkaDestinationType + .ObservabilityPipelineKafkaDestinationTypeSerializer.class) +public class ObservabilityPipelineKafkaDestinationType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("kafka")); + + public static final ObservabilityPipelineKafkaDestinationType KAFKA = + new ObservabilityPipelineKafkaDestinationType("kafka"); + + ObservabilityPipelineKafkaDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineKafkaDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineKafkaDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineKafkaDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineKafkaDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineKafkaDestinationType fromValue(String value) { + return new ObservabilityPipelineKafkaDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java similarity index 78% rename from src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java rename to src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java index e7211139e49..8d4ed2a1d60 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaLibrdkafkaOption.java @@ -19,15 +19,15 @@ /** * Represents a key-value pair used to configure low-level librdkafka client options - * for Kafka sources, such as timeouts, buffer sizes, and security settings. + * for Kafka source and destination, such as timeouts, buffer sizes, and security settings. 
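+ *
+ * <p>For example, with a standard librdkafka configuration key (the value is illustrative):
+ * <pre>{@code
+ * new ObservabilityPipelineKafkaLibrdkafkaOption("fetch.message.max.bytes", "1048576");
+ * }</pre>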
*/ @JsonPropertyOrder({ - ObservabilityPipelineKafkaSourceLibrdkafkaOption.JSON_PROPERTY_NAME, - ObservabilityPipelineKafkaSourceLibrdkafkaOption.JSON_PROPERTY_VALUE + ObservabilityPipelineKafkaLibrdkafkaOption.JSON_PROPERTY_NAME, + ObservabilityPipelineKafkaLibrdkafkaOption.JSON_PROPERTY_VALUE }) @jakarta.annotation.Generated( value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") -public class ObservabilityPipelineKafkaSourceLibrdkafkaOption { +public class ObservabilityPipelineKafkaLibrdkafkaOption { @JsonIgnore public boolean unparsed = false; public static final String JSON_PROPERTY_NAME = "name"; private String name; @@ -35,17 +35,17 @@ public class ObservabilityPipelineKafkaSourceLibrdkafkaOption { public static final String JSON_PROPERTY_VALUE = "value"; private String value; - public ObservabilityPipelineKafkaSourceLibrdkafkaOption() {} + public ObservabilityPipelineKafkaLibrdkafkaOption() {} @JsonCreator - public ObservabilityPipelineKafkaSourceLibrdkafkaOption( + public ObservabilityPipelineKafkaLibrdkafkaOption( @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, @JsonProperty(required = true, value = JSON_PROPERTY_VALUE) String value) { this.name = name; this.value = value; } - public ObservabilityPipelineKafkaSourceLibrdkafkaOption name(String name) { + public ObservabilityPipelineKafkaLibrdkafkaOption name(String name) { this.name = name; return this; } @@ -65,7 +65,7 @@ public void setName(String name) { this.name = name; } - public ObservabilityPipelineKafkaSourceLibrdkafkaOption value(String value) { + public ObservabilityPipelineKafkaLibrdkafkaOption value(String value) { this.value = value; return this; } @@ -97,10 +97,10 @@ public void setValue(String value) { * * @param key The arbitrary key to set * @param value The associated value - * @return ObservabilityPipelineKafkaSourceLibrdkafkaOption + * @return ObservabilityPipelineKafkaLibrdkafkaOption */ @JsonAnySetter - public ObservabilityPipelineKafkaSourceLibrdkafkaOption putAdditionalProperty( + public ObservabilityPipelineKafkaLibrdkafkaOption putAdditionalProperty( String key, Object value) { if (this.additionalProperties == null) { this.additionalProperties = new HashMap(); @@ -132,7 +132,7 @@ public Object getAdditionalProperty(String key) { return this.additionalProperties.get(key); } - /** Return true if this ObservabilityPipelineKafkaSourceLibrdkafkaOption object is equal to o. */ + /** Return true if this ObservabilityPipelineKafkaLibrdkafkaOption object is equal to o. 
*/ @Override public boolean equals(Object o) { if (this == o) { @@ -141,14 +141,13 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ObservabilityPipelineKafkaSourceLibrdkafkaOption - observabilityPipelineKafkaSourceLibrdkafkaOption = - (ObservabilityPipelineKafkaSourceLibrdkafkaOption) o; - return Objects.equals(this.name, observabilityPipelineKafkaSourceLibrdkafkaOption.name) - && Objects.equals(this.value, observabilityPipelineKafkaSourceLibrdkafkaOption.value) + ObservabilityPipelineKafkaLibrdkafkaOption observabilityPipelineKafkaLibrdkafkaOption = + (ObservabilityPipelineKafkaLibrdkafkaOption) o; + return Objects.equals(this.name, observabilityPipelineKafkaLibrdkafkaOption.name) + && Objects.equals(this.value, observabilityPipelineKafkaLibrdkafkaOption.value) && Objects.equals( this.additionalProperties, - observabilityPipelineKafkaSourceLibrdkafkaOption.additionalProperties); + observabilityPipelineKafkaLibrdkafkaOption.additionalProperties); } @Override @@ -159,7 +158,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ObservabilityPipelineKafkaSourceLibrdkafkaOption {\n"); + sb.append("class ObservabilityPipelineKafkaLibrdkafkaOption {\n"); sb.append(" name: ").append(toIndentedString(name)).append("\n"); sb.append(" value: ").append(toIndentedString(value)).append("\n"); sb.append(" additionalProperties: ") diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java similarity index 78% rename from src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java rename to src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java index 0475f35416b..89c8f57a3ac 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSasl.java @@ -17,16 +17,16 @@ import java.util.Objects; /** Specifies the SASL mechanism for authenticating with a Kafka cluster. 
*/ -@JsonPropertyOrder({ObservabilityPipelineKafkaSourceSasl.JSON_PROPERTY_MECHANISM}) +@JsonPropertyOrder({ObservabilityPipelineKafkaSasl.JSON_PROPERTY_MECHANISM}) @jakarta.annotation.Generated( value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") -public class ObservabilityPipelineKafkaSourceSasl { +public class ObservabilityPipelineKafkaSasl { @JsonIgnore public boolean unparsed = false; public static final String JSON_PROPERTY_MECHANISM = "mechanism"; - private ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism; + private ObservabilityPipelineKafkaSaslMechanism mechanism; - public ObservabilityPipelineKafkaSourceSasl mechanism( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism) { + public ObservabilityPipelineKafkaSasl mechanism( + ObservabilityPipelineKafkaSaslMechanism mechanism) { this.mechanism = mechanism; this.unparsed |= !mechanism.isValid(); return this; @@ -40,11 +40,11 @@ public ObservabilityPipelineKafkaSourceSasl mechanism( @jakarta.annotation.Nullable @JsonProperty(JSON_PROPERTY_MECHANISM) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) - public ObservabilityPipelinePipelineKafkaSourceSaslMechanism getMechanism() { + public ObservabilityPipelineKafkaSaslMechanism getMechanism() { return mechanism; } - public void setMechanism(ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism) { + public void setMechanism(ObservabilityPipelineKafkaSaslMechanism mechanism) { if (!mechanism.isValid()) { this.unparsed = true; } @@ -63,10 +63,10 @@ public void setMechanism(ObservabilityPipelinePipelineKafkaSourceSaslMechanism m * * @param key The arbitrary key to set * @param value The associated value - * @return ObservabilityPipelineKafkaSourceSasl + * @return ObservabilityPipelineKafkaSasl */ @JsonAnySetter - public ObservabilityPipelineKafkaSourceSasl putAdditionalProperty(String key, Object value) { + public ObservabilityPipelineKafkaSasl putAdditionalProperty(String key, Object value) { if (this.additionalProperties == null) { this.additionalProperties = new HashMap(); } @@ -97,7 +97,7 @@ public Object getAdditionalProperty(String key) { return this.additionalProperties.get(key); } - /** Return true if this ObservabilityPipelineKafkaSourceSasl object is equal to o. */ + /** Return true if this ObservabilityPipelineKafkaSasl object is equal to o. 
*/ @Override public boolean equals(Object o) { if (this == o) { @@ -106,11 +106,11 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ObservabilityPipelineKafkaSourceSasl observabilityPipelineKafkaSourceSasl = - (ObservabilityPipelineKafkaSourceSasl) o; - return Objects.equals(this.mechanism, observabilityPipelineKafkaSourceSasl.mechanism) + ObservabilityPipelineKafkaSasl observabilityPipelineKafkaSasl = + (ObservabilityPipelineKafkaSasl) o; + return Objects.equals(this.mechanism, observabilityPipelineKafkaSasl.mechanism) && Objects.equals( - this.additionalProperties, observabilityPipelineKafkaSourceSasl.additionalProperties); + this.additionalProperties, observabilityPipelineKafkaSasl.additionalProperties); } @Override @@ -121,7 +121,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ObservabilityPipelineKafkaSourceSasl {\n"); + sb.append("class ObservabilityPipelineKafkaSasl {\n"); sb.append(" mechanism: ").append(toIndentedString(mechanism)).append("\n"); sb.append(" additionalProperties: ") .append(toIndentedString(additionalProperties)) diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java new file mode 100644 index 00000000000..8022d54681f --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSaslMechanism.java @@ -0,0 +1,67 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** SASL mechanism used for Kafka authentication. 
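+ *
+ * <p>Constants mirror the wire values, for example:
+ * <pre>{@code
+ * ObservabilityPipelineKafkaSaslMechanism mechanism =
+ *     ObservabilityPipelineKafkaSaslMechanism.fromValue("SCRAM-SHA-512");
+ * }</pre>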
*/ +@JsonSerialize( + using = + ObservabilityPipelineKafkaSaslMechanism.ObservabilityPipelineKafkaSaslMechanismSerializer + .class) +public class ObservabilityPipelineKafkaSaslMechanism extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512")); + + public static final ObservabilityPipelineKafkaSaslMechanism PLAIN = + new ObservabilityPipelineKafkaSaslMechanism("PLAIN"); + public static final ObservabilityPipelineKafkaSaslMechanism SCRAMNOT_SHANOT_256 = + new ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-256"); + public static final ObservabilityPipelineKafkaSaslMechanism SCRAMNOT_SHANOT_512 = + new ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-512"); + + ObservabilityPipelineKafkaSaslMechanism(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineKafkaSaslMechanismSerializer + extends StdSerializer { + public ObservabilityPipelineKafkaSaslMechanismSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineKafkaSaslMechanismSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineKafkaSaslMechanism value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineKafkaSaslMechanism fromValue(String value) { + return new ObservabilityPipelineKafkaSaslMechanism(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java index d8c3ea6254d..a7099ea959e 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The kafka source ingests data from Apache Kafka topics. */ +/** + * The kafka source ingests data from Apache Kafka topics. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineKafkaSource.JSON_PROPERTY_GROUP_ID, ObservabilityPipelineKafkaSource.JSON_PROPERTY_ID, @@ -40,10 +44,10 @@ public class ObservabilityPipelineKafkaSource { private String id; public static final String JSON_PROPERTY_LIBRDKAFKA_OPTIONS = "librdkafka_options"; - private List librdkafkaOptions = null; + private List librdkafkaOptions = null; public static final String JSON_PROPERTY_SASL = "sasl"; - private ObservabilityPipelineKafkaSourceSasl sasl; + private ObservabilityPipelineKafkaSasl sasl; public static final String JSON_PROPERTY_TLS = "tls"; private ObservabilityPipelineTls tls; @@ -112,16 +116,16 @@ public void setId(String id) { } public ObservabilityPipelineKafkaSource librdkafkaOptions( - List librdkafkaOptions) { + List librdkafkaOptions) { this.librdkafkaOptions = librdkafkaOptions; - for (ObservabilityPipelineKafkaSourceLibrdkafkaOption item : librdkafkaOptions) { + for (ObservabilityPipelineKafkaLibrdkafkaOption item : librdkafkaOptions) { this.unparsed |= item.unparsed; } return this; } public ObservabilityPipelineKafkaSource addLibrdkafkaOptionsItem( - ObservabilityPipelineKafkaSourceLibrdkafkaOption librdkafkaOptionsItem) { + ObservabilityPipelineKafkaLibrdkafkaOption librdkafkaOptionsItem) { if (this.librdkafkaOptions == null) { this.librdkafkaOptions = new ArrayList<>(); } @@ -138,16 +142,16 @@ public ObservabilityPipelineKafkaSource addLibrdkafkaOptionsItem( @jakarta.annotation.Nullable @JsonProperty(JSON_PROPERTY_LIBRDKAFKA_OPTIONS) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) - public List getLibrdkafkaOptions() { + public List getLibrdkafkaOptions() { return librdkafkaOptions; } public void setLibrdkafkaOptions( - List librdkafkaOptions) { + List librdkafkaOptions) { this.librdkafkaOptions = librdkafkaOptions; } - public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSourceSasl sasl) { + public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSasl sasl) { this.sasl = sasl; this.unparsed |= sasl.unparsed; return this; @@ -161,11 +165,11 @@ public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSourceSas @jakarta.annotation.Nullable @JsonProperty(JSON_PROPERTY_SASL) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) - public ObservabilityPipelineKafkaSourceSasl getSasl() { + public ObservabilityPipelineKafkaSasl getSasl() { return sasl; } - public void setSasl(ObservabilityPipelineKafkaSourceSasl sasl) { + public void setSasl(ObservabilityPipelineKafkaSasl sasl) { this.sasl = sasl; } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java index 18a531220ef..4dfe9cdf425 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The logstash source ingests logs from a Logstash forwarder. */ +/** + * The logstash source ingests logs from a Logstash forwarder. + * + *
<p>
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineLogstashSource.JSON_PROPERTY_ID, ObservabilityPipelineLogstashSource.JSON_PROPERTY_TLS, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessor.java new file mode 100644 index 00000000000..69058ca10af --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessor.java @@ -0,0 +1,315 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The metric_tags processor filters metrics based on their tags using Datadog tag key + * patterns. + * + *
<p>
Supported pipeline types: metrics + */ +@JsonPropertyOrder({ + ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_DISPLAY_NAME, + ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_RULES, + ObservabilityPipelineMetricTagsProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineMetricTagsProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DISPLAY_NAME = "display_name"; + private String displayName; + + public static final String JSON_PROPERTY_ENABLED = "enabled"; + private Boolean enabled; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_RULES = "rules"; + private List rules = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineMetricTagsProcessorType type = + ObservabilityPipelineMetricTagsProcessorType.METRIC_TAGS; + + public ObservabilityPipelineMetricTagsProcessor() {} + + @JsonCreator + public ObservabilityPipelineMetricTagsProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_RULES) + List rules, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineMetricTagsProcessorType type) { + this.enabled = enabled; + this.id = id; + this.include = include; + this.rules = rules; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineMetricTagsProcessor displayName(String displayName) { + this.displayName = displayName; + return this; + } + + /** + * The display name for a component. + * + * @return displayName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public ObservabilityPipelineMetricTagsProcessor enabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + /** + * Whether this processor is enabled. + * + * @return enabled + */ + @JsonProperty(JSON_PROPERTY_ENABLED) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public ObservabilityPipelineMetricTagsProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineMetricTagsProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which metrics this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineMetricTagsProcessor rules( + List rules) { + this.rules = rules; + for (ObservabilityPipelineMetricTagsProcessorRule item : rules) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineMetricTagsProcessor addRulesItem( + ObservabilityPipelineMetricTagsProcessorRule rulesItem) { + this.rules.add(rulesItem); + this.unparsed |= rulesItem.unparsed; + return this; + } + + /** + * A list of rules for filtering metric tags. + * + * @return rules + */ + @JsonProperty(JSON_PROPERTY_RULES) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getRules() { + return rules; + } + + public void setRules(List rules) { + this.rules = rules; + } + + public ObservabilityPipelineMetricTagsProcessor type( + ObservabilityPipelineMetricTagsProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be metric_tags. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineMetricTagsProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineMetricTagsProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineMetricTagsProcessor + */ + @JsonAnySetter + public ObservabilityPipelineMetricTagsProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineMetricTagsProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineMetricTagsProcessor observabilityPipelineMetricTagsProcessor = + (ObservabilityPipelineMetricTagsProcessor) o; + return Objects.equals(this.displayName, observabilityPipelineMetricTagsProcessor.displayName) + && Objects.equals(this.enabled, observabilityPipelineMetricTagsProcessor.enabled) + && Objects.equals(this.id, observabilityPipelineMetricTagsProcessor.id) + && Objects.equals(this.include, observabilityPipelineMetricTagsProcessor.include) + && Objects.equals(this.rules, observabilityPipelineMetricTagsProcessor.rules) + && Objects.equals(this.type, observabilityPipelineMetricTagsProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineMetricTagsProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, enabled, id, include, rules, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineMetricTagsProcessor {\n"); + sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); + sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" rules: ").append(toIndentedString(rules)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRule.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRule.java new file mode 100644 index 00000000000..eadc83ffaca --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRule.java @@ -0,0 +1,254 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** Defines a rule for filtering metric tags based on key patterns. 
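A note on the validation pattern used throughout these generated models: enum-like types such as the rule action extend ModelEnum and expose isValid(), and every constructor or setter that receives one ORs !isValid() into the model's public unparsed flag instead of throwing. A minimal sketch of that behavior; the "rename" value is a deliberately invalid example:

import com.datadog.api.client.v2.model.ObservabilityPipelineMetricTagsProcessorRuleAction;

public class UnparsedFlagSketch {
  public static void main(String[] args) {
    // Known wire values are valid.
    ObservabilityPipelineMetricTagsProcessorRuleAction known =
        ObservabilityPipelineMetricTagsProcessorRuleAction.fromValue("include");
    System.out.println(known.isValid()); // true

    // Unknown values are retained rather than rejected; any model that
    // receives one flips its `unparsed` field to true.
    ObservabilityPipelineMetricTagsProcessorRuleAction unknown =
        ObservabilityPipelineMetricTagsProcessorRuleAction.fromValue("rename");
    System.out.println(unknown.isValid()); // false
  }
}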
*/ +@JsonPropertyOrder({ + ObservabilityPipelineMetricTagsProcessorRule.JSON_PROPERTY_ACTION, + ObservabilityPipelineMetricTagsProcessorRule.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineMetricTagsProcessorRule.JSON_PROPERTY_KEYS, + ObservabilityPipelineMetricTagsProcessorRule.JSON_PROPERTY_MODE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineMetricTagsProcessorRule { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ACTION = "action"; + private ObservabilityPipelineMetricTagsProcessorRuleAction action; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_KEYS = "keys"; + private List keys = new ArrayList<>(); + + public static final String JSON_PROPERTY_MODE = "mode"; + private ObservabilityPipelineMetricTagsProcessorRuleMode mode; + + public ObservabilityPipelineMetricTagsProcessorRule() {} + + @JsonCreator + public ObservabilityPipelineMetricTagsProcessorRule( + @JsonProperty(required = true, value = JSON_PROPERTY_ACTION) + ObservabilityPipelineMetricTagsProcessorRuleAction action, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_KEYS) List keys, + @JsonProperty(required = true, value = JSON_PROPERTY_MODE) + ObservabilityPipelineMetricTagsProcessorRuleMode mode) { + this.action = action; + this.unparsed |= !action.isValid(); + this.include = include; + this.keys = keys; + this.mode = mode; + this.unparsed |= !mode.isValid(); + } + + public ObservabilityPipelineMetricTagsProcessorRule action( + ObservabilityPipelineMetricTagsProcessorRuleAction action) { + this.action = action; + this.unparsed |= !action.isValid(); + return this; + } + + /** + * The action to take on tags with matching keys. + * + * @return action + */ + @JsonProperty(JSON_PROPERTY_ACTION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineMetricTagsProcessorRuleAction getAction() { + return action; + } + + public void setAction(ObservabilityPipelineMetricTagsProcessorRuleAction action) { + if (!action.isValid()) { + this.unparsed = true; + } + this.action = action; + } + + public ObservabilityPipelineMetricTagsProcessorRule include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which metrics this rule targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineMetricTagsProcessorRule keys(List keys) { + this.keys = keys; + return this; + } + + public ObservabilityPipelineMetricTagsProcessorRule addKeysItem(String keysItem) { + this.keys.add(keysItem); + return this; + } + + /** + * A list of tag keys to include or exclude. + * + * @return keys + */ + @JsonProperty(JSON_PROPERTY_KEYS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getKeys() { + return keys; + } + + public void setKeys(List keys) { + this.keys = keys; + } + + public ObservabilityPipelineMetricTagsProcessorRule mode( + ObservabilityPipelineMetricTagsProcessorRuleMode mode) { + this.mode = mode; + this.unparsed |= !mode.isValid(); + return this; + } + + /** + * The processing mode for tag filtering. 
+ * + * @return mode + */ + @JsonProperty(JSON_PROPERTY_MODE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineMetricTagsProcessorRuleMode getMode() { + return mode; + } + + public void setMode(ObservabilityPipelineMetricTagsProcessorRuleMode mode) { + if (!mode.isValid()) { + this.unparsed = true; + } + this.mode = mode; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineMetricTagsProcessorRule + */ + @JsonAnySetter + public ObservabilityPipelineMetricTagsProcessorRule putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineMetricTagsProcessorRule object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineMetricTagsProcessorRule observabilityPipelineMetricTagsProcessorRule = + (ObservabilityPipelineMetricTagsProcessorRule) o; + return Objects.equals(this.action, observabilityPipelineMetricTagsProcessorRule.action) + && Objects.equals(this.include, observabilityPipelineMetricTagsProcessorRule.include) + && Objects.equals(this.keys, observabilityPipelineMetricTagsProcessorRule.keys) + && Objects.equals(this.mode, observabilityPipelineMetricTagsProcessorRule.mode) + && Objects.equals( + this.additionalProperties, + observabilityPipelineMetricTagsProcessorRule.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(action, include, keys, mode, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineMetricTagsProcessorRule {\n"); + sb.append(" action: ").append(toIndentedString(action)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" keys: ").append(toIndentedString(keys)).append("\n"); + sb.append(" mode: ").append(toIndentedString(mode)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRuleAction.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRuleAction.java new file mode 100644 index 00000000000..8fb93fc42fd --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRuleAction.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The action to take on tags with matching keys. */ +@JsonSerialize( + using = + ObservabilityPipelineMetricTagsProcessorRuleAction + .ObservabilityPipelineMetricTagsProcessorRuleActionSerializer.class) +public class ObservabilityPipelineMetricTagsProcessorRuleAction extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("include", "exclude")); + + public static final ObservabilityPipelineMetricTagsProcessorRuleAction INCLUDE = + new ObservabilityPipelineMetricTagsProcessorRuleAction("include"); + public static final ObservabilityPipelineMetricTagsProcessorRuleAction EXCLUDE = + new ObservabilityPipelineMetricTagsProcessorRuleAction("exclude"); + + ObservabilityPipelineMetricTagsProcessorRuleAction(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineMetricTagsProcessorRuleActionSerializer + extends StdSerializer { + public ObservabilityPipelineMetricTagsProcessorRuleActionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineMetricTagsProcessorRuleActionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineMetricTagsProcessorRuleAction value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineMetricTagsProcessorRuleAction fromValue(String value) { + return new ObservabilityPipelineMetricTagsProcessorRuleAction(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRuleMode.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRuleMode.java new file mode 100644 index 00000000000..79e95b5becc --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorRuleMode.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processing mode for tag filtering. */ +@JsonSerialize( + using = + ObservabilityPipelineMetricTagsProcessorRuleMode + .ObservabilityPipelineMetricTagsProcessorRuleModeSerializer.class) +public class ObservabilityPipelineMetricTagsProcessorRuleMode extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("filter")); + + public static final ObservabilityPipelineMetricTagsProcessorRuleMode FILTER = + new ObservabilityPipelineMetricTagsProcessorRuleMode("filter"); + + ObservabilityPipelineMetricTagsProcessorRuleMode(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineMetricTagsProcessorRuleModeSerializer + extends StdSerializer { + public ObservabilityPipelineMetricTagsProcessorRuleModeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineMetricTagsProcessorRuleModeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineMetricTagsProcessorRuleMode value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineMetricTagsProcessorRuleMode fromValue(String value) { + return new ObservabilityPipelineMetricTagsProcessorRuleMode(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorType.java new file mode 100644 index 00000000000..f92ca2d5158 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricTagsProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be metric_tags. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineMetricTagsProcessorType + .ObservabilityPipelineMetricTagsProcessorTypeSerializer.class) +public class ObservabilityPipelineMetricTagsProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("metric_tags")); + + public static final ObservabilityPipelineMetricTagsProcessorType METRIC_TAGS = + new ObservabilityPipelineMetricTagsProcessorType("metric_tags"); + + ObservabilityPipelineMetricTagsProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineMetricTagsProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineMetricTagsProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineMetricTagsProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineMetricTagsProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineMetricTagsProcessorType fromValue(String value) { + return new ObservabilityPipelineMetricTagsProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestination.java index 014f7edd62a..4b5a6059657 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The new_relic destination sends logs to the New Relic platform. */ +/** + * The new_relic destination sends logs to the New Relic platform. + * + *
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineNewRelicDestination.JSON_PROPERTY_ID, ObservabilityPipelineNewRelicDestination.JSON_PROPERTY_INPUTS, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessor.java index d130d36743b..b3febd86c13 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessor.java @@ -22,6 +22,8 @@ /** * The ocsf_mapper processor transforms logs into the OCSF schema using a predefined * mapping configuration. + * + *
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineOcsfMapperProcessor.JSON_PROPERTY_DISPLAY_NAME, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpenSearchDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpenSearchDestination.java index b9d0be60441..226e77d2de2 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpenSearchDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpenSearchDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The opensearch destination writes logs to an OpenSearch cluster. */ +/** + * The opensearch destination writes logs to an OpenSearch cluster. + * + *
Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineOpenSearchDestination.JSON_PROPERTY_BULK_INDEX, ObservabilityPipelineOpenSearchDestination.JSON_PROPERTY_ID, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySource.java new file mode 100644 index 00000000000..e4a4e4e7912 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySource.java @@ -0,0 +1,276 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The opentelemetry source receives telemetry data using the OpenTelemetry Protocol + * (OTLP) over gRPC and HTTP. + * + *
Supported pipeline types: logs + */ +@JsonPropertyOrder({ + ObservabilityPipelineOpentelemetrySource.JSON_PROPERTY_GRPC_ADDRESS_KEY, + ObservabilityPipelineOpentelemetrySource.JSON_PROPERTY_HTTP_ADDRESS_KEY, + ObservabilityPipelineOpentelemetrySource.JSON_PROPERTY_ID, + ObservabilityPipelineOpentelemetrySource.JSON_PROPERTY_TLS, + ObservabilityPipelineOpentelemetrySource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineOpentelemetrySource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_GRPC_ADDRESS_KEY = "grpc_address_key"; + private String grpcAddressKey; + + public static final String JSON_PROPERTY_HTTP_ADDRESS_KEY = "http_address_key"; + private String httpAddressKey; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineOpentelemetrySourceType type = + ObservabilityPipelineOpentelemetrySourceType.OPENTELEMETRY; + + public ObservabilityPipelineOpentelemetrySource() {} + + @JsonCreator + public ObservabilityPipelineOpentelemetrySource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineOpentelemetrySourceType type) { + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineOpentelemetrySource grpcAddressKey(String grpcAddressKey) { + this.grpcAddressKey = grpcAddressKey; + return this; + } + + /** + * Environment variable name containing the gRPC server address for receiving OTLP data. Must be a + * valid environment variable name (alphanumeric characters and underscores only). + * + * @return grpcAddressKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_GRPC_ADDRESS_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getGrpcAddressKey() { + return grpcAddressKey; + } + + public void setGrpcAddressKey(String grpcAddressKey) { + this.grpcAddressKey = grpcAddressKey; + } + + public ObservabilityPipelineOpentelemetrySource httpAddressKey(String httpAddressKey) { + this.httpAddressKey = httpAddressKey; + return this; + } + + /** + * Environment variable name containing the HTTP server address for receiving OTLP data. Must be a + * valid environment variable name (alphanumeric characters and underscores only). + * + * @return httpAddressKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_HTTP_ADDRESS_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getHttpAddressKey() { + return httpAddressKey; + } + + public void setHttpAddressKey(String httpAddressKey) { + this.httpAddressKey = httpAddressKey; + } + + public ObservabilityPipelineOpentelemetrySource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineOpentelemetrySource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineOpentelemetrySource type( + ObservabilityPipelineOpentelemetrySourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be opentelemetry. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineOpentelemetrySourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineOpentelemetrySourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineOpentelemetrySource + */ + @JsonAnySetter + public ObservabilityPipelineOpentelemetrySource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineOpentelemetrySource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineOpentelemetrySource observabilityPipelineOpentelemetrySource = + (ObservabilityPipelineOpentelemetrySource) o; + return Objects.equals( + this.grpcAddressKey, observabilityPipelineOpentelemetrySource.grpcAddressKey) + && Objects.equals( + this.httpAddressKey, observabilityPipelineOpentelemetrySource.httpAddressKey) + && Objects.equals(this.id, observabilityPipelineOpentelemetrySource.id) + && Objects.equals(this.tls, observabilityPipelineOpentelemetrySource.tls) + && Objects.equals(this.type, observabilityPipelineOpentelemetrySource.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineOpentelemetrySource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(grpcAddressKey, httpAddressKey, id, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineOpentelemetrySource {\n"); + sb.append(" grpcAddressKey: ").append(toIndentedString(grpcAddressKey)).append("\n"); + sb.append(" httpAddressKey: ").append(toIndentedString(httpAddressKey)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySourceType.java new file mode 100644 index 00000000000..ffeef30f86e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpentelemetrySourceType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be opentelemetry. 
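A short construction sketch for the source above; the component id and environment variable names are hypothetical:

import com.datadog.api.client.v2.model.ObservabilityPipelineOpentelemetrySource;
import com.datadog.api.client.v2.model.ObservabilityPipelineOpentelemetrySourceType;

public class OpentelemetrySourceSketch {
  public static void main(String[] args) {
    // Required fields (id, type) go through the constructor. Note that the
    // address keys name environment variables holding the listen addresses,
    // not the addresses themselves.
    ObservabilityPipelineOpentelemetrySource source =
        new ObservabilityPipelineOpentelemetrySource(
                "otel-source-1",
                ObservabilityPipelineOpentelemetrySourceType.OPENTELEMETRY)
            .grpcAddressKey("OTLP_GRPC_ADDRESS")
            .httpAddressKey("OTLP_HTTP_ADDRESS");
    System.out.println(source);
  }
}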
*/ +@JsonSerialize( + using = + ObservabilityPipelineOpentelemetrySourceType + .ObservabilityPipelineOpentelemetrySourceTypeSerializer.class) +public class ObservabilityPipelineOpentelemetrySourceType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("opentelemetry")); + + public static final ObservabilityPipelineOpentelemetrySourceType OPENTELEMETRY = + new ObservabilityPipelineOpentelemetrySourceType("opentelemetry"); + + ObservabilityPipelineOpentelemetrySourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineOpentelemetrySourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineOpentelemetrySourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineOpentelemetrySourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineOpentelemetrySourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineOpentelemetrySourceType fromValue(String value) { + return new ObservabilityPipelineOpentelemetrySourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessor.java index 6c55c0b3734..9f2de492a2b 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessor.java @@ -22,6 +22,8 @@ /** * The parse_grok processor extracts structured fields from unstructured log messages * using Grok patterns. + * + *
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineParseGrokProcessor.JSON_PROPERTY_DISABLE_LIBRARY_RULES, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseJSONProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseJSONProcessor.java index 626348d368b..2656725e5d4 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseJSONProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseJSONProcessor.java @@ -20,6 +20,8 @@ /** * The parse_json processor extracts JSON from a specified field and flattens it into * the event. This is useful when logs contain embedded JSON as a string. + * + *
Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineParseJSONProcessor.JSON_PROPERTY_DISPLAY_NAME, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java new file mode 100644 index 00000000000..ca82ef4606d --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessor.java @@ -0,0 +1,505 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The parse_xml processor parses XML from a specified field and extracts it into the + * event. + * + *
Supported pipeline types: logs + */ +@JsonPropertyOrder({ + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ALWAYS_USE_TEXT_KEY, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ATTR_PREFIX, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_DISPLAY_NAME, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_FIELD, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_INCLUDE_ATTR, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_PARSE_BOOL, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_PARSE_NULL, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_PARSE_NUMBER, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_TEXT_KEY, + ObservabilityPipelineParseXMLProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineParseXMLProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ALWAYS_USE_TEXT_KEY = "always_use_text_key"; + private Boolean alwaysUseTextKey; + + public static final String JSON_PROPERTY_ATTR_PREFIX = "attr_prefix"; + private String attrPrefix; + + public static final String JSON_PROPERTY_DISPLAY_NAME = "display_name"; + private String displayName; + + public static final String JSON_PROPERTY_ENABLED = "enabled"; + private Boolean enabled; + + public static final String JSON_PROPERTY_FIELD = "field"; + private String field; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INCLUDE_ATTR = "include_attr"; + private Boolean includeAttr; + + public static final String JSON_PROPERTY_PARSE_BOOL = "parse_bool"; + private Boolean parseBool; + + public static final String JSON_PROPERTY_PARSE_NULL = "parse_null"; + private Boolean parseNull; + + public static final String JSON_PROPERTY_PARSE_NUMBER = "parse_number"; + private Boolean parseNumber; + + public static final String JSON_PROPERTY_TEXT_KEY = "text_key"; + private String textKey; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineParseXMLProcessorType type = + ObservabilityPipelineParseXMLProcessorType.PARSE_XML; + + public ObservabilityPipelineParseXMLProcessor() {} + + @JsonCreator + public ObservabilityPipelineParseXMLProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, + @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineParseXMLProcessorType type) { + this.enabled = enabled; + this.field = field; + this.id = id; + this.include = include; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineParseXMLProcessor alwaysUseTextKey(Boolean alwaysUseTextKey) { + this.alwaysUseTextKey = alwaysUseTextKey; + return this; + } + + /** + * Whether to always use a text key for element content. 
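For orientation before the individual option accessors below, a construction sketch; the field, id, and query values are hypothetical:

import com.datadog.api.client.v2.model.ObservabilityPipelineParseXMLProcessor;
import com.datadog.api.client.v2.model.ObservabilityPipelineParseXMLProcessorType;

public class ParseXmlProcessorSketch {
  public static void main(String[] args) {
    // Required fields follow the @JsonCreator order shown above:
    // (enabled, field, id, include, type). The XML parsing options are
    // all optional and set fluently.
    ObservabilityPipelineParseXMLProcessor parseXml =
        new ObservabilityPipelineParseXMLProcessor(
                true,
                "message", // hypothetical log field containing an XML string
                "parse-xml-1", // hypothetical component id
                "source:legacy-app", // hypothetical include query
                ObservabilityPipelineParseXMLProcessorType.PARSE_XML)
            .includeAttr(true)
            .attrPrefix("@")
            .parseBool(true)
            .parseNumber(true);
    System.out.println(parseXml);
  }
}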
+ * + * @return alwaysUseTextKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ALWAYS_USE_TEXT_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getAlwaysUseTextKey() { + return alwaysUseTextKey; + } + + public void setAlwaysUseTextKey(Boolean alwaysUseTextKey) { + this.alwaysUseTextKey = alwaysUseTextKey; + } + + public ObservabilityPipelineParseXMLProcessor attrPrefix(String attrPrefix) { + this.attrPrefix = attrPrefix; + return this; + } + + /** + * The prefix to use for XML attributes in the parsed output. + * + * @return attrPrefix + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ATTR_PREFIX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getAttrPrefix() { + return attrPrefix; + } + + public void setAttrPrefix(String attrPrefix) { + this.attrPrefix = attrPrefix; + } + + public ObservabilityPipelineParseXMLProcessor displayName(String displayName) { + this.displayName = displayName; + return this; + } + + /** + * The display name for a component. + * + * @return displayName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public ObservabilityPipelineParseXMLProcessor enabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + /** + * Whether this processor is enabled. + * + * @return enabled + */ + @JsonProperty(JSON_PROPERTY_ENABLED) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public ObservabilityPipelineParseXMLProcessor field(String field) { + this.field = field; + return this; + } + + /** + * The name of the log field that contains an XML string. + * + * @return field + */ + @JsonProperty(JSON_PROPERTY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public ObservabilityPipelineParseXMLProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineParseXMLProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineParseXMLProcessor includeAttr(Boolean includeAttr) { + this.includeAttr = includeAttr; + return this; + } + + /** + * Whether to include XML attributes in the parsed output. 
+ * + * @return includeAttr + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_INCLUDE_ATTR) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getIncludeAttr() { + return includeAttr; + } + + public void setIncludeAttr(Boolean includeAttr) { + this.includeAttr = includeAttr; + } + + public ObservabilityPipelineParseXMLProcessor parseBool(Boolean parseBool) { + this.parseBool = parseBool; + return this; + } + + /** + * Whether to parse boolean values from strings. + * + * @return parseBool + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PARSE_BOOL) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getParseBool() { + return parseBool; + } + + public void setParseBool(Boolean parseBool) { + this.parseBool = parseBool; + } + + public ObservabilityPipelineParseXMLProcessor parseNull(Boolean parseNull) { + this.parseNull = parseNull; + return this; + } + + /** + * Whether to parse null values. + * + * @return parseNull + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PARSE_NULL) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getParseNull() { + return parseNull; + } + + public void setParseNull(Boolean parseNull) { + this.parseNull = parseNull; + } + + public ObservabilityPipelineParseXMLProcessor parseNumber(Boolean parseNumber) { + this.parseNumber = parseNumber; + return this; + } + + /** + * Whether to parse numeric values from strings. + * + * @return parseNumber + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PARSE_NUMBER) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getParseNumber() { + return parseNumber; + } + + public void setParseNumber(Boolean parseNumber) { + this.parseNumber = parseNumber; + } + + public ObservabilityPipelineParseXMLProcessor textKey(String textKey) { + this.textKey = textKey; + return this; + } + + /** + * The key name to use for text content within XML elements. Must be at least 1 character if + * specified. + * + * @return textKey + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TEXT_KEY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getTextKey() { + return textKey; + } + + public void setTextKey(String textKey) { + this.textKey = textKey; + } + + public ObservabilityPipelineParseXMLProcessor type( + ObservabilityPipelineParseXMLProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be parse_xml. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineParseXMLProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineParseXMLProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
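The same additional-properties holder appears on every model in this patch; it is how fields added to the API after this client was generated survive a round trip. Continuing the parseXml instance from the sketch above, with a hypothetical field name:

// Anything the OAS document does not declare lands here via @JsonAnySetter
// and is echoed back on serialization via @JsonAnyGetter.
parseXml.putAdditionalProperty("experimental_flag", true);
Object flag = parseXml.getAdditionalProperty("experimental_flag"); // Boolean.TRUE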
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineParseXMLProcessor + */ + @JsonAnySetter + public ObservabilityPipelineParseXMLProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineParseXMLProcessor object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineParseXMLProcessor observabilityPipelineParseXmlProcessor = + (ObservabilityPipelineParseXMLProcessor) o; + return Objects.equals( + this.alwaysUseTextKey, observabilityPipelineParseXmlProcessor.alwaysUseTextKey) + && Objects.equals(this.attrPrefix, observabilityPipelineParseXmlProcessor.attrPrefix) + && Objects.equals(this.displayName, observabilityPipelineParseXmlProcessor.displayName) + && Objects.equals(this.enabled, observabilityPipelineParseXmlProcessor.enabled) + && Objects.equals(this.field, observabilityPipelineParseXmlProcessor.field) + && Objects.equals(this.id, observabilityPipelineParseXmlProcessor.id) + && Objects.equals(this.include, observabilityPipelineParseXmlProcessor.include) + && Objects.equals(this.includeAttr, observabilityPipelineParseXmlProcessor.includeAttr) + && Objects.equals(this.parseBool, observabilityPipelineParseXmlProcessor.parseBool) + && Objects.equals(this.parseNull, observabilityPipelineParseXmlProcessor.parseNull) + && Objects.equals(this.parseNumber, observabilityPipelineParseXmlProcessor.parseNumber) + && Objects.equals(this.textKey, observabilityPipelineParseXmlProcessor.textKey) + && Objects.equals(this.type, observabilityPipelineParseXmlProcessor.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineParseXmlProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + alwaysUseTextKey, + attrPrefix, + displayName, + enabled, + field, + id, + include, + includeAttr, + parseBool, + parseNull, + parseNumber, + textKey, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineParseXMLProcessor {\n"); + sb.append(" alwaysUseTextKey: ").append(toIndentedString(alwaysUseTextKey)).append("\n"); + sb.append(" attrPrefix: ").append(toIndentedString(attrPrefix)).append("\n"); + sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); + sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" field: ").append(toIndentedString(field)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" includeAttr: 
").append(toIndentedString(includeAttr)).append("\n"); + sb.append(" parseBool: ").append(toIndentedString(parseBool)).append("\n"); + sb.append(" parseNull: ").append(toIndentedString(parseNull)).append("\n"); + sb.append(" parseNumber: ").append(toIndentedString(parseNumber)).append("\n"); + sb.append(" textKey: ").append(toIndentedString(textKey)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java new file mode 100644 index 00000000000..8101a254703 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseXMLProcessorType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be parse_xml. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineParseXMLProcessorType + .ObservabilityPipelineParseXMLProcessorTypeSerializer.class) +public class ObservabilityPipelineParseXMLProcessorType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("parse_xml")); + + public static final ObservabilityPipelineParseXMLProcessorType PARSE_XML = + new ObservabilityPipelineParseXMLProcessorType("parse_xml"); + + ObservabilityPipelineParseXMLProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineParseXMLProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineParseXMLProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineParseXMLProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineParseXMLProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineParseXMLProcessorType fromValue(String value) { + return new ObservabilityPipelineParseXMLProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java deleted file mode 100644 index 6ec67b62962..00000000000 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. - * This product includes software developed at Datadog (https://www.datadoghq.com/). - * Copyright 2019-Present Datadog, Inc. - */ - -package com.datadog.api.client.v2.model; - -import com.datadog.api.client.ModelEnum; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.SerializerProvider; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import com.fasterxml.jackson.databind.ser.std.StdSerializer; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; - -/** SASL mechanism used for Kafka authentication. 
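This enum is deleted rather than dropped outright: per the diffstat, the patch introduces a shared ObservabilityPipelineKafkaSaslMechanism used by both the Kafka source and the new Kafka destination. Assuming the replacement follows the same ModelEnum pattern as the class removed below, migration is a type-name swap; a hedged sketch:

// Before (removed in this patch):
// ObservabilityPipelinePipelineKafkaSourceSaslMechanism mech =
//     ObservabilityPipelinePipelineKafkaSourceSaslMechanism.PLAIN;

// After: same wire values, shared across source and destination.
ObservabilityPipelineKafkaSaslMechanism mech =
    ObservabilityPipelineKafkaSaslMechanism.fromValue("SCRAM-SHA-512");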
*/ -@JsonSerialize( - using = - ObservabilityPipelinePipelineKafkaSourceSaslMechanism - .ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer.class) -public class ObservabilityPipelinePipelineKafkaSourceSaslMechanism extends ModelEnum { - - private static final Set allowedValues = - new HashSet(Arrays.asList("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512")); - - public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism PLAIN = - new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("PLAIN"); - public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism SCRAMNOT_SHANOT_256 = - new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-256"); - public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism SCRAMNOT_SHANOT_512 = - new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-512"); - - ObservabilityPipelinePipelineKafkaSourceSaslMechanism(String value) { - super(value, allowedValues); - } - - public static class ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer - extends StdSerializer { - public ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer( - Class t) { - super(t); - } - - public ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer() { - this(null); - } - - @Override - public void serialize( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism value, - JsonGenerator jgen, - SerializerProvider provider) - throws IOException, JsonProcessingException { - jgen.writeObject(value.value); - } - } - - @JsonCreator - public static ObservabilityPipelinePipelineKafkaSourceSaslMechanism fromValue(String value) { - return new ObservabilityPipelinePipelineKafkaSourceSaslMechanism(value); - } -} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java index 20442a9ea24..4832a55b924 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java @@ -20,8 +20,10 @@ import java.util.Objects; /** - * The Quota Processor measures logging traffic for logs that match a specified filter. When the - * configured daily quota is met, the processor can drop or alert. + * The quota processor measures logging traffic for logs that match a specified filter. + * When the configured daily quota is met, the processor can drop or alert. + * + *

Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_DISPLAY_NAME, @@ -35,6 +37,7 @@ ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_OVERFLOW_ACTION, ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_OVERRIDES, ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_PARTITION_FIELDS, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_TOO_MANY_BUCKETS_ACTION, ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_TYPE }) @jakarta.annotation.Generated( @@ -75,6 +78,9 @@ public class ObservabilityPipelineQuotaProcessor { public static final String JSON_PROPERTY_PARTITION_FIELDS = "partition_fields"; private List partitionFields = null; + public static final String JSON_PROPERTY_TOO_MANY_BUCKETS_ACTION = "too_many_buckets_action"; + private ObservabilityPipelineQuotaProcessorOverflowAction tooManyBucketsAction; + public static final String JSON_PROPERTY_TYPE = "type"; private ObservabilityPipelineQuotaProcessorType type = ObservabilityPipelineQuotaProcessorType.QUOTA; @@ -128,9 +134,10 @@ public ObservabilityPipelineQuotaProcessor dropEvents(Boolean dropEvents) { } /** - * If set to true, logs that matched the quota filter and sent after the quota has - * been met are dropped; only logs that did not match the filter query continue through the - * pipeline. + * If set to true, logs that match the quota filter and are sent after the quota is + * exceeded are dropped. Logs that do not match the filter continue through the pipeline. + * Note: You can set either drop_events or overflow_action + * , but not both. * * @return dropEvents */ @@ -279,9 +286,9 @@ public ObservabilityPipelineQuotaProcessor overflowAction( } /** - * The action to take when the quota is exceeded. Options: - drop: Drop the event. - - * no_action: Let the event pass through. - overflow_routing: Route to - * an overflow destination. + * The action to take when the quota or bucket limit is exceeded. Options: - drop: + * Drop the event. - no_action: Let the event pass through. - overflow_routing + * : Route to an overflow destination. * * @return overflowAction */ @@ -365,6 +372,35 @@ public void setPartitionFields(List partitionFields) { this.partitionFields = partitionFields; } + public ObservabilityPipelineQuotaProcessor tooManyBucketsAction( + ObservabilityPipelineQuotaProcessorOverflowAction tooManyBucketsAction) { + this.tooManyBucketsAction = tooManyBucketsAction; + this.unparsed |= !tooManyBucketsAction.isValid(); + return this; + } + + /** + * The action to take when the quota or bucket limit is exceeded. Options: - drop: + * Drop the event. - no_action: Let the event pass through. - overflow_routing + * : Route to an overflow destination. 
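A minimal sketch of how the new too_many_buckets_action field composes with the existing overflow settings. The component id, filter query, and chosen actions are illustrative, and the fluent setters follow the generator's usual builder pattern rather than a documented recipe:

import com.datadog.api.client.v2.model.ObservabilityPipelineQuotaProcessor;
import com.datadog.api.client.v2.model.ObservabilityPipelineQuotaProcessorOverflowAction;

public class QuotaProcessorSketch {
  public static void main(String[] args) {
    ObservabilityPipelineQuotaProcessor quota =
        new ObservabilityPipelineQuotaProcessor()
            .id("quota-processor")          // hypothetical component id
            .include("service:my-service")  // hypothetical filter query
            // Per the Javadoc above, set either dropEvents(true) or an
            // overflowAction, but not both.
            .overflowAction(
                ObservabilityPipelineQuotaProcessorOverflowAction.OVERFLOW_ROUTING)
            // New in this version: what to do when the bucket limit is exceeded.
            .tooManyBucketsAction(
                ObservabilityPipelineQuotaProcessorOverflowAction.DROP);
    System.out.println(quota);
  }
}

Both fields reuse the same ObservabilityPipelineQuotaProcessorOverflowAction enum, which is why its description now says "quota or bucket limit".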
+ * + * @return tooManyBucketsAction + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TOO_MANY_BUCKETS_ACTION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineQuotaProcessorOverflowAction getTooManyBucketsAction() { + return tooManyBucketsAction; + } + + public void setTooManyBucketsAction( + ObservabilityPipelineQuotaProcessorOverflowAction tooManyBucketsAction) { + if (!tooManyBucketsAction.isValid()) { + this.unparsed = true; + } + this.tooManyBucketsAction = tooManyBucketsAction; + } + public ObservabilityPipelineQuotaProcessor type(ObservabilityPipelineQuotaProcessorType type) { this.type = type; this.unparsed |= !type.isValid(); @@ -459,6 +495,8 @@ public boolean equals(Object o) { && Objects.equals(this.overflowAction, observabilityPipelineQuotaProcessor.overflowAction) && Objects.equals(this.overrides, observabilityPipelineQuotaProcessor.overrides) && Objects.equals(this.partitionFields, observabilityPipelineQuotaProcessor.partitionFields) + && Objects.equals( + this.tooManyBucketsAction, observabilityPipelineQuotaProcessor.tooManyBucketsAction) && Objects.equals(this.type, observabilityPipelineQuotaProcessor.type) && Objects.equals( this.additionalProperties, observabilityPipelineQuotaProcessor.additionalProperties); @@ -478,6 +516,7 @@ public int hashCode() { overflowAction, overrides, partitionFields, + tooManyBucketsAction, type, additionalProperties); } @@ -499,6 +538,9 @@ public String toString() { sb.append(" overflowAction: ").append(toIndentedString(overflowAction)).append("\n"); sb.append(" overrides: ").append(toIndentedString(overrides)).append("\n"); sb.append(" partitionFields: ").append(toIndentedString(partitionFields)).append("\n"); + sb.append(" tooManyBucketsAction: ") + .append(toIndentedString(tooManyBucketsAction)) + .append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); sb.append(" additionalProperties: ") .append(toIndentedString(additionalProperties)) diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java index ddb04343a50..953d483a274 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java @@ -19,9 +19,9 @@ import java.util.Set; /** - * The action to take when the quota is exceeded. Options: - drop: Drop the event. - - * no_action: Let the event pass through. - overflow_routing: Route to an - * overflow destination. + * The action to take when the quota or bucket limit is exceeded. Options: - drop: Drop + * the event. - no_action: Let the event pass through. - overflow_routing: + * Route to an overflow destination. */ @JsonSerialize( using = diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessor.java index 408ac3a7bf7..37de48c1f8f 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessor.java @@ -22,6 +22,8 @@ /** * The reduce processor aggregates and merges logs based on matching keys and merge * strategies. + * + *

Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineReduceProcessor.JSON_PROPERTY_DISPLAY_NAME, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRemoveFieldsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRemoveFieldsProcessor.java index 3b7a8ba180a..a37a300c44a 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRemoveFieldsProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRemoveFieldsProcessor.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The remove_fields processor deletes specified fields from logs. */ +/** + * The remove_fields processor deletes specified fields from logs. + * + *

Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineRemoveFieldsProcessor.JSON_PROPERTY_DISPLAY_NAME, ObservabilityPipelineRemoveFieldsProcessor.JSON_PROPERTY_ENABLED, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessor.java index 60c2b3c54b6..694a6724128 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessor.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The rename_fields processor changes field names. */ +/** + * The rename_fields processor changes field names. + * + *

Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineRenameFieldsProcessor.JSON_PROPERTY_DISPLAY_NAME, ObservabilityPipelineRenameFieldsProcessor.JSON_PROPERTY_ENABLED, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogDestination.java index 3a2cd8d24a8..bb99d29f889 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogDestination.java @@ -22,6 +22,8 @@ /** * The rsyslog destination forwards logs to an external rsyslog server * over TCP or UDP using the syslog protocol. + * + *

Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineRsyslogDestination.JSON_PROPERTY_ID, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogSource.java index ecf24cdb4f3..e04978449d8 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogSource.java @@ -20,6 +20,8 @@ /** * The rsyslog source listens for logs over TCP or UDP from an rsyslog * server using the syslog protocol. + * + *

Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineRsyslogSource.JSON_PROPERTY_ID, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java index 0c555e38a0e..f66e041373e 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java @@ -13,18 +13,24 @@ import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Objects; -/** The sample processor allows probabilistic sampling of logs at a fixed rate. */ +/** + * The sample processor allows probabilistic sampling of logs at a fixed rate. + * + *

Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineSampleProcessor.JSON_PROPERTY_DISPLAY_NAME, ObservabilityPipelineSampleProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineSampleProcessor.JSON_PROPERTY_GROUP_BY, ObservabilityPipelineSampleProcessor.JSON_PROPERTY_ID, ObservabilityPipelineSampleProcessor.JSON_PROPERTY_INCLUDE, ObservabilityPipelineSampleProcessor.JSON_PROPERTY_PERCENTAGE, - ObservabilityPipelineSampleProcessor.JSON_PROPERTY_RATE, ObservabilityPipelineSampleProcessor.JSON_PROPERTY_TYPE }) @jakarta.annotation.Generated( @@ -37,6 +43,9 @@ public class ObservabilityPipelineSampleProcessor { public static final String JSON_PROPERTY_ENABLED = "enabled"; private Boolean enabled; + public static final String JSON_PROPERTY_GROUP_BY = "group_by"; + private List groupBy = null; + public static final String JSON_PROPERTY_ID = "id"; private String id; @@ -46,9 +55,6 @@ public class ObservabilityPipelineSampleProcessor { public static final String JSON_PROPERTY_PERCENTAGE = "percentage"; private Double percentage; - public static final String JSON_PROPERTY_RATE = "rate"; - private Long rate; - public static final String JSON_PROPERTY_TYPE = "type"; private ObservabilityPipelineSampleProcessorType type = ObservabilityPipelineSampleProcessorType.SAMPLE; @@ -60,11 +66,13 @@ public ObservabilityPipelineSampleProcessor( @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_PERCENTAGE) Double percentage, @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) ObservabilityPipelineSampleProcessorType type) { this.enabled = enabled; this.id = id; this.include = include; + this.percentage = percentage; this.type = type; this.unparsed |= !type.isValid(); } @@ -110,6 +118,35 @@ public void setEnabled(Boolean enabled) { this.enabled = enabled; } + public ObservabilityPipelineSampleProcessor groupBy(List groupBy) { + this.groupBy = groupBy; + return this; + } + + public ObservabilityPipelineSampleProcessor addGroupByItem(String groupByItem) { + if (this.groupBy == null) { + this.groupBy = new ArrayList<>(); + } + this.groupBy.add(groupByItem); + return this; + } + + /** + * Optional list of fields to group events by. Each group is sampled independently. + * + * @return groupBy + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_GROUP_BY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getGroupBy() { + return groupBy; + } + + public void setGroupBy(List groupBy) { + this.groupBy = groupBy; + } + public ObservabilityPipelineSampleProcessor id(String id) { this.id = id; return this; @@ -161,9 +198,8 @@ public ObservabilityPipelineSampleProcessor percentage(Double percentage) { * * @return percentage */ - @jakarta.annotation.Nullable @JsonProperty(JSON_PROPERTY_PERCENTAGE) - @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) public Double getPercentage() { return percentage; } @@ -172,27 +208,6 @@ public void setPercentage(Double percentage) { this.percentage = percentage; } - public ObservabilityPipelineSampleProcessor rate(Long rate) { - this.rate = rate; - return this; - } - - /** - * Number of events to sample (1 in N). 
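A minimal sketch of the revised sample processor contract shown in this hunk: percentage moves into the required-args constructor while the legacy rate field is removed, and the optional group_by list makes sampling independent per group. The id, query, and grouping fields are illustrative assumptions:

import com.datadog.api.client.v2.model.ObservabilityPipelineSampleProcessor;
import com.datadog.api.client.v2.model.ObservabilityPipelineSampleProcessorType;
import java.util.Arrays;

public class SampleProcessorSketch {
  public static void main(String[] args) {
    ObservabilityPipelineSampleProcessor sample =
        new ObservabilityPipelineSampleProcessor(
                true,                  // enabled
                "sample-processor",    // hypothetical component id
                "service:my-service",  // hypothetical filter query
                10.0,                  // percentage: keep roughly 10% of matches
                ObservabilityPipelineSampleProcessorType.SAMPLE)
            // Hypothetical fields; each (service, host) pair samples independently.
            .groupBy(Arrays.asList("service", "host"));
    System.out.println(sample);
  }
}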
minimum: 1 - * - * @return rate - */ - @jakarta.annotation.Nullable - @JsonProperty(JSON_PROPERTY_RATE) - @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) - public Long getRate() { - return rate; - } - - public void setRate(Long rate) { - this.rate = rate; - } - public ObservabilityPipelineSampleProcessor type(ObservabilityPipelineSampleProcessorType type) { this.type = type; this.unparsed |= !type.isValid(); @@ -276,10 +291,10 @@ public boolean equals(Object o) { (ObservabilityPipelineSampleProcessor) o; return Objects.equals(this.displayName, observabilityPipelineSampleProcessor.displayName) && Objects.equals(this.enabled, observabilityPipelineSampleProcessor.enabled) + && Objects.equals(this.groupBy, observabilityPipelineSampleProcessor.groupBy) && Objects.equals(this.id, observabilityPipelineSampleProcessor.id) && Objects.equals(this.include, observabilityPipelineSampleProcessor.include) && Objects.equals(this.percentage, observabilityPipelineSampleProcessor.percentage) - && Objects.equals(this.rate, observabilityPipelineSampleProcessor.rate) && Objects.equals(this.type, observabilityPipelineSampleProcessor.type) && Objects.equals( this.additionalProperties, observabilityPipelineSampleProcessor.additionalProperties); @@ -288,7 +303,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - displayName, enabled, id, include, percentage, rate, type, additionalProperties); + displayName, enabled, groupBy, id, include, percentage, type, additionalProperties); } @Override @@ -297,10 +312,10 @@ public String toString() { sb.append("class ObservabilityPipelineSampleProcessor {\n"); sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" groupBy: ").append(toIndentedString(groupBy)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" include: ").append(toIndentedString(include)).append("\n"); sb.append(" percentage: ").append(toIndentedString(percentage)).append("\n"); - sb.append(" rate: ").append(toIndentedString(rate)).append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); sb.append(" additionalProperties: ") .append(toIndentedString(additionalProperties)) diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessor.java index f552451f8b5..b3ffefda2a5 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessor.java @@ -22,6 +22,8 @@ /** * The sensitive_data_scanner processor detects and optionally redacts sensitive data * in log events. + * + *

Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineSensitiveDataScannerProcessor.JSON_PROPERTY_DISPLAY_NAME, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java index e74c5ffd699..b197df9ed19 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java @@ -19,12 +19,16 @@ /** Options for defining a custom regex pattern. */ @JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.JSON_PROPERTY_DESCRIPTION, ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.JSON_PROPERTY_RULE }) @jakarta.annotation.Generated( value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") public class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions { @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DESCRIPTION = "description"; + private String description; + public static final String JSON_PROPERTY_RULE = "rule"; private String rule; @@ -36,6 +40,28 @@ public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions( this.rule = rule; } + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions description( + String description) { + this.description = description; + return this; + } + + /** + * Human-readable description providing context about a sensitive data scanner rule + * + * @return description + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DESCRIPTION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions rule(String rule) { this.rule = rule; return this; @@ -119,6 +145,9 @@ public boolean equals(Object o) { observabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions = (ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions) o; return Objects.equals( + this.description, + observabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.description) + && Objects.equals( this.rule, observabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.rule) && Objects.equals( this.additionalProperties, @@ -128,13 +157,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(rule, additionalProperties); + return Objects.hash(description, rule, additionalProperties); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions {\n"); + sb.append(" description: ").append(toIndentedString(description)).append("\n"); sb.append(" rule: ").append(toIndentedString(rule)).append("\n"); sb.append(" additionalProperties: ") .append(toIndentedString(additionalProperties)) diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java 
b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java index 76f9882783c..35fdd21bbad 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java @@ -19,6 +19,7 @@ /** Options for selecting a predefined library pattern and enabling keyword support. */ @JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.JSON_PROPERTY_DESCRIPTION, ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.JSON_PROPERTY_ID, ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions .JSON_PROPERTY_USE_RECOMMENDED_KEYWORDS @@ -27,6 +28,9 @@ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") public class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions { @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DESCRIPTION = "description"; + private String description; + public static final String JSON_PROPERTY_ID = "id"; private String id; @@ -41,6 +45,28 @@ public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions( this.id = id; } + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions description( + String description) { + this.description = description; + return this; + } + + /** + * Human-readable description providing context about a sensitive data scanner rule + * + * @return description + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DESCRIPTION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions id(String id) { this.id = id; return this; @@ -146,6 +172,9 @@ public boolean equals(Object o) { observabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions = (ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions) o; return Objects.equals( + this.description, + observabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.description) + && Objects.equals( this.id, observabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.id) && Objects.equals( this.useRecommendedKeywords, @@ -159,13 +188,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(id, useRecommendedKeywords, additionalProperties); + return Objects.hash(description, id, useRecommendedKeywords, additionalProperties); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions {\n"); + sb.append(" description: ").append(toIndentedString(description)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" useRecommendedKeywords: ") .append(toIndentedString(useRecommendedKeywords)) diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestination.java index 01f5acac7ee..afa0ca3f007 100644 --- 
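A minimal sketch of the new optional description on both sensitive-data-scanner pattern-option classes, assuming the generator's usual no-arg constructor plus fluent setters. The regex, library pattern id, and wording are illustrative, not values from the spec:

import com.datadog.api.client.v2.model.ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions;
import com.datadog.api.client.v2.model.ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions;

public class ScannerPatternSketch {
  public static void main(String[] args) {
    ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions custom =
        new ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions()
            .rule("\\b\\d{16}\\b") // hypothetical card-number-like pattern
            .description("Flags 16-digit sequences that may be payment card numbers");

    ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions library =
        new ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions()
            .id("credit_card") // hypothetical library pattern id
            .useRecommendedKeywords(true)
            .description("Library credit card rule with recommended keywords");

    System.out.println(custom);
    System.out.println(library);
  }
}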
a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The sentinel_one destination sends logs to SentinelOne. */ +/** + * The sentinel_one destination sends logs to SentinelOne. + * + *

Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineSentinelOneDestination.JSON_PROPERTY_ID, ObservabilityPipelineSentinelOneDestination.JSON_PROPERTY_INPUTS, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSocketDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSocketDestination.java index 775a97879fd..c2193bcfa08 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSocketDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSocketDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The socket destination sends logs over TCP or UDP to a remote server. */ +/** + * The socket destination sends logs over TCP or UDP to a remote server. + * + *

Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineSocketDestination.JSON_PROPERTY_ENCODING, ObservabilityPipelineSocketDestination.JSON_PROPERTY_FRAMING, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSocketSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSocketSource.java index 540d3d87e1b..41489242b8a 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSocketSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSocketSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The socket source ingests logs over TCP or UDP. */ +/** + * The socket source ingests logs over TCP or UDP. + * + *

Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineSocketSource.JSON_PROPERTY_FRAMING, ObservabilityPipelineSocketSource.JSON_PROPERTY_ID, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java new file mode 100644 index 00000000000..1b99a65680d --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessor.java @@ -0,0 +1,316 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The split_array processor splits array fields into separate events based on + * configured rules. + * + *

Supported pipeline types: logs + */ +@JsonPropertyOrder({ + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_ARRAYS, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_DISPLAY_NAME, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_ENABLED, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineSplitArrayProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSplitArrayProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ARRAYS = "arrays"; + private List arrays = new ArrayList<>(); + + public static final String JSON_PROPERTY_DISPLAY_NAME = "display_name"; + private String displayName; + + public static final String JSON_PROPERTY_ENABLED = "enabled"; + private Boolean enabled; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSplitArrayProcessorType type = + ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY; + + public ObservabilityPipelineSplitArrayProcessor() {} + + @JsonCreator + public ObservabilityPipelineSplitArrayProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ARRAYS) + List arrays, + @JsonProperty(required = true, value = JSON_PROPERTY_ENABLED) Boolean enabled, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSplitArrayProcessorType type) { + this.arrays = arrays; + this.enabled = enabled; + this.id = id; + this.include = include; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSplitArrayProcessor arrays( + List arrays) { + this.arrays = arrays; + for (ObservabilityPipelineSplitArrayProcessorArrayConfig item : arrays) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineSplitArrayProcessor addArraysItem( + ObservabilityPipelineSplitArrayProcessorArrayConfig arraysItem) { + this.arrays.add(arraysItem); + this.unparsed |= arraysItem.unparsed; + return this; + } + + /** + * A list of array split configurations. + * + * @return arrays + */ + @JsonProperty(JSON_PROPERTY_ARRAYS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getArrays() { + return arrays; + } + + public void setArrays(List arrays) { + this.arrays = arrays; + } + + public ObservabilityPipelineSplitArrayProcessor displayName(String displayName) { + this.displayName = displayName; + return this; + } + + /** + * The display name for a component. + * + * @return displayName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISPLAY_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getDisplayName() { + return displayName; + } + + public void setDisplayName(String displayName) { + this.displayName = displayName; + } + + public ObservabilityPipelineSplitArrayProcessor enabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + /** + * Whether this processor is enabled. 
+ * + * @return enabled + */ + @JsonProperty(JSON_PROPERTY_ENABLED) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public ObservabilityPipelineSplitArrayProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSplitArrayProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. For split_array, + * this should typically be *. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineSplitArrayProcessor type( + ObservabilityPipelineSplitArrayProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be split_array. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSplitArrayProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineSplitArrayProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSplitArrayProcessor + */ + @JsonAnySetter + public ObservabilityPipelineSplitArrayProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSplitArrayProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSplitArrayProcessor observabilityPipelineSplitArrayProcessor = + (ObservabilityPipelineSplitArrayProcessor) o; + return Objects.equals(this.arrays, observabilityPipelineSplitArrayProcessor.arrays) + && Objects.equals(this.displayName, observabilityPipelineSplitArrayProcessor.displayName) + && Objects.equals(this.enabled, observabilityPipelineSplitArrayProcessor.enabled) + && Objects.equals(this.id, observabilityPipelineSplitArrayProcessor.id) + && Objects.equals(this.include, observabilityPipelineSplitArrayProcessor.include) + && Objects.equals(this.type, observabilityPipelineSplitArrayProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSplitArrayProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(arrays, displayName, enabled, id, include, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSplitArrayProcessor {\n"); + sb.append(" arrays: ").append(toIndentedString(arrays)).append("\n"); + sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n"); + sb.append(" enabled: ").append(toIndentedString(enabled)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java new file mode 100644 index 00000000000..fab0f28e287 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorArrayConfig.java @@ -0,0 +1,180 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Configuration for a single array split operation. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSplitArrayProcessorArrayConfig.JSON_PROPERTY_FIELD, + ObservabilityPipelineSplitArrayProcessorArrayConfig.JSON_PROPERTY_INCLUDE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSplitArrayProcessorArrayConfig { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELD = "field"; + private String field; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public ObservabilityPipelineSplitArrayProcessorArrayConfig() {} + + @JsonCreator + public ObservabilityPipelineSplitArrayProcessorArrayConfig( + @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include) { + this.field = field; + this.include = include; + } + + public ObservabilityPipelineSplitArrayProcessorArrayConfig field(String field) { + this.field = field; + return this; + } + + /** + * The path to the array field to split. + * + * @return field + */ + @JsonProperty(JSON_PROPERTY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public ObservabilityPipelineSplitArrayProcessorArrayConfig include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this array split operation targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSplitArrayProcessorArrayConfig + */ + @JsonAnySetter + public ObservabilityPipelineSplitArrayProcessorArrayConfig putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSplitArrayProcessorArrayConfig object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSplitArrayProcessorArrayConfig + observabilityPipelineSplitArrayProcessorArrayConfig = + (ObservabilityPipelineSplitArrayProcessorArrayConfig) o; + return Objects.equals(this.field, observabilityPipelineSplitArrayProcessorArrayConfig.field) + && Objects.equals(this.include, observabilityPipelineSplitArrayProcessorArrayConfig.include) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSplitArrayProcessorArrayConfig.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(field, include, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSplitArrayProcessorArrayConfig {\n"); + sb.append(" field: ").append(toIndentedString(field)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java new file mode 100644 index 00000000000..ec68b2c2563 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplitArrayProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be split_array. 
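A minimal sketch assembling the new split_array processor from the required-args constructors shown above. The array field path, component id, and queries are illustrative; the `*` include follows the recommendation in the processor's own Javadoc:

import com.datadog.api.client.v2.model.ObservabilityPipelineSplitArrayProcessor;
import com.datadog.api.client.v2.model.ObservabilityPipelineSplitArrayProcessorArrayConfig;
import com.datadog.api.client.v2.model.ObservabilityPipelineSplitArrayProcessorType;
import java.util.Collections;

public class SplitArraySketch {
  public static void main(String[] args) {
    // Split the hypothetical `records` array so each element becomes its own event.
    ObservabilityPipelineSplitArrayProcessorArrayConfig arrayConfig =
        new ObservabilityPipelineSplitArrayProcessorArrayConfig(
            "records", // field: path to the array to split
            "*");      // include: which logs this split operation targets

    ObservabilityPipelineSplitArrayProcessor splitArray =
        new ObservabilityPipelineSplitArrayProcessor(
            Collections.singletonList(arrayConfig),
            true,                    // enabled
            "split-array-processor", // hypothetical component id
            "*",                     // processor-level include
            ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY);
    System.out.println(splitArray);
  }
}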
*/ +@JsonSerialize( + using = + ObservabilityPipelineSplitArrayProcessorType + .ObservabilityPipelineSplitArrayProcessorTypeSerializer.class) +public class ObservabilityPipelineSplitArrayProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("split_array")); + + public static final ObservabilityPipelineSplitArrayProcessorType SPLIT_ARRAY = + new ObservabilityPipelineSplitArrayProcessorType("split_array"); + + ObservabilityPipelineSplitArrayProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSplitArrayProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSplitArrayProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSplitArrayProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSplitArrayProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSplitArrayProcessorType fromValue(String value) { + return new ObservabilityPipelineSplitArrayProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestination.java index 3eafb768273..8defa988f70 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestination.java @@ -22,6 +22,8 @@ /** * The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector * (HEC). + * + *

Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_AUTO_EXTRACT_TIMESTAMP, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecSource.java index a554df706a2..c06c397b025 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API. */ +/** + * The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API. + * + *

Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineSplunkHecSource.JSON_PROPERTY_ID, ObservabilityPipelineSplunkHecSource.JSON_PROPERTY_TLS, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSource.java index 8589db20825..7f5ba51ca2d 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSource.java @@ -20,6 +20,8 @@ /** * The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS * is supported for secure transmission. + * + *

Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineSplunkTcpSource.JSON_PROPERTY_ID, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestination.java index ae5f61dcbce..241f315a1a7 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestination.java @@ -19,7 +19,11 @@ import java.util.Map; import java.util.Objects; -/** The sumo_logic destination forwards logs to Sumo Logic. */ +/** + * The sumo_logic destination forwards logs to Sumo Logic. + * + *

Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineSumoLogicDestination.JSON_PROPERTY_ENCODING, ObservabilityPipelineSumoLogicDestination.JSON_PROPERTY_HEADER_CUSTOM_FIELDS, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicSource.java index cfdcae3a4d3..1c73ba31cd1 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicSource.java @@ -17,7 +17,11 @@ import java.util.Map; import java.util.Objects; -/** The sumo_logic source receives logs from Sumo Logic collectors. */ +/** + * The sumo_logic source receives logs from Sumo Logic collectors. + * + *

Supported pipeline types: logs + */ @JsonPropertyOrder({ ObservabilityPipelineSumoLogicSource.JSON_PROPERTY_ID, ObservabilityPipelineSumoLogicSource.JSON_PROPERTY_TYPE diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgDestination.java index b8b70fe8139..f048dd4b192 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgDestination.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgDestination.java @@ -22,6 +22,8 @@ /** * The syslog_ng destination forwards logs to an external syslog-ng server * over TCP or UDP using the syslog protocol. + * + *

Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineSyslogNgDestination.JSON_PROPERTY_ID, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgSource.java index 6f371c6e840..d6537d7f92a 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgSource.java @@ -20,6 +20,8 @@ /** * The syslog_ng source listens for logs over TCP or UDP from a syslog-ng * server using the syslog protocol. + * + *

Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineSyslogNgSource.JSON_PROPERTY_ID, diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineThrottleProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineThrottleProcessor.java index 2278ae96c76..9eb9788dbe3 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineThrottleProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineThrottleProcessor.java @@ -22,6 +22,8 @@ /** * The throttle processor limits the number of events that pass through over a given * time window. + * + *

Supported pipeline types: logs */ @JsonPropertyOrder({ ObservabilityPipelineThrottleProcessor.JSON_PROPERTY_DISPLAY_NAME, diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze index 73133f1c75e..24768509dc4 100644 --- a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:15.575Z \ No newline at end of file +2026-01-08T10:31:26.805Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.json b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.json index c339ea4d384..f88e9579f9a 100644 --- a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.json +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.json @@ -7,7 +7,7 @@ }, "headers": {}, "method": "POST", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "path": "/api/v2/obs-pipelines/pipelines", "keepAlive": false, "secure": true }, @@ -27,6 +27,6 @@ "timeToLive": { "unlimited": true }, - "id": "95a57011-26df-3343-b440-f0295430ff3b" + "id": "f570a090-dc8d-8dc9-a94c-5375590acb1d" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze index 20165353d5c..2af2b8c19e7 100644 --- a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:16.062Z \ No newline at end of file +2026-01-08T10:31:27.246Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json index 107f907c7e8..807c3fea76d 100644 --- a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json @@ -7,12 +7,12 @@ }, "headers": {}, "method": "POST", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "path": "/api/v2/obs-pipelines/pipelines", "keepAlive": false, "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"bd8d693c-dc2c-11f0-bf69-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"my-processor-group\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"enabled\":true,\"id\":\"my-processor-group\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"3075f634-ec7d-11f0-a7f6-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"my-processor-group\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"enabled\":true,\"id\":\"my-processor-group\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/vnd.api+json" @@ -27,13 +27,13 @@ "timeToLive": { "unlimited": true }, - "id": "2cc7f51a-ea1a-7fda-97ed-d909fad481a4" + "id": "060acb65-7a12-b2a9-8635-609549001241" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/bd8d693c-dc2c-11f0-bf69-da7ad0900002", + "path": "/api/v2/obs-pipelines/pipelines/3075f634-ec7d-11f0-a7f6-da7ad0900002", "keepAlive": false, "secure": true }, @@ -52,6 +52,6 @@ "timeToLive": { "unlimited": true }, - "id": "9d38f9f7-007b-c09a-6053-3acbf9ac24b4" + "id": "401f56a9-d469-23df-dc5b-a59c4fa07b2a" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze index 85deda9192b..9d6f73cb826 100644 --- a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:17.165Z \ No newline at end of file +2026-01-08T10:31:28.580Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.json b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.json index 633377ecb4c..6dcf9a0ec24 100644 --- a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.json +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.json @@ -3,7 +3,7 @@ "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6", + "path": "/api/v2/obs-pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6", "keepAlive": false, "secure": true }, @@ -23,6 +23,6 @@ "timeToLive": { "unlimited": true }, - "id": "d504871e-95a9-3921-d8b6-426d67fce395" + "id": "5d00cc15-9785-8838-1fbb-b4a1545c67cc" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze index 201ee9bda87..0d1b0655cb9 100644 --- a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:17.716Z \ No newline at end of file +2026-01-08T10:31:29.062Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json index a2c24593f46..8d1df6a2a90 100644 --- a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json @@ -7,12 +7,12 
@@ }, "headers": {}, "method": "POST", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "path": "/api/v2/obs-pipelines/pipelines", "keepAlive": false, "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"be89fea4-dc2c-11f0-bdea-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"318c25c0-ec7d-11f0-a7f8-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/vnd.api+json" @@ -27,13 +27,13 @@ "timeToLive": { "unlimited": true }, - "id": "d6a6f7a2-f347-564f-cbfb-e972ad921c9a" + "id": "9ecc9b2e-1386-d424-2d27-09de086642de" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/be89fea4-dc2c-11f0-bdea-da7ad0900002", + "path": "/api/v2/obs-pipelines/pipelines/318c25c0-ec7d-11f0-a7f8-da7ad0900002", "keepAlive": false, "secure": true }, @@ -52,13 +52,13 @@ "timeToLive": { "unlimited": true }, - "id": "2f02fb97-f9e3-4f3a-313a-6d3e4dc210d9" + "id": "a99f789c-f211-9fe4-ad23-fd9dd0cccf60" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/be89fea4-dc2c-11f0-bdea-da7ad0900002", + "path": "/api/v2/obs-pipelines/pipelines/318c25c0-ec7d-11f0-a7f8-da7ad0900002", "keepAlive": false, "secure": true }, @@ -78,6 +78,6 @@ "timeToLive": { "unlimited": true }, - "id": "2f02fb97-f9e3-4f3a-313a-6d3e4dc210da" + "id": "a99f789c-f211-9fe4-ad23-fd9dd0cccf61" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze index 2da6d0e5c1d..782d36e8d95 100644 --- a/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze +++ b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:20.018Z \ No newline at end of file +2026-01-08T10:31:31.281Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json index 7a12e819679..e12bc9bd741 100644 --- 
a/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json +++ b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json @@ -7,12 +7,12 @@ }, "headers": {}, "method": "POST", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "path": "/api/v2/obs-pipelines/pipelines", "keepAlive": false, "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"bfe664a4-dc2c-11f0-bdec-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"32dabdec-ec7d-11f0-a7fa-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/vnd.api+json" @@ -27,18 +27,18 @@ "timeToLive": { "unlimited": true }, - "id": "d6a6f7a2-f347-564f-cbfb-e972ad921c98" + "id": "9ecc9b2e-1386-d424-2d27-09de086642dc" }, { "httpRequest": { "headers": {}, "method": "GET", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/bfe664a4-dc2c-11f0-bdec-da7ad0900002", + "path": "/api/v2/obs-pipelines/pipelines/32dabdec-ec7d-11f0-a7fa-da7ad0900002", "keepAlive": false, "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"bfe664a4-dc2c-11f0-bdec-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"32dabdec-ec7d-11f0-a7fa-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter 
Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/vnd.api+json" @@ -53,13 +53,13 @@ "timeToLive": { "unlimited": true }, - "id": "21f7e5ab-ea39-6e8e-5605-c43a9e3acdd0" + "id": "796c5103-b89b-3184-a28d-b2df07eb3af9" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/bfe664a4-dc2c-11f0-bdec-da7ad0900002", + "path": "/api/v2/obs-pipelines/pipelines/32dabdec-ec7d-11f0-a7fa-da7ad0900002", "keepAlive": false, "secure": true }, @@ -78,6 +78,6 @@ "timeToLive": { "unlimited": true }, - "id": "83781d2a-55f3-79d9-1709-474817512e21" + "id": "f7b4ea30-50bb-1471-acf0-98e54e1564e7" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.freeze b/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.freeze index a1ae2640d7d..713381d9f53 100644 --- a/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.freeze +++ b/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:22.038Z \ No newline at end of file +2026-01-08T10:31:33.196Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.json b/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.json index 3b86c9b32e2..239a196a08a 100644 --- a/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.json +++ b/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.json @@ -3,7 +3,7 @@ "httpRequest": { "headers": {}, "method": "GET", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "path": "/api/v2/obs-pipelines/pipelines", "queryStringParameters": { "page[size]": [ "0" @@ -28,6 +28,6 @@ "timeToLive": { "unlimited": true }, - "id": "986c80b1-2bab-98f5-4c61-f0c3e4c7fb8c" + "id": "560cc035-2b0e-950a-431f-343291637299" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.freeze index fb745af4ce5..7d97484ce04 100644 --- a/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.freeze +++ b/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:22.507Z \ No newline at end of file +2026-01-08T10:31:33.639Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.json b/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.json index 73cbac72655..8705dad44da 100644 --- a/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.json +++ b/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.json @@ -7,12 +7,12 @@ }, "headers": {}, "method": "POST", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "path": "/api/v2/obs-pipelines/pipelines", "keepAlive": false, "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"c162e83e-dc2c-11f0-bf6b-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"34447006-ec7d-11f0-8885-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/vnd.api+json" @@ -27,18 +27,18 @@ "timeToLive": { "unlimited": true }, - "id": "d6a6f7a2-f347-564f-cbfb-e972ad921c9b" + "id": "9ecc9b2e-1386-d424-2d27-09de086642df" }, { "httpRequest": { "headers": {}, "method": "GET", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "path": "/api/v2/obs-pipelines/pipelines", "keepAlive": false, "secure": true }, "httpResponse": { - "body": "{\"data\":[{\"id\":\"c162e83e-dc2c-11f0-bf6b-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}],\"meta\":{\"totalCount\":1}}\n", + "body": "{\"data\":[{\"id\":\"4bf478ba-dc68-11f0-87e9-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"a78e416a-de66-11f0-a039-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"http-server-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"http-source-1\"],\"type\":\"datadog_logs\"}],\"processors\":[],\"sources\":[{\"auth_strategy\":\"plain\",\"decoding\":\"json\",\"id\":\"http-source-1\",\"tls\":{\"ca_file\":\"/etc/ssl/certs/ca.crt\",\"crt_file\":\"/etc/ssl/certs/http.crt\",\"key_file\":\"/etc/ssl/private/http.key\"},\"type\":\"http_server\"}]}}},{\"id\":\"a84fd58c-de66-11f0-a03b-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"amazon_s3-source-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"s3-source-1\"],\"type\":\"datadog_logs\"}],\"processors\":[],\"sources\":[{\"auth\":{\"assume_role\":\"arn:aws:iam::123456789012:role/test-role\",\"external_id\":\"external-test-id\",\"session_name\":\"session-test\"},\"id\":\"s3-source-1\",\"region\":\"us-east-1\",\"tls\":{\"ca_file\":\"/etc/ssl/certs/s3.ca\",\"crt_file\":\"/etc/ssl/certs/s3.crt\",\"key_file\":\"/etc/ssl/private/s3.key\"},\"type\":\"amazon_s3\"}]}}},{\"id\":\"a42e22e0-df49-11f0-81d5-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"dedupe pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"dedupe-group-2\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"enabled\":true,\"id\":\"dedupe-group-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"processors\":[{\"enabled\":true,\"fields\":[\"log.message\",\"log.tags\"],\"id\":\"dedupe-match\",\"include\":\"*\",\"mode\":\"match\",\"type\":\"dedupe\"}]},{\"enabled\":true,\"id\":\"dedupe-group-2\",\"include\":\"*\",\"inputs\":[\"dedupe-group-1\"],\"processors\":[{\"enabled\":true,\"fields\":[\"log.source\",\"log.context\"],\"id\":\"dedupe-ignore\",\"include\":\"*\",\"mode\":\"ignore\",\"type\":\"dedupe\"}]}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"2cd3c342-e0c2-11f0-9d34-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-group-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"enabled\":true,\"id\":\"add-fields-group-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"processors\":[{\"enabled\":true,\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"},{\"name\":\"env\",\"value\":\"prod\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"type\":\"add_fields\"}]}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"20f4849c-e579-11f0-af79-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"fluent-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"fluent-source-1\"],\"type\":\"datadog_logs\"}],\"processors\":[],\"sources\":[{\"id\":\"fluent-source-1\",\"tls\":{\"ca_file\":\"/etc/ssl/certs/ca.crt\",\"crt_file\":\"/etc/ssl/certs/fluent.crt\",\"key_file\":\"/etc/ssl/private/fluent.key\"},\"type\":\"fluentd\"}]}}},{\"id\":\"15621afe-e669-11f0-bec3-da7ad0900002\",\"type\":\"
pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"dfbeb25a-e6c1-11f0-9bc1-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"923fbdb6-e771-11f0-9388-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"http-client pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"http-source-1\"],\"type\":\"datadog_logs\"}],\"processors\":[],\"sources\":[{\"auth_strategy\":\"basic\",\"decoding\":\"json\",\"id\":\"http-source-1\",\"scrape_interval_secs\":60,\"scrape_timeout_secs\":10,\"tls\":{\"crt_file\":\"/path/to/http.crt\"},\"type\":\"http_client\"}]}}},{\"id\":\"a7b600ce-e771-11f0-939c-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"newrelic pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"source-1\"],\"region\":\"us\",\"type\":\"new_relic\"}],\"processors\":[],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"306bab4c-e904-11f0-aa8a-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"splunk-hec-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"splunk-hec-source-1\"],\"type\":\"datadog_logs\"}],\"processors\":[],\"sources\":[{\"id\":\"splunk-hec-source-1\",\"tls\":{\"ca_file\":\"/etc/ssl/certs/ca.crt\",\"crt_file\":\"/etc/ssl/certs/splunk.crt\",\"key_file\":\"/etc/ssl/private/splunk.key\"},\"type\":\"splunk_hec\"}]}}},{\"id\":\"51faefca-e922-11f0-a260-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter 
Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"8d025dea-ea96-11f0-8a79-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"crowdstrike-next-gen-siem-destination-pipeline-basic\",\"config\":{\"destinations\":[{\"encoding\":\"raw_message\",\"id\":\"crowdstrike-dest-basic-1\",\"inputs\":[\"source-1\"],\"type\":\"crowdstrike_next_gen_siem\"}],\"processors\":[],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"ed4d493e-eabf-11f0-852d-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"29a560ae-ec7a-11f0-a7f4-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"34447006-ec7d-11f0-8885-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}],\"meta\":{\"totalCount\":16}}\n", "headers": { "Content-Type": [ "application/vnd.api+json" @@ -53,13 +53,13 @@ "timeToLive": { "unlimited": true }, - "id": "2a1ecc5b-42fa-71d4-1e8a-9990a3446289" + "id": "f908afea-1e88-3d94-ee8f-10c7e579a929" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c162e83e-dc2c-11f0-bf6b-da7ad0900002", + "path": "/api/v2/obs-pipelines/pipelines/34447006-ec7d-11f0-8885-da7ad0900002", "keepAlive": false, "secure": true }, @@ -78,6 +78,6 @@ "timeToLive": { "unlimited": true }, - "id": "d8ab8d62-3a8b-2ffb-b86e-202601f9c376" + "id": "7689b28f-416d-0570-2e0d-5eb92f91aee2" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze index 92a0e9377f0..df682f19fd2 100644 --- 
a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:24.455Z \ No newline at end of file +2026-01-08T10:31:35.615Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json index bb05957ab93..fc7676cf499 100644 --- a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json @@ -7,12 +7,12 @@ }, "headers": {}, "method": "POST", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "path": "/api/v2/obs-pipelines/pipelines", "keepAlive": false, "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"c28a5ad0-dc2c-11f0-bdee-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"3574771e-ec7d-11f0-8887-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/vnd.api+json" @@ -27,7 +27,7 @@ "timeToLive": { "unlimited": true }, - "id": "d6a6f7a2-f347-564f-cbfb-e972ad921c99" + "id": "9ecc9b2e-1386-d424-2d27-09de086642dd" }, { "httpRequest": { @@ -37,7 +37,7 @@ }, "headers": {}, "method": "PUT", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c28a5ad0-dc2c-11f0-bdee-da7ad0900002", + "path": "/api/v2/obs-pipelines/pipelines/3574771e-ec7d-11f0-8887-da7ad0900002", "keepAlive": false, "secure": true }, @@ -57,13 +57,13 @@ "timeToLive": { "unlimited": true }, - "id": "6acef4da-c6d9-3beb-fcdd-2b5a9ed501e8" + "id": "9a28b63c-c076-494d-75a2-1add01e21145" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c28a5ad0-dc2c-11f0-bdee-da7ad0900002", + "path": "/api/v2/obs-pipelines/pipelines/3574771e-ec7d-11f0-8887-da7ad0900002", "keepAlive": false, "secure": true }, @@ -82,6 +82,6 @@ "timeToLive": { "unlimited": true }, - "id": "a5a337ff-4b63-4eb5-2643-23ba383eee73" + "id": "9061beae-7d1c-38ef-0964-511777e7a636" } ] \ No newline at end of file diff --git 
a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze index b083fd34ae0..08083ddbff4 100644 --- a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:26.411Z \ No newline at end of file +2026-01-08T10:31:37.359Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.json b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.json index 25f172a08f7..5ea4b2ebcf4 100644 --- a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.json +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.json @@ -7,7 +7,7 @@ }, "headers": {}, "method": "PUT", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6", + "path": "/api/v2/obs-pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6", "keepAlive": false, "secure": true }, @@ -27,6 +27,6 @@ "timeToLive": { "unlimited": true }, - "id": "e34d84ff-45f4-43b4-0953-04142fd8332a" + "id": "089fee1d-abba-518e-75be-ce1a90edc48f" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze index b9cbd5fcac4..93f5da79a61 100644 --- a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:26.929Z \ No newline at end of file +2026-01-08T10:31:37.837Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json index 817d09683bb..f4bef7f0c32 100644 --- a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json @@ -7,12 +7,12 @@ }, "headers": {}, "method": "POST", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "path": "/api/v2/obs-pipelines/pipelines", "keepAlive": false, "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"c40401e0-dc2c-11f0-bf6d-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"36c0a3e0-ec7d-11f0-a7fc-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"processor-group-0\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"display_name\":\"My Processor Group\",\"enabled\":true,\"id\":\"processor-group-0\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"display_name\":\"My Filter Processor\",\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/vnd.api+json" @@ -27,7 +27,7 @@ "timeToLive": { "unlimited": true }, - "id": "d6a6f7a2-f347-564f-cbfb-e972ad921c97" + "id": "9ecc9b2e-1386-d424-2d27-09de086642db" }, { "httpRequest": { @@ -37,12 +37,12 @@ }, "headers": {}, "method": "PUT", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c40401e0-dc2c-11f0-bf6d-da7ad0900002", + "path": "/api/v2/obs-pipelines/pipelines/36c0a3e0-ec7d-11f0-a7fc-da7ad0900002", "keepAlive": false, "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"c40401e0-dc2c-11f0-bf6d-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Updated Pipeline Name\",\"config\":{\"destinations\":[{\"id\":\"updated-datadog-logs-destination-id\",\"inputs\":[\"my-processor-group\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"enabled\":true,\"id\":\"my-processor-group\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"36c0a3e0-ec7d-11f0-a7fc-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Updated Pipeline Name\",\"config\":{\"destinations\":[{\"id\":\"updated-datadog-logs-destination-id\",\"inputs\":[\"my-processor-group\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"enabled\":true,\"id\":\"my-processor-group\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"processors\":[{\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"status:error\",\"type\":\"filter\"}]}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/vnd.api+json" @@ -57,13 +57,13 @@ "timeToLive": { "unlimited": true }, - "id": "2c680eeb-05d9-7617-92c8-d73c172cec9c" + "id": "dba2746c-4b9d-1727-9294-da24d14bfeba" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c40401e0-dc2c-11f0-bf6d-da7ad0900002", + "path": "/api/v2/obs-pipelines/pipelines/36c0a3e0-ec7d-11f0-a7fc-da7ad0900002", "keepAlive": false, "secure": true }, @@ -82,6 +82,6 @@ "timeToLive": { "unlimited": true }, - "id": "b1525989-2e82-ec96-14f9-7184147ea0f7" + "id": "f28cda25-9016-2e9a-0665-36c8c4ccdcda" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.freeze b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.freeze index 745856e49a6..cc7370fd5c0 100644 --- a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.freeze +++ b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:29.179Z \ No 
newline at end of file +2026-01-08T10:31:40.039Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.json b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.json index 17be41230e7..fa06bb48495 100644 --- a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.json +++ b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.json @@ -7,7 +7,7 @@ }, "headers": {}, "method": "POST", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/validate", + "path": "/api/v2/obs-pipelines/pipelines/validate", "keepAlive": false, "secure": true }, @@ -27,6 +27,6 @@ "timeToLive": { "unlimited": true }, - "id": "863aee1f-0430-c0d4-eb65-21dbde4b995f" + "id": "672b67a5-8747-94e9-0e3a-12868e5db2c2" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.freeze index ecfcc54162a..fb8095bfe2b 100644 --- a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.freeze +++ b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.freeze @@ -1 +1 @@ -2025-12-18T16:15:29.647Z \ No newline at end of file +2026-01-08T10:31:40.500Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.json index 9b3fef29df6..d5d477b165b 100644 --- a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.json +++ b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.json @@ -7,7 +7,7 @@ }, "headers": {}, "method": "POST", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/validate", + "path": "/api/v2/obs-pipelines/pipelines/validate", "keepAlive": false, "secure": true }, @@ -27,6 +27,6 @@ "timeToLive": { "unlimited": true }, - "id": "621a711b-583d-6a39-1a85-b709238bf078" + "id": "77f6e197-9dba-2d03-a370-77dce88acbcd" } ] \ No newline at end of file diff --git a/src/test/resources/com/datadog/api/client/v2/api/given.json b/src/test/resources/com/datadog/api/client/v2/api/given.json index aa08181879c..f34bdccdee3 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/given.json +++ b/src/test/resources/com/datadog/api/client/v2/api/given.json @@ -727,6 +727,18 @@ "tag": "Monitors", "operationId": "CreateMonitorUserTemplate" }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n 
\"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" + } + ], + "step": "there is a valid \"pipeline\" in the system", + "key": "pipeline", + "tag": "Observability Pipelines", + "operationId": "CreatePipeline" + }, { "parameters": [ { @@ -879,18 +891,6 @@ "tag": "CSM Threats", "operationId": "CreateCSMThreatsAgentPolicy" }, - { - "parameters": [ - { - "name": "body", - "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" - } - ], - "step": "there is a valid \"pipeline\" in the system", - "key": "pipeline", - "tag": "Observability Pipelines", - "operationId": "CreatePipeline" - }, { "parameters": [ { diff --git a/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature b/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature index c43fa8b3b76..a9b17ec7fdf 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature +++ b/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature @@ -20,7 +20,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Conflict" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "pipeline_type": "logs", "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -115,7 +115,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "REPLACE.ME" - And body with 
value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "pipeline_type": "logs", "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict diff --git a/src/test/resources/com/datadog/api/client/v2/api/undo.json b/src/test/resources/com/datadog/api/client/v2/api/undo.json index 0867c10cd71..ef8bd02a1cf 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/undo.json +++ b/src/test/resources/com/datadog/api/client/v2/api/undo.json @@ -2838,6 +2838,49 @@ "type": "safe" } }, + "ListPipelines": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "CreatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "operationId": "DeletePipeline", + "parameters": [ + { + "name": "pipeline_id", + "source": "data.id" + } + ], + "type": "unsafe" + } + }, + "ValidatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "DeletePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "idempotent" + } + }, + "GetPipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "UpdatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "idempotent" + } + }, "CreateOnCallEscalationPolicy": { "tag": "On-Call", "undo": { @@ -3449,49 +3492,6 @@ "type": "idempotent" } }, - "ListPipelines": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "CreatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "operationId": "DeletePipeline", - "parameters": [ - { - "name": "pipeline_id", - "source": "data.id" - } - ], - "type": "unsafe" - } - }, - "ValidatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "DeletePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "idempotent" - } - }, - "GetPipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "UpdatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "idempotent" - } - }, "DeleteRestrictionPolicy": { "tag": "Restriction Policies", "undo": {