Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 7 additions & 1 deletion .generator/schemas/v1/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5734,6 +5734,12 @@ components:
Scope down exclusion filter to only a subset of logs with a log query.
example: "*"
type: string
sample_attribute:
description: |-
Sample attribute to use for the sampling of logs going through this exclusion filter.
When set, only the logs with the specified attribute are sampled.
example: "@ci.job_id"
type: string
sample_rate:
description: |-
Sample rate to apply to logs going through this exclusion filter,
Expand Down Expand Up @@ -29274,7 +29280,7 @@ paths:
Update an index as identified by its name.
Returns the Index object passed in the request body when the request is successful.

Using the `PUT` method updates your indexs configuration by **replacing**
Using the `PUT` method updates your index's configuration by **replacing**
your current configuration with the new one sent to your Datadog organization.
operationId: UpdateLogsIndex
parameters:
Expand Down
6 changes: 5 additions & 1 deletion examples/v1/logs-indexes/CreateLogsIndex.java
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,11 @@ public static void main(String[] args) {
.exclusionFilters(
Collections.singletonList(
new LogsExclusion()
.filter(new LogsExclusionFilter().query("*").sampleRate(1.0))
.filter(
new LogsExclusionFilter()
.query("*")
.sampleAttribute("@ci.job_id")
.sampleRate(1.0))
.name("payment")))
.filter(new LogsFilter().query("source:python"))
.name("main")
Expand Down
6 changes: 5 additions & 1 deletion examples/v1/logs-indexes/UpdateLogsIndex.java
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,11 @@ public static void main(String[] args) {
.exclusionFilters(
Collections.singletonList(
new LogsExclusion()
.filter(new LogsExclusionFilter().query("*").sampleRate(1.0))
.filter(
new LogsExclusionFilter()
.query("*")
.sampleAttribute("@ci.job_id")
.sampleRate(1.0))
.name("payment")))
.filter(new LogsFilter().query("source:python"))
.numFlexLogsRetentionDays(360L)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -698,7 +698,7 @@ public CompletableFuture<LogsIndex> updateLogsIndexAsync(
* Update an index as identified by its name. Returns the Index object passed in the request body
* when the request is successful.
*
* <p>Using the <code>PUT</code> method updates your indexs configuration by
* <p>Using the <code>PUT</code> method updates your index's configuration by
* <strong>replacing</strong> your current configuration with the new one sent to your Datadog
* organization.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
/** Exclusion filter is defined by a query, a sampling rule, and an active/inactive toggle. */
@JsonPropertyOrder({
LogsExclusionFilter.JSON_PROPERTY_QUERY,
LogsExclusionFilter.JSON_PROPERTY_SAMPLE_ATTRIBUTE,
LogsExclusionFilter.JSON_PROPERTY_SAMPLE_RATE
})
@jakarta.annotation.Generated(
Expand All @@ -29,6 +30,9 @@ public class LogsExclusionFilter {
public static final String JSON_PROPERTY_QUERY = "query";
private String query;

public static final String JSON_PROPERTY_SAMPLE_ATTRIBUTE = "sample_attribute";
private String sampleAttribute;

public static final String JSON_PROPERTY_SAMPLE_RATE = "sample_rate";
private Double sampleRate;

Expand Down Expand Up @@ -62,6 +66,28 @@ public void setQuery(String query) {
this.query = query;
}

/**
 * Fluent setter for the {@code sample_attribute} field.
 *
 * @param sampleAttribute attribute used to sample the logs matching this exclusion filter
 * @return this {@link LogsExclusionFilter} instance, for call chaining
 */
public LogsExclusionFilter sampleAttribute(String sampleAttribute) {
  this.setSampleAttribute(sampleAttribute);
  return this;
}

/**
 * Returns the sample attribute used for the sampling of logs going through this exclusion
 * filter. When set, only the logs carrying the specified attribute are sampled.
 *
 * @return the sample attribute, or {@code null} if not set
 */
@jakarta.annotation.Nullable
@JsonProperty(JSON_PROPERTY_SAMPLE_ATTRIBUTE)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
public String getSampleAttribute() {
  return this.sampleAttribute;
}

/**
 * Sets the attribute used to sample the logs going through this exclusion filter.
 *
 * @param sampleAttribute the attribute name (for example {@code "@ci.job_id"}); nullable
 */
public void setSampleAttribute(String sampleAttribute) {
  this.sampleAttribute = sampleAttribute;
}

public LogsExclusionFilter sampleRate(Double sampleRate) {
this.sampleRate = sampleRate;
return this;
Expand Down Expand Up @@ -140,20 +166,22 @@ public boolean equals(Object o) {
}
LogsExclusionFilter logsExclusionFilter = (LogsExclusionFilter) o;
return Objects.equals(this.query, logsExclusionFilter.query)
&& Objects.equals(this.sampleAttribute, logsExclusionFilter.sampleAttribute)
&& Objects.equals(this.sampleRate, logsExclusionFilter.sampleRate)
&& Objects.equals(this.additionalProperties, logsExclusionFilter.additionalProperties);
}

/**
 * Hash code consistent with {@code equals}: it must include every field that
 * {@code equals} compares, including the newly added {@code sampleAttribute}.
 */
@Override
public int hashCode() {
  // NOTE(review): the previous revision omitted sampleAttribute here while equals
  // compared it, which would break the equals/hashCode contract.
  return Objects.hash(query, sampleAttribute, sampleRate, additionalProperties);
}

@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class LogsExclusionFilter {\n");
sb.append(" query: ").append(toIndentedString(query)).append("\n");
sb.append(" sampleAttribute: ").append(toIndentedString(sampleAttribute)).append("\n");
sb.append(" sampleRate: ").append(toIndentedString(sampleRate)).append("\n");
sb.append(" additionalProperties: ")
.append(toIndentedString(additionalProperties))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,21 +11,21 @@ Feature: Logs Indexes
@generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
Scenario: Create an index returns "Invalid Parameter Error" response
Given new "CreateLogsIndex" request
And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "exclusion_filters": [{"filter": {"query": "*", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "name": "main", "num_flex_logs_retention_days": 360, "num_retention_days": 15, "tags": ["team:backend", "env:production"]}
And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "exclusion_filters": [{"filter": {"query": "*", "sample_attribute": "@ci.job_id", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "name": "main", "num_flex_logs_retention_days": 360, "num_retention_days": 15, "tags": ["team:backend", "env:production"]}
When the request is sent
Then the response status is 400 Invalid Parameter Error

@generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
Scenario: Create an index returns "OK" response
Given new "CreateLogsIndex" request
And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "exclusion_filters": [{"filter": {"query": "*", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "name": "main", "num_flex_logs_retention_days": 360, "num_retention_days": 15, "tags": ["team:backend", "env:production"]}
And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "exclusion_filters": [{"filter": {"query": "*", "sample_attribute": "@ci.job_id", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "name": "main", "num_flex_logs_retention_days": 360, "num_retention_days": 15, "tags": ["team:backend", "env:production"]}
When the request is sent
Then the response status is 200 OK

@generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
Scenario: Create an index returns "Unprocessable Entity" response
Given new "CreateLogsIndex" request
And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "exclusion_filters": [{"filter": {"query": "*", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "name": "main", "num_flex_logs_retention_days": 360, "num_retention_days": 15, "tags": ["team:backend", "env:production"]}
And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "exclusion_filters": [{"filter": {"query": "*", "sample_attribute": "@ci.job_id", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "name": "main", "num_flex_logs_retention_days": 360, "num_retention_days": 15, "tags": ["team:backend", "env:production"]}
When the request is sent
Then the response status is 422 Unprocessable Entity

Expand Down Expand Up @@ -73,15 +73,15 @@ Feature: Logs Indexes
Scenario: Update an index returns "Invalid Parameter Error" response
Given new "UpdateLogsIndex" request
And request contains "name" parameter from "REPLACE.ME"
And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "disable_daily_limit": false, "exclusion_filters": [{"filter": {"query": "*", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "num_flex_logs_retention_days": 360, "num_retention_days": 15, "tags": ["team:backend", "env:production"]}
And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "disable_daily_limit": false, "exclusion_filters": [{"filter": {"query": "*", "sample_attribute": "@ci.job_id", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "num_flex_logs_retention_days": 360, "num_retention_days": 15, "tags": ["team:backend", "env:production"]}
When the request is sent
Then the response status is 400 Invalid Parameter Error

@generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
Scenario: Update an index returns "OK" response
Given new "UpdateLogsIndex" request
And request contains "name" parameter from "REPLACE.ME"
And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "disable_daily_limit": false, "exclusion_filters": [{"filter": {"query": "*", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "num_flex_logs_retention_days": 360, "num_retention_days": 15, "tags": ["team:backend", "env:production"]}
And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "disable_daily_limit": false, "exclusion_filters": [{"filter": {"query": "*", "sample_attribute": "@ci.job_id", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "num_flex_logs_retention_days": 360, "num_retention_days": 15, "tags": ["team:backend", "env:production"]}
When the request is sent
Then the response status is 200 OK

Expand Down
Loading