Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
4b9cbb4
really big wip
Dav1dde Feb 11, 2026
9caae16
wip
Dav1dde Feb 12, 2026
c5abd79
very close
Dav1dde Feb 13, 2026
0d3fd0a
begin fixing outcomes
Dav1dde Feb 23, 2026
b27bb11
solve debug stack overflow (Items) and run apple/minidump processing …
Dav1dde Feb 23, 2026
ed02ff2
some unreal checkpoint
Dav1dde Feb 23, 2026
0e0676f
some progress, rm errorref, size limits, stuck at objectstore
Dav1dde Feb 25, 2026
eb6026b
progress, rate limits, outcomes crashes
Dav1dde Mar 3, 2026
b36efdc
Merge remote-tracking branch 'origin/master' into dav1d/error-processor
Dav1dde Mar 3, 2026
4c6e63d
rest of the owl
Dav1dde Mar 3, 2026
2043cb2
fixes and cleanup
Dav1dde Mar 3, 2026
6f255b5
ref(server): Remove profile type header in favor of platform header
Dav1dde Feb 27, 2026
0e4069c
some fixes
Dav1dde Mar 3, 2026
e3e1ba5
fix stack overflow
Dav1dde Mar 3, 2026
8fe0c1c
non processing gates
Dav1dde Mar 3, 2026
548cf1d
apply rate limit fix
Dav1dde Mar 3, 2026
b09638d
attachments fix
Dav1dde Mar 3, 2026
10649da
attachments fix
Dav1dde Mar 3, 2026
0378392
doc string fixes
Dav1dde Mar 3, 2026
7ecc8a1
nswitch test fixes
Dav1dde Mar 3, 2026
46a0e29
more cleanup
Dav1dde Mar 3, 2026
5c4c7d6
more cfg processing
Dav1dde Mar 3, 2026
01a465d
Merge branch 'master' into dav1d/error-processor
Dav1dde Mar 4, 2026
ded7297
code review: destructure, better comments
Dav1dde Mar 5, 2026
288d2dd
code review: tuple structs
Dav1dde Mar 5, 2026
be4ed1c
Merge remote-tracking branch 'origin/master' into dav1d/error-processor
Dav1dde Mar 5, 2026
c34b165
bugbot findings
Dav1dde Mar 5, 2026
764e7f5
bugbot fix
Dav1dde Mar 5, 2026
b6e6a86
event bug
Dav1dde Mar 5, 2026
5ccff39
metrics bug and linter problems
Dav1dde Mar 5, 2026
6bd63cc
Merge branch 'master' into dav1d/error-processor
Dav1dde Mar 5, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions relay-dynamic-config/src/feature.rs
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,9 @@ pub enum Feature {
/// Enable the upload endpoint for attachments.
#[serde(rename = "projects:relay-upload-endpoint")]
UploadEndpoint,
/// Enable the new Error processing pipeline in Relay.
#[serde(rename = "organizations:relay-new-error-processing")]
NewErrorProcessing,
/// Enable the new Client Reports pipeline in Relay.
#[serde(rename = "organizations:new-client-report-processing")]
NewClientReportProcessing,
Expand Down
10 changes: 10 additions & 0 deletions relay-server/src/constants.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,10 @@ use std::time::Duration;

include!(concat!(env!("OUT_DIR"), "/constants.gen.rs"));

/// Name of the custom tag in the crash user data for Sentry event payloads.
///
/// Only used by the processing pipeline, hence gated behind the `processing` feature.
#[cfg(feature = "processing")]
pub const SENTRY_CRASH_PAYLOAD_KEY: &str = "__sentry";

/// Name of the event attachment.
///
/// This is a special attachment that can contain a sentry event payload encoded as message pack.
Expand Down Expand Up @@ -38,3 +42,9 @@ pub const DEFAULT_CHECK_IN_CLIENT: &str = "relay-http";
/// The default retention for attachment, which defaults to 30 days currently.
#[cfg(feature = "processing")]
pub const DEFAULT_ATTACHMENT_RETENTION: Duration = Duration::from_hours(24 * 30);

/// Magic number indicating the dying message file is encoded by the sentry-switch SDK.
pub const NNSWITCH_SENTRY_MAGIC: &[u8] = b"sntr";

/// The file name that Nintendo uses for the dying message attachment in the events they forward.
pub const NNSWITCH_DYING_MESSAGE_FILENAME: &str = "dying_message.dat";
5 changes: 5 additions & 0 deletions relay-server/src/envelope/attachment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,9 @@ pub enum AttachmentType {
// A prosperodump crash report (binary data)
Prosperodump,

/// A Nintendo Switch dying message.
NintendoSwitchDyingMessage,

/// This is a binary attachment present in Unreal 4 events containing event context information.
///
/// This can be deserialized using the `symbolic` crate see
Expand Down Expand Up @@ -60,6 +63,7 @@ impl fmt::Display for AttachmentType {
AttachmentType::EventPayload => write!(f, "event.payload"),
AttachmentType::Prosperodump => write!(f, "playstation.prosperodump"),
AttachmentType::Breadcrumbs => write!(f, "event.breadcrumbs"),
AttachmentType::NintendoSwitchDyingMessage => write!(f, "nswitch.dying_message"),
AttachmentType::UnrealContext => write!(f, "unreal.context"),
AttachmentType::UnrealLogs => write!(f, "unreal.logs"),
AttachmentType::ViewHierarchy => write!(f, "event.view_hierarchy"),
Expand All @@ -80,6 +84,7 @@ impl std::str::FromStr for AttachmentType {
"event.applecrashreport" => AttachmentType::AppleCrashReport,
"event.payload" => AttachmentType::EventPayload,
"playstation.prosperodump" => AttachmentType::Prosperodump,
"nswitch.dying_message" => AttachmentType::NintendoSwitchDyingMessage,
"event.breadcrumbs" => AttachmentType::Breadcrumbs,
"event.view_hierarchy" => AttachmentType::ViewHierarchy,
"unreal.context" => AttachmentType::UnrealContext,
Expand Down
20 changes: 18 additions & 2 deletions relay-server/src/envelope/item.rs
Original file line number Diff line number Diff line change
Expand Up @@ -283,7 +283,22 @@ impl Item {
/// Returns the attachment type if this item is an attachment.
///
/// The type is primarily read from the `attachment_type` item header. As a fallback,
/// a Nintendo Switch dying message is inferred from the attachment's filename and its
/// leading magic bytes, since that protocol never got its own attachment type.
pub fn attachment_type(&self) -> Option<AttachmentType> {
    // TODO: consider to replace this with an ItemType?
    if let Some(ty) = self.headers.get(&ItemHeaderKey::AttachmentType) {
        return Some(ty);
    }

    // Unfortunately, when the switch protocol was decided on, it was never assigned a new
    // attachment type. That's why we have to infer it here from the filename and contents.
    if self.ty() == &ItemType::Attachment
        && self.filename() == Some(crate::constants::NNSWITCH_DYING_MESSAGE_FILENAME)
        && self
            .payload
            .starts_with(crate::constants::NNSWITCH_SENTRY_MAGIC)
    {
        return Some(AttachmentType::NintendoSwitchDyingMessage);
    }

    None
}

/// Sets the attachment type of this item.
Expand Down Expand Up @@ -625,7 +640,8 @@ impl Item {
| AttachmentType::Minidump
| AttachmentType::EventPayload
| AttachmentType::Prosperodump
| AttachmentType::Breadcrumbs,
| AttachmentType::Breadcrumbs
| AttachmentType::NintendoSwitchDyingMessage,
) => true,
Some(
AttachmentType::Attachment
Expand Down
11 changes: 10 additions & 1 deletion relay-server/src/envelope/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -210,6 +210,15 @@ impl<M> EnvelopeHeaders<M> {
pub fn sent_at(&self) -> Option<DateTime<Utc>> {
self.sent_at
}

/// Returns the specified header value, if present.
///
/// Looks up `name` in the free-form (`other`) headers; typed headers such as
/// `sent_at` have their own accessors and are not reachable through this method.
/// The `String: Borrow<K>` bound allows lookup by `&str` without allocating.
pub fn get_header<K>(&self, name: &K) -> Option<&Value>
where
    String: Borrow<K>,
    K: Ord + ?Sized,
{
    self.other.get(name)
}
}

#[doc(hidden)]
Expand Down Expand Up @@ -476,7 +485,7 @@ impl Envelope {
String: Borrow<K>,
K: Ord + ?Sized,
{
self.headers.other.get(name)
self.headers.get_header(name)
}

/// Sets the specified header value, returning the previous one if present.
Expand Down
16 changes: 8 additions & 8 deletions relay-server/src/managed/counted.rs
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ where
}
}

impl<T: Counted> Counted for Vec<T> {
impl<T: Counted> Counted for [T] {
fn quantities(&self) -> Quantities {
let mut quantities = BTreeMap::new();
for element in self {
Expand All @@ -251,14 +251,14 @@ impl<T: Counted> Counted for Vec<T> {
}
}

impl<T: Counted> Counted for Vec<T> {
fn quantities(&self) -> Quantities {
self.as_slice().quantities()
}
}

impl<T: Counted, const N: usize> Counted for SmallVec<[T; N]> {
fn quantities(&self) -> Quantities {
let mut quantities = BTreeMap::new();
for element in self {
for (category, size) in element.quantities() {
*quantities.entry(category).or_default() += size;
}
}
quantities.into_iter().collect()
self.as_slice().quantities()
Comment thread
Dav1dde marked this conversation as resolved.
}
}
6 changes: 3 additions & 3 deletions relay-server/src/processing/common.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
use crate::Envelope;
use crate::managed::{Managed, Rejected};
use crate::processing::ForwardContext;
#[cfg(feature = "processing")]
use crate::processing::StoreHandle;
use crate::processing::check_ins::CheckInsProcessor;
use crate::processing::errors::ErrorsProcessor;
use crate::processing::logs::LogsProcessor;
use crate::processing::profile_chunks::ProfileChunksProcessor;
use crate::processing::replays::ReplaysProcessor;
Expand Down Expand Up @@ -37,7 +36,7 @@ macro_rules! outputs {
#[cfg(feature = "processing")]
fn forward_store(
self,
s: StoreHandle<'_>,
s: crate::processing::StoreHandle<'_>,
ctx: ForwardContext<'_>,
) -> Result<(), Rejected<()>> {
match self {
Expand All @@ -60,6 +59,7 @@ macro_rules! outputs {

outputs!(
CheckIns => CheckInsProcessor,
Errors => ErrorsProcessor,
Logs => LogsProcessor,
ProfileChunks => ProfileChunksProcessor,
Sessions => SessionsProcessor,
Expand Down
88 changes: 88 additions & 0 deletions relay-server/src/processing/errors/dynamic_sampling.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
use chrono::Utc;
use relay_event_schema::protocol::{Contexts, TraceContext};
use relay_protocol::{Annotated, Empty as _};
use relay_sampling::config::RuleType;
use relay_sampling::evaluation::SamplingEvaluator;

use crate::managed::Managed;
use crate::processing::Context;
use crate::processing::errors::ExpandedError;
use crate::utils::SamplingResult;

/// Applies a dynamic sampling decision onto the error.
///
/// The function validates the DSC as well as tagging the error event with the sampling
/// decision of the associated trace.
pub async fn apply(error: &mut Managed<ExpandedError>, ctx: Context<'_>) {
    // Only run in processing to not compute the decision multiple times, and it is the most
    // accurate place, as other Relays may have unsupported inbound filter or sampling configs.
    if !ctx.is_processing() {
        return;
    }

    if ctx.sampling_project_info.is_none() {
        // If there is a DSC, the current project does not have access to the sampling project
        // -> remove the DSC.
        error.modify(|error, _| error.headers.remove_dsc());
        return;
    }

    // Only tag the event when a definite decision could be computed; otherwise leave the
    // event untouched.
    if let Some(sampled) = is_trace_fully_sampled(error, ctx).await {
        error.modify(|error, _| tag_error_with_sampling_decision(error, sampled));
    };
}
Comment thread
Dav1dde marked this conversation as resolved.

/// Writes the trace sampling decision into the event's trace context.
///
/// The `sampled` flag is only written when it has not been set yet, so a value
/// already assigned by a downstream Relay is never overridden.
fn tag_error_with_sampling_decision(error: &mut ExpandedError, sampled: bool) {
    if let Some(event) = error.event.value_mut() {
        // Fetch (or create) the trace context, which carries the `sampled` field.
        let trace_context = event
            .contexts
            .get_or_insert_with(Contexts::new)
            .get_or_default::<TraceContext>();

        // Overriding an existing value would make debugging downstream decisions harder,
        // so only fill in the flag when it is still empty.
        if trace_context.sampled.is_empty() {
            relay_log::trace!("tagged error with `sampled = {}` flag", sampled);
            trace_context.sampled = Annotated::new(sampled);
        }
    }
}

/// Runs dynamic sampling if the dsc and root project state are not None and returns whether the
/// transactions received with such dsc and project state would be kept or dropped by dynamic
/// sampling.
///
/// Returns `None` when no definite decision can be made: missing DSC, missing or invalid
/// sampling config, unsupported rules, or an unset `sampled` flag in the DSC.
async fn is_trace_fully_sampled(error: &ExpandedError, ctx: Context<'_>) -> Option<bool> {
    let dsc = error.headers.dsc()?;

    // The sampling config lives on the root (sampling) project, not the current project.
    let sampling_config = ctx
        .sampling_project_info
        .and_then(|s| s.config.sampling.as_ref())
        .and_then(|s| s.as_ref().ok())?;

    if sampling_config.unsupported() {
        // A processing Relay runs the latest version and should support all rules; anything
        // else here indicates a bug or an outdated deployment.
        if ctx.is_processing() {
            relay_log::error!("found unsupported rules even as processing relay");
        }

        return None;
    }

    // If the sampled field is not set, we prefer to not tag the error since we have no clue on
    // whether the head of the trace was kept or dropped on the client side.
    // In addition, if the head of the trace was dropped on the client we will immediately mark
    // the trace as not fully sampled.
    if !dsc.sampled? {
        return Some(false);
    }

    let evaluator = SamplingEvaluator::new(Utc::now());

    // Only trace rules apply here; transaction-level rules are irrelevant for the trace decision.
    let rules = sampling_config.filter_rules(RuleType::Trace);

    let evaluation = evaluator.match_rules(*dsc.trace_id, dsc, rules).await;
    Some(SamplingResult::from(evaluation).decision().is_keep())
}
75 changes: 75 additions & 0 deletions relay-server/src/processing/errors/errors/apple_crash_report.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
use relay_quotas::{DataCategory, RateLimits};

use crate::envelope::{AttachmentType, Item, ItemType};
use crate::managed::{Counted, Quantities, RecordKeeper};
use crate::processing::ForwardContext;
use crate::processing::errors::errors::{Context, Expansion, SentryError, utils};
use crate::processing::errors::{Error, Result};

/// An Apple crash report error, wrapping the raw crash report attachment [`Item`].
#[derive(Debug)]
pub struct AppleCrashReport(pub Item);

impl SentryError for AppleCrashReport {
    /// Expands an envelope containing an Apple crash report attachment into an error.
    ///
    /// Returns `Ok(None)` when the items do not contain an Apple crash report, leaving
    /// `items` untouched in that case.
    fn try_expand(items: &mut Vec<Item>, ctx: Context<'_>) -> Result<Option<Expansion<Self>>> {
        let Some(apple_crash_report) = utils::take_item_by(items, |item| {
            item.attachment_type() == Some(AttachmentType::AppleCrashReport)
        }) else {
            return Ok(None);
        };

        let mut metrics = Default::default();
        // `event` is only mutated inside the `if_processing!` block below, hence the
        // conditional lint expectation for non-processing builds.
        #[cfg_attr(not(feature = "processing"), expect(unused_mut))]
        let mut event = utils::take_event_from_crash_items(items, &mut metrics, ctx)?;

        utils::if_processing!(ctx, {
            // Merge the crash report contents into the event (creating one if none was
            // bundled with the crash) and record the ingested byte count.
            crate::utils::process_apple_crash_report(
                event.get_or_insert_with(Default::default),
                &apple_crash_report.payload(),
            );
            metrics.bytes_ingested_event_applecrashreport =
                (apple_crash_report.len() as u64).into();
        });

        Ok(Some(Expansion {
            event: Box::new(event),
            attachments: utils::take_items_of_type(items, ItemType::Attachment),
            user_reports: utils::take_items_of_type(items, ItemType::UserReport),
            error: Self(apple_crash_report),
            metrics,
            // The expanded event still has to go through normalization.
            fully_normalized: false,
        }))
    }

    /// Marks the crash report item as rate limited and records the rejection.
    ///
    /// The item itself is kept (see [`Counted`] impl) so later stages can still extract
    /// data from it; marking is idempotent to avoid double-recording the outcome.
    fn apply_rate_limit(
        &mut self,
        _category: DataCategory,
        limits: RateLimits,
        records: &mut RecordKeeper<'_>,
    ) -> Result<()> {
        if !self.0.rate_limited() {
            self.0.set_rate_limited(true);
            records.reject_err(Error::RateLimited(limits), &self.0);
        }

        Ok(())
    }

    /// Serializes the crash report back into the envelope item list.
    fn serialize_into(self, items: &mut Vec<Item>, _ctx: ForwardContext<'_>) -> Result<()> {
        items.push(self.0);
        Ok(())
    }
}

impl Counted for AppleCrashReport {
    fn quantities(&self) -> Quantities {
        // A rate limited crash report no longer counts as an attachment, but it is still passed
        // along to have its data later extracted into an error (Symbolication).
        //
        // The rate limited information is passed along and will lead to the item later to be
        // dropped.
        if self.0.rate_limited() {
            Default::default()
        } else {
            self.0.quantities()
        }
    }
}
42 changes: 42 additions & 0 deletions relay-server/src/processing/errors/errors/generic.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
use crate::envelope::{Item, ItemType};
use crate::managed::{Counted, Quantities};
use crate::processing::errors::Result;
use crate::processing::errors::errors::{Context, Expansion, SentryError, utils};

/// A generic Sentry error, expanded from a plain JSON event item without a
/// platform-specific crash payload.
#[derive(Debug)]
pub struct Generic;

impl SentryError for Generic {
    /// Expands an envelope containing a plain event item into a generic error.
    ///
    /// Returns `Ok(None)` when there is no [`ItemType::Event`] item, leaving `items`
    /// untouched in that case.
    fn try_expand(items: &mut Vec<Item>, ctx: Context<'_>) -> Result<Option<Expansion<Self>>> {
        let Some(ev) = utils::take_item_of_type(items, ItemType::Event) else {
            return Ok(None);
        };

        // Capture the flag before the item is consumed by the JSON parse below.
        let fully_normalized = ev.fully_normalized();
        let mut metrics = Default::default();

        let mut event = utils::event_from_json_payload(ev, None, &mut metrics, ctx)?;

        // Already fully normalized events in a processing Relay skip normalization entirely.
        let skip_normalization = ctx.processing.is_processing() && fully_normalized;
        if !skip_normalization && let Some(event) = event.value_mut() {
            // Event items can never include transactions, so clear the event type and let
            // inference deal with this during normalization.
            event.ty.set_value(None);
        }

        Ok(Some(Expansion {
            event: Box::new(event),
            attachments: utils::take_items_of_type(items, ItemType::Attachment),
            user_reports: utils::take_items_of_type(items, ItemType::UserReport),
            error: Self,
            metrics,
            fully_normalized,
        }))
    }
}

impl Counted for Generic {
    fn quantities(&self) -> Quantities {
        // The generic marker carries no payload of its own, so it contributes no quantities.
        Default::default()
    }
}
Loading
Loading