🛈 Note: This is pre-release documentation for the upcoming tracing 0.2.0 ecosystem.

For the release documentation, please see docs.rs instead.

tracing_subscriber/subscribe/
layered.rs

1use tracing_core::{
2    collect::{Collect, Interest},
3    metadata::Metadata,
4    span, Dispatch, Event, LevelFilter,
5};
6
7use crate::{
8    filter,
9    registry::LookupSpan,
10    subscribe::{Context, Subscribe},
11};
12#[cfg(all(feature = "registry", feature = "std"))]
13use crate::{filter::FilterId, registry::Registry};
14use core::{
15    any::{Any, TypeId},
16    cmp, fmt,
17    marker::PhantomData,
18    ptr::NonNull,
19};
20
/// A [collector] composed of a [collector] wrapped by one or more
/// [subscriber]s.
///
/// [subscriber]: crate::Subscribe
/// [collector]: tracing_core::Collect
#[derive(Clone)]
pub struct Layered<S, I, C = I> {
    /// The subscriber.
    subscriber: S,

    /// The inner value that `self.subscriber` was layered onto.
    ///
    /// If this is also a `Subscribe`, then this `Layered` will implement `Subscribe`.
    /// If this is a `Collect`, then this `Layered` will implement
    /// `Collect` instead.
    inner: I,

    // These booleans are used to determine how to combine `Interest`s and max
    // level hints when per-subscriber filters are in use.
    /// Is `self.inner` a `Registry`?
    ///
    /// If so, when combining `Interest`s, we want to "bubble up" its
    /// `Interest`.
    inner_is_registry: bool,

    /// Does `self.subscriber` have per-subscriber filters?
    ///
    /// This will be true if:
    /// - `self.inner` is a `Filtered`.
    /// - `self.inner` is a tree of `Layered`s where _all_ arms of those
    ///   `Layered`s have per-subscriber filters.
    ///
    /// Otherwise, if it's a `Layered` with one per-subscriber filter in one branch,
    /// but a non-per-subscriber-filtered subscriber in the other branch, this will be
    /// _false_, because the `Layered` is already handling the combining of
    /// per-subscriber filter `Interest`s and max level hints with its non-filtered
    /// `Subscribe`.
    has_subscriber_filter: bool,

    /// Does `self.inner` have per-subscriber filters?
    ///
    /// This is determined according to the same rules as
    /// `has_subscriber_filter` above.
    inner_has_subscriber_filter: bool,

    // Marks the collector type `C` without storing one. `fn(C)` is used so
    // that no auto-trait (`Send`/`Sync`) or lifetime requirements are imposed
    // on `C` by this field (function pointers are always `Send + Sync`).
    _s: PhantomData<fn(C)>,
}
67
68// === impl Layered ===
69
impl<S, C> Layered<S, C>
where
    S: Subscribe<C>,
    C: Collect,
{
    /// Returns `true` if this `Collector` is the same type as `T`.
    pub fn is<T: Any>(&self) -> bool {
        self.downcast_ref::<T>().is_some()
    }

    /// Returns some reference to this `Collector` value if it is of type `T`,
    /// or `None` if it isn't.
    pub fn downcast_ref<T: Any>(&self) -> Option<&T> {
        // SAFETY: `downcast_raw` only returns `Some` when the requested
        // `TypeId` matched, in which case the pointer it returns refers to a
        // live value of type `T` borrowed from `self` (see the `downcast_raw`
        // implementations below, which return `NonNull::from(self).cast()` or
        // delegate to a child that upholds the same contract). Therefore the
        // cast to `&T` is valid for the lifetime of `&self`.
        unsafe {
            let raw = self.downcast_raw(TypeId::of::<T>())?;
            Some(&*(raw.cast().as_ptr()))
        }
    }
}
89
// When `inner` is itself a `Collect`, this `Layered` is a complete collector
// stack: span/event data is recorded by the inner collector first, and the
// wrapping subscriber is then notified of each lifecycle event.
impl<S, C> Collect for Layered<S, C>
where
    S: Subscribe<C>,
    C: Collect,
{
    fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {
        // Combine the outer subscriber's interest with the inner collector's
        // (computed lazily); see `pick_interest` for the combination rules.
        self.pick_interest(self.subscriber.register_callsite(metadata), || {
            self.inner.register_callsite(metadata)
        })
    }

    fn enabled(&self, metadata: &Metadata<'_>) -> bool {
        if self.subscriber.enabled(metadata, self.ctx()) {
            // if the outer subscriber enables the callsite metadata, ask the collector.
            self.inner.enabled(metadata)
        } else {
            // otherwise, the callsite is disabled by the subscriber

            // If per-subscriber filters are in use, and we are short-circuiting
            // (rather than calling into the inner type), clear the current
            // per-subscriber filter `enabled` state.
            #[cfg(feature = "registry")]
            filter::FilterState::clear_enabled();

            false
        }
    }

    fn max_level_hint(&self) -> Option<LevelFilter> {
        self.pick_level_hint(
            self.subscriber.max_level_hint(),
            self.inner.max_level_hint(),
            super::collector_is_none(&self.inner),
        )
    }

    fn new_span(&self, span: &span::Attributes<'_>) -> span::Id {
        // The inner collector assigns the span ID; the subscriber is then
        // notified so it can record any per-span state it tracks.
        let id = self.inner.new_span(span);
        self.subscriber.on_new_span(span, &id, self.ctx());
        id
    }

    fn record(&self, span: &span::Id, values: &span::Record<'_>) {
        self.inner.record(span, values);
        self.subscriber.on_record(span, values, self.ctx());
    }

    fn record_follows_from(&self, span: &span::Id, follows: &span::Id) {
        self.inner.record_follows_from(span, follows);
        self.subscriber.on_follows_from(span, follows, self.ctx());
    }

    fn event_enabled(&self, event: &Event<'_>) -> bool {
        if self.subscriber.event_enabled(event, self.ctx()) {
            // if the outer subscriber enables the event, ask the inner collector.
            self.inner.event_enabled(event)
        } else {
            // otherwise, the event is disabled by this subscriber
            false
        }
    }

    fn event(&self, event: &Event<'_>) {
        self.inner.event(event);
        self.subscriber.on_event(event, self.ctx());
    }

    fn enter(&self, span: &span::Id) {
        self.inner.enter(span);
        self.subscriber.on_enter(span, self.ctx());
    }

    fn exit(&self, span: &span::Id) {
        self.inner.exit(span);
        self.subscriber.on_exit(span, self.ctx());
    }

    fn clone_span(&self, old: &span::Id) -> span::Id {
        let new = self.inner.clone_span(old);
        // Only notify the subscriber if the inner collector actually handed
        // back a different ID for the cloned span.
        if &new != old {
            self.subscriber.on_id_change(old, &new, self.ctx())
        };
        new
    }

    #[inline]
    fn drop_span(&self, id: span::Id) {
        self.try_close(id);
    }

    fn try_close(&self, id: span::Id) -> bool {
        // If the root collector is a `Registry`, take its close guard *before*
        // calling `try_close`, so the registry can defer freeing the span's
        // data until the subscriber's `on_close` below has run.
        #[cfg(all(feature = "registry", feature = "std"))]
        let subscriber = &self.inner as &dyn Collect;
        #[cfg(all(feature = "registry", feature = "std"))]
        let mut guard = subscriber
            .downcast_ref::<Registry>()
            .map(|registry| registry.start_close(id.clone()));
        if self.inner.try_close(id.clone()) {
            // If we have a registry's close guard, indicate that the span is
            // closing.
            #[cfg(all(feature = "registry", feature = "std"))]
            {
                if let Some(g) = guard.as_mut() {
                    g.set_closing()
                };
            }

            self.subscriber.on_close(id, self.ctx());
            true
        } else {
            false
        }
    }

    #[inline]
    fn current_span(&self) -> span::Current {
        self.inner.current_span()
    }

    #[doc(hidden)]
    unsafe fn downcast_raw(&self, id: TypeId) -> Option<NonNull<()>> {
        // Unlike the implementation of `Subscribe` for `Layered`, we don't have to
        // handle the "magic PSF downcast marker" here. If a `Layered`
        // implements `Collect`, we already know that the `inner` branch is
        // going to contain something that doesn't have per-subscriber filters (the
        // actual root `Collect`). Thus, a `Layered` that implements
        // `Collect` will always be propagating the root subscriber's
        // `Interest`/level hint, even if it includes a `Subscribe` that has
        // per-subscriber filters, because it will only ever contain subscribers where
        // _one_ child has per-subscriber filters.
        //
        // The complex per-subscriber filter detection logic is only relevant to
        // *trees* of subscribers, which involve the `Subscribe` implementation for
        // `Layered`, not *lists* of subscribers, where every `Layered` implements
        // `Collect`. Of course, a linked list can be thought of as a
        // degenerate tree...but luckily, we are able to make a type-level
        // distinction between individual `Layered`s that are definitely
        // list-shaped (their inner child implements `Collect`), and
        // `Layered`s that might be tree-shaped (the inner child is also a
        // `Subscribe`).

        // If downcasting to `Self`, return a pointer to `self`.
        if id == TypeId::of::<Self>() {
            return Some(NonNull::from(self).cast());
        }

        // Otherwise, check the outer subscriber first, then the inner collector.
        self.subscriber
            .downcast_raw(id)
            .or_else(|| self.inner.downcast_raw(id))
    }
}
241
// When *both* sides are `Subscribe`s, the `Layered` is itself a `Subscribe`
// that can be composed further; every callback is forwarded to both children,
// inner-first (except `enabled`/`event_enabled`, which short-circuit).
impl<C, A, B> Subscribe<C> for Layered<A, B, C>
where
    A: Subscribe<C>,
    B: Subscribe<C>,
    C: Collect,
{
    fn on_register_dispatch(&self, collector: &Dispatch) {
        self.subscriber.on_register_dispatch(collector);
        self.inner.on_register_dispatch(collector);
    }

    fn on_subscribe(&mut self, collect: &mut C) {
        self.subscriber.on_subscribe(collect);
        self.inner.on_subscribe(collect);
    }

    fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {
        // Combine the outer subscriber's interest with the inner subscriber's
        // (computed lazily); see `pick_interest` for the combination rules.
        self.pick_interest(self.subscriber.register_callsite(metadata), || {
            self.inner.register_callsite(metadata)
        })
    }

    fn enabled(&self, metadata: &Metadata<'_>, ctx: Context<'_, C>) -> bool {
        if self.subscriber.enabled(metadata, ctx.clone()) {
            // if the outer subscriber enables the callsite metadata, ask the inner subscriber.
            self.inner.enabled(metadata, ctx)
        } else {
            // otherwise, the callsite is disabled by this subscriber
            false
        }
    }

    fn max_level_hint(&self) -> Option<LevelFilter> {
        self.pick_level_hint(
            self.subscriber.max_level_hint(),
            self.inner.max_level_hint(),
            super::subscriber_is_none(&self.inner),
        )
    }

    #[inline]
    fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, C>) {
        self.inner.on_new_span(attrs, id, ctx.clone());
        self.subscriber.on_new_span(attrs, id, ctx);
    }

    #[inline]
    fn on_record(&self, span: &span::Id, values: &span::Record<'_>, ctx: Context<'_, C>) {
        self.inner.on_record(span, values, ctx.clone());
        self.subscriber.on_record(span, values, ctx);
    }

    #[inline]
    fn on_follows_from(&self, span: &span::Id, follows: &span::Id, ctx: Context<'_, C>) {
        self.inner.on_follows_from(span, follows, ctx.clone());
        self.subscriber.on_follows_from(span, follows, ctx);
    }

    #[inline]
    fn event_enabled(&self, event: &Event<'_>, ctx: Context<'_, C>) -> bool {
        if self.subscriber.event_enabled(event, ctx.clone()) {
            // if the outer subscriber enables the event, ask the inner subscriber.
            self.inner.event_enabled(event, ctx)
        } else {
            // otherwise, the event is disabled by this subscriber
            false
        }
    }

    #[inline]
    fn on_event(&self, event: &Event<'_>, ctx: Context<'_, C>) {
        self.inner.on_event(event, ctx.clone());
        self.subscriber.on_event(event, ctx);
    }

    #[inline]
    fn on_enter(&self, id: &span::Id, ctx: Context<'_, C>) {
        self.inner.on_enter(id, ctx.clone());
        self.subscriber.on_enter(id, ctx);
    }

    #[inline]
    fn on_exit(&self, id: &span::Id, ctx: Context<'_, C>) {
        self.inner.on_exit(id, ctx.clone());
        self.subscriber.on_exit(id, ctx);
    }

    #[inline]
    fn on_close(&self, id: span::Id, ctx: Context<'_, C>) {
        self.inner.on_close(id.clone(), ctx.clone());
        self.subscriber.on_close(id, ctx);
    }

    #[inline]
    fn on_id_change(&self, old: &span::Id, new: &span::Id, ctx: Context<'_, C>) {
        self.inner.on_id_change(old, new, ctx.clone());
        self.subscriber.on_id_change(old, new, ctx);
    }

    #[doc(hidden)]
    unsafe fn downcast_raw(&self, id: TypeId) -> Option<NonNull<()>> {
        match id {
            // If downcasting to `Self`, return a pointer to `self`.
            id if id == TypeId::of::<Self>() => Some(NonNull::from(self).cast()),

            // Oh, we're looking for per-subscriber filters!
            //
            // This should only happen if we are inside of another `Layered`,
            // and it's trying to determine how it should combine `Interest`s
            // and max level hints.
            //
            // In that case, this `Layered` should be considered to be
            // "per-subscriber filtered" if *both* the outer subscriber and the inner
            // subscriber have per-subscriber filters. Otherwise, this `Layered`
            // should *not* be considered per-subscriber filtered (even if one or the
            // other has per subscriber filters). If only one `Subscribe` is per-subscriber
            // filtered, *this* `Layered` will handle aggregating the `Interest`
            // and level hints on behalf of its children, returning the
            // aggregate (which is the value from the *non-per-subscriber-filtered*
            // child).
            //
            // Yes, this rule *is* slightly counter-intuitive, but it's
            // necessary due to a weird edge case that can occur when two
            // `Layered`s where one side is per-subscriber filtered and the other
            // isn't are `Layered` together to form a tree. If we didn't have
            // this rule, we would actually end up *ignoring* `Interest`s from
            // the non-per-subscriber-filtered subscribers, since both branches would
            // claim to have PSF.
            //
            // If you don't understand this...that's fine, just don't mess with
            // it. :)
            id if filter::is_psf_downcast_marker(id) => self
                .subscriber
                .downcast_raw(id)
                .and(self.inner.downcast_raw(id)),

            // Otherwise, try to downcast both branches normally...
            _ => self
                .subscriber
                .downcast_raw(id)
                .or_else(|| self.inner.downcast_raw(id)),
        }
    }
}
386
// Span data is owned by the root collector, so lookups simply pass through
// to `inner` (which must itself implement `LookupSpan`).
impl<'a, S, C> LookupSpan<'a> for Layered<S, C>
where
    C: Collect + LookupSpan<'a>,
{
    type Data = C::Data;

    fn span_data(&'a self, id: &span::Id) -> Option<Self::Data> {
        self.inner.span_data(id)
    }

    #[cfg(all(feature = "registry", feature = "std"))]
    fn register_filter(&mut self) -> FilterId {
        // Filter IDs are allocated by the root of the stack; forward the
        // registration all the way down.
        self.inner.register_filter()
    }
}
402
impl<S, C> Layered<S, C>
where
    C: Collect,
{
    /// Returns a `Context` wrapping the inner collector, for passing to the
    /// wrapped subscriber's callbacks.
    fn ctx(&self) -> Context<'_, C> {
        Context::new(&self.inner)
    }
}
411
impl<A, B, C> Layered<A, B, C>
where
    A: Subscribe<C>,
    C: Collect,
{
    /// Constructs a new `Layered`, recording whether the inner side is a
    /// `Registry` and whether either side uses per-subscriber filters (these
    /// flags drive `pick_interest` and `pick_level_hint` below).
    pub(super) fn new(subscriber: A, inner: B, inner_has_subscriber_filter: bool) -> Self {
        #[cfg(all(feature = "registry", feature = "std"))]
        let inner_is_registry = TypeId::of::<C>() == TypeId::of::<crate::registry::Registry>();
        #[cfg(not(all(feature = "registry", feature = "std")))]
        let inner_is_registry = false;

        // A `Registry` root is treated as per-subscriber-filtered so that its
        // `Interest` is "bubbled up" (see the `inner_is_registry` field docs).
        let inner_has_subscriber_filter = inner_has_subscriber_filter || inner_is_registry;
        let has_subscriber_filter = filter::subscriber_has_psf(&subscriber);
        Self {
            subscriber,
            inner,
            has_subscriber_filter,
            inner_has_subscriber_filter,
            inner_is_registry,
            _s: PhantomData,
        }
    }

    /// Combines the outer subscriber's `Interest` with the inner side's
    /// (computed lazily by the `inner` closure).
    fn pick_interest(&self, outer: Interest, inner: impl FnOnce() -> Interest) -> Interest {
        // If the outer subscriber has per-subscriber filters, its `Interest`
        // is not authoritative for the whole stack; defer to the inner side.
        if self.has_subscriber_filter {
            return inner();
        }

        // If the outer subscriber has disabled the callsite, return now so that
        // the inner subscriber or collector doesn't get its hopes up.
        if outer.is_never() {
            // If per-subscriber filters are in use, and we are short-circuiting
            // (rather than calling into the inner type), clear the current
            // per-subscriber filter interest state.
            #[cfg(feature = "registry")]
            filter::FilterState::take_interest();

            return outer;
        }

        // The `inner` closure will call `inner.register_callsite()`. We do this
        // before the `if` statement to ensure that the inner subscriber is
        // informed that the callsite exists regardless of the outer subscriber's
        // filtering decision.
        let inner = inner();
        if outer.is_sometimes() {
            // if this interest is "sometimes", return "sometimes" to ensure that
            // filters are reevaluated.
            return outer;
        }

        // If there is a per-subscriber filter in the `inner` stack, and it returns
        // `never`, change the interest to `sometimes`, because the `outer`
        // subscriber didn't return `never`. This means that _some_ subscriber still wants
        // to see that callsite, even though the inner stack's per-subscriber filter
        // didn't want it. Therefore, returning `sometimes` will ensure
        // `enabled` is called so that the per-subscriber filter can skip that
        // span/event, while the `outer` subscriber still gets to see it.
        if inner.is_never() && self.inner_has_subscriber_filter {
            return Interest::sometimes();
        }

        // otherwise, allow the inner subscriber or collector to weigh in.
        inner
    }

    /// Combines the outer and inner max level hints, accounting for
    /// per-subscriber filters and "none" (`Option`-typed) subscribers.
    fn pick_level_hint(
        &self,
        outer_hint: Option<LevelFilter>,
        inner_hint: Option<LevelFilter>,
        inner_is_none: bool,
    ) -> Option<LevelFilter> {
        // A `Registry` root has no opinion; the outer hint is authoritative.
        if self.inner_is_registry {
            return outer_hint;
        }

        // Both sides per-subscriber filtered: whichever side allows *more*
        // levels determines the hint (and `None` on either side means "no
        // hint" overall, via `?`).
        if self.has_subscriber_filter && self.inner_has_subscriber_filter {
            return Some(cmp::max(outer_hint?, inner_hint?));
        }

        if self.has_subscriber_filter && inner_hint.is_none() {
            return None;
        }

        if self.inner_has_subscriber_filter && outer_hint.is_none() {
            return None;
        }

        // If the subscriber is `Option::None`, then we
        // want to short-circuit the subscriber underneath, if it
        // returns `None`, to override the `None` subscriber returning
        // `Some(OFF)`, which should ONLY apply when there are
        // no other subscribers that return `None`. Note this
        // `None` does not == `Some(TRACE)`, it means
        // something more like: "whatever all the other
        // subscribers agree on, default to `TRACE` if none
        // have an opinion". We also choose to do this AFTER
        // we check for per-subscriber filters, which
        // have their own logic.
        //
        // Also note that this does come at some perf cost, but
        // this function is only called on initialization and
        // subscriber reloading.
        if super::subscriber_is_none(&self.subscriber) {
            return cmp::max(outer_hint, Some(inner_hint?));
        }

        // Similarly, if the subscriber on the inside is `None` and it returned an
        // `Off` hint, we want to override that with the outer hint.
        if inner_is_none && inner_hint == Some(LevelFilter::OFF) {
            return outer_hint;
        }

        cmp::max(outer_hint, inner_hint)
    }
}
528
529impl<A, B, S> fmt::Debug for Layered<A, B, S>
530where
531    A: fmt::Debug,
532    B: fmt::Debug,
533{
534    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
535        #[cfg(all(feature = "registry", feature = "std"))]
536        let alt = f.alternate();
537        let mut s = f.debug_struct("Layered");
538        // These additional fields are more verbose and usually only necessary
539        // for internal debugging purposes, so only print them if alternate mode
540        // is enabled.
541        #[cfg(all(feature = "registry", feature = "std"))]
542        if alt {
543            s.field("inner_is_registry", &self.inner_is_registry)
544                .field("has_subscriber_filter", &self.has_subscriber_filter)
545                .field(
546                    "inner_has_subscriber_filter",
547                    &self.inner_has_subscriber_filter,
548                );
549        }
550
551        s.field("subscriber", &self.subscriber)
552            .field("inner", &self.inner)
553            .finish()
554    }
555}