//! Dispatches trace events to [`Collect`]s.
//!
//! The _dispatcher_ is the component of the tracing system which is responsible
//! for forwarding trace data from the instrumentation points that generate it
//! to the collector that collects it.
//!
//! # Using the Trace Dispatcher
//!
//! Every thread in a program using `tracing` has a _default collector_. When
//! events occur, or spans are created, they are dispatched to the thread's
//! current collector.
//!
//! ## Setting the Default Collector
//!
//! By default, the current collector is an empty implementation that does
//! nothing. Trace data provided to this "do nothing" implementation is
//! immediately discarded, and is not available for any purpose.
//!
//! To use another collector implementation, it must be set as the default.
//! There are two methods for doing so: [`with_default`] and
//! [`set_global_default`]. `with_default` sets the default collector for the
//! duration of a scope, while `set_global_default` sets a default collector
//! for the entire process.
//!
//! To use either of these functions, we must first wrap our collector in a
//! [`Dispatch`], a cloneable, type-erased reference to a collector. For
//! example:
//! ```rust
//! # pub struct FooCollector;
//! # use tracing_core::{
//! #   dispatch, Event, Metadata,
//! #   span::{Attributes, Current, Id, Record}
//! # };
//! # impl tracing_core::Collect for FooCollector {
//! #   fn new_span(&self, _: &Attributes) -> Id { Id::from_u64(0) }
//! #   fn record(&self, _: &Id, _: &Record) {}
//! #   fn event(&self, _: &Event) {}
//! #   fn record_follows_from(&self, _: &Id, _: &Id) {}
//! #   fn enabled(&self, _: &Metadata) -> bool { false }
//! #   fn enter(&self, _: &Id) {}
//! #   fn exit(&self, _: &Id) {}
//! #   fn current_span(&self) -> Current { Current::unknown() }
//! # }
//! # impl FooCollector { fn new() -> Self { FooCollector } }
//! # #[cfg(feature = "alloc")]
//! use dispatch::Dispatch;
//!
//! # #[cfg(feature = "alloc")]
//! let my_collector = FooCollector::new();
//! # #[cfg(feature = "alloc")]
//! let my_dispatch = Dispatch::new(my_collector);
//! ```
//! Then, we can use [`with_default`] to set our `Dispatch` as the default for
//! the duration of a block:
//! ```rust
//! # pub struct FooCollector;
//! # use tracing_core::{
//! #   dispatch, Event, Metadata,
//! #   span::{Attributes, Current, Id, Record}
//! # };
//! # impl tracing_core::Collect for FooCollector {
//! #   fn new_span(&self, _: &Attributes) -> Id { Id::from_u64(0) }
//! #   fn record(&self, _: &Id, _: &Record) {}
//! #   fn event(&self, _: &Event) {}
//! #   fn record_follows_from(&self, _: &Id, _: &Id) {}
//! #   fn enabled(&self, _: &Metadata) -> bool { false }
//! #   fn enter(&self, _: &Id) {}
//! #   fn exit(&self, _: &Id) {}
//! #   fn current_span(&self) -> Current { Current::unknown() }
//! # }
//! # impl FooCollector { fn new() -> Self { FooCollector } }
//! # let _my_collector = FooCollector::new();
//! # #[cfg(feature = "std")]
//! # let my_dispatch = dispatch::Dispatch::new(_my_collector);
//! // no default collector
//!
//! # #[cfg(feature = "std")]
//! dispatch::with_default(&my_dispatch, || {
//!     // my_collector is the default
//! });
//!
//! // no default collector again
//! ```
//! It's important to note that `with_default` will not propagate the current
//! thread's default collector to any threads spawned within the `with_default`
//! block. To propagate the default collector to new threads, either use
//! `with_default` from the new thread, or use `set_global_default`.
//!
//! As an alternative to `with_default`, we can use [`set_global_default`] to
//! set a `Dispatch` as the default for all threads, for the lifetime of the
//! program. For example:
//! ```rust
//! # pub struct FooCollector;
//! # use tracing_core::{
//! #   dispatch, Event, Metadata,
//! #   span::{Attributes, Current, Id, Record}
//! # };
//! # impl tracing_core::Collect for FooCollector {
//! #   fn new_span(&self, _: &Attributes) -> Id { Id::from_u64(0) }
//! #   fn record(&self, _: &Id, _: &Record) {}
//! #   fn event(&self, _: &Event) {}
//! #   fn record_follows_from(&self, _: &Id, _: &Id) {}
//! #   fn enabled(&self, _: &Metadata) -> bool { false }
//! #   fn enter(&self, _: &Id) {}
//! #   fn exit(&self, _: &Id) {}
//! #   fn current_span(&self) -> Current { Current::unknown() }
//! # }
//! # impl FooCollector { fn new() -> Self { FooCollector } }
//! # #[cfg(feature = "std")]
//! # let my_collector = FooCollector::new();
//! # #[cfg(feature = "std")]
//! # let my_dispatch = dispatch::Dispatch::new(my_collector);
//! // no default collector
//!
//! # #[cfg(feature = "std")]
//! dispatch::set_global_default(my_dispatch)
//!     // `set_global_default` will return an error if the global default
//!     // collector has already been set.
//!     .expect("global default was already set!");
//!
//! // `my_collector` is now the default
//! ```
//!
//! <div class="example-wrap" style="display:inline-block">
//! <pre class="ignore" style="white-space:normal;font:inherit;">
//!
//! **Note**: the thread-local scoped dispatcher ([`with_default`]) requires the
//! Rust standard library. `no_std` users should use [`set_global_default`] instead.
//!
//! </pre></div>
//!
//! ## Accessing the Default Collector
//!
//! A thread's current default collector can be accessed using the
//! [`get_default`] function, which executes a closure with a reference to the
//! current default `Dispatch`. This is used primarily by `tracing`
//! instrumentation.
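//!
//! For example, a minimal sketch of inspecting the current dispatcher (using
//! the crate's `NoCollector` only as a stand-in for whatever collector is
//! actually installed):
//!
//! ```rust
//! use tracing_core::{collect::NoCollector, dispatch};
//!
//! // Run a closure with a reference to this thread's current `Dispatch`.
//! let is_noop = dispatch::get_default(|current| current.is::<NoCollector>());
//! // With no default installed, this reports the "do nothing" collector.
//! # let _ = is_noop;
//! ```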
use crate::{
    collect::{self, Collect, NoCollector},
    span, Event, LevelFilter, Metadata,
};

use core::{any::Any, fmt, sync::atomic::Ordering};

#[cfg(feature = "portable-atomic")]
use portable_atomic::{AtomicBool, AtomicUsize};

#[cfg(not(feature = "portable-atomic"))]
use core::sync::atomic::{AtomicBool, AtomicUsize};

#[cfg(feature = "std")]
use std::{
    cell::{Cell, RefCell, RefMut},
    error,
};

#[cfg(all(feature = "alloc", not(feature = "portable-atomic")))]
use alloc::sync::{Arc, Weak};

#[cfg(all(feature = "alloc", feature = "portable-atomic"))]
use portable_atomic_util::{Arc, Weak};

#[cfg(feature = "alloc")]
use core::ops::Deref;

/// `Dispatch` trace data to a [`Collect`].
#[derive(Clone)]
pub struct Dispatch {
    #[cfg(feature = "alloc")]
    collector: Kind<Arc<dyn Collect + Send + Sync>>,

    #[cfg(not(feature = "alloc"))]
    collector: &'static (dyn Collect + Send + Sync),
}

/// `WeakDispatch` is a version of [`Dispatch`] that holds a non-owning reference
/// to a [collector].
///
/// The collector may be accessed by calling [`WeakDispatch::upgrade`],
/// which returns an `Option<Dispatch>`. If all [`Dispatch`] clones that point
/// at the collector have been dropped, [`WeakDispatch::upgrade`] will return
/// `None`. Otherwise, it will return `Some(Dispatch)`.
///
/// A `WeakDispatch` may be created from a [`Dispatch`] by calling the
/// [`Dispatch::downgrade`] method. The primary use for creating a
/// [`WeakDispatch`] is to allow a collector to hold a cyclical reference to
/// itself without creating a memory leak. See [here] for details.
///
/// This type is analogous to the [`std::sync::Weak`] type, but for a
/// [`Dispatch`] rather than an [`Arc`].
///
/// [collector]: Collect
/// [`Arc`]: std::sync::Arc
/// [here]: Collect#avoiding-memory-leaks
#[derive(Clone)]
pub struct WeakDispatch {
    #[cfg(feature = "alloc")]
    collector: Kind<Weak<dyn Collect + Send + Sync>>,

    #[cfg(not(feature = "alloc"))]
    collector: &'static (dyn Collect + Send + Sync),
}

#[cfg(feature = "alloc")]
#[derive(Clone)]
enum Kind<T> {
    Global(&'static (dyn Collect + Send + Sync)),
    Scoped(T),
}

#[cfg(feature = "std")]
thread_local! {
    static CURRENT_STATE: State = const {
        State {
            default: RefCell::new(None),
            can_enter: Cell::new(true),
        }
    };
}

static EXISTS: AtomicBool = AtomicBool::new(false);
static GLOBAL_INIT: AtomicUsize = AtomicUsize::new(UNINITIALIZED);

#[cfg(feature = "std")]
static SCOPED_COUNT: AtomicUsize = AtomicUsize::new(0);

const UNINITIALIZED: usize = 0;
const INITIALIZING: usize = 1;
const INITIALIZED: usize = 2;

static mut GLOBAL_DISPATCH: Dispatch = Dispatch {
    #[cfg(feature = "alloc")]
    collector: Kind::Global(&NO_COLLECTOR),
    #[cfg(not(feature = "alloc"))]
    collector: &NO_COLLECTOR,
};
static NONE: Dispatch = Dispatch {
    #[cfg(feature = "alloc")]
    collector: Kind::Global(&NO_COLLECTOR),
    #[cfg(not(feature = "alloc"))]
    collector: &NO_COLLECTOR,
};
static NO_COLLECTOR: NoCollector = NoCollector::new();

/// The dispatch state of a thread.
#[cfg(feature = "std")]
struct State {
    /// This thread's current default dispatcher.
    default: RefCell<Option<Dispatch>>,
    /// Whether or not we can currently begin dispatching a trace event.
    ///
    /// This is set to `false` when functions such as `enter`, `exit`, `event`,
    /// and `new_span` are called on this thread's default dispatcher, to
    /// prevent further trace events triggered inside those functions from
    /// creating an infinite recursion. When we finish handling a dispatch, this
    /// is set back to `true`.
    can_enter: Cell<bool>,
}

/// While this guard is active, additional calls to collector functions on
/// the default dispatcher will not be able to access the dispatch context.
/// Dropping the guard will allow the dispatch context to be re-entered.
#[cfg(feature = "std")]
struct Entered<'a>(&'a State);

/// A guard that resets the current default dispatcher to the prior
/// default dispatcher when dropped.
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[derive(Debug)]
pub struct DefaultGuard(Option<Dispatch>);

/// Sets this dispatch as the default for the duration of a closure.
///
/// The default dispatcher is used when creating a new [span] or
/// [`Event`].
///
/// <div class="example-wrap" style="display:inline-block">
/// <pre class="ignore" style="white-space:normal;font:inherit;">
/// <strong>Note</strong>: This function requires the Rust standard library.
/// <!-- hack: this whitespace makes rustdoc interpret the next line as markdown again -->
///
/// `no_std` users should use [`set_global_default`] instead.
///
/// </pre></div>
///
/// [span]: super::span
/// [`Event`]: super::event::Event
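///
/// # Examples
///
/// A minimal sketch, using `NoCollector` as a stand-in for a real collector
/// implementation:
///
/// ```rust
/// # #[cfg(feature = "std")]
/// use tracing_core::{collect::NoCollector, dispatch::{self, Dispatch}};
///
/// # #[cfg(feature = "std")]
/// let my_dispatch = Dispatch::new(NoCollector::new());
/// # #[cfg(feature = "std")]
/// dispatch::with_default(&my_dispatch, || {
///     // `my_dispatch` is the default collector inside this closure.
/// });
/// // the prior default (if any) is restored once the closure returns
/// ```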
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub fn with_default<T>(dispatcher: &Dispatch, f: impl FnOnce() -> T) -> T {
    // When this guard is dropped, the default dispatcher will be reset to the
    // prior default. Using this (rather than simply resetting after calling
    // `f`) ensures that we always reset to the prior dispatcher even if `f`
    // panics.
    let _guard = set_default(dispatcher);
    f()
}

/// Sets the dispatch as the default dispatch for the duration of the lifetime
/// of the returned [`DefaultGuard`].
///
/// <div class="example-wrap" style="display:inline-block">
/// <pre class="ignore" style="white-space:normal;font:inherit;">
///
/// **Note**: This function requires the Rust standard library.
/// `no_std` users should use [`set_global_default`] instead.
///
/// </pre></div>
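///
/// # Examples
///
/// A minimal sketch, again with `NoCollector` standing in for a real
/// collector:
///
/// ```rust
/// # #[cfg(feature = "std")]
/// use tracing_core::{collect::NoCollector, dispatch::{self, Dispatch}};
///
/// # #[cfg(feature = "std")]
/// let my_dispatch = Dispatch::new(NoCollector::new());
/// # #[cfg(feature = "std")]
/// let guard = dispatch::set_default(&my_dispatch);
/// // `my_dispatch` is the default collector until `guard` is dropped.
/// # #[cfg(feature = "std")]
/// drop(guard);
/// // the prior default is restored here
/// ```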
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[must_use = "Dropping the guard unregisters the dispatcher."]
pub fn set_default(dispatcher: &Dispatch) -> DefaultGuard {
    // When this guard is dropped, the default dispatcher will be reset to the
    // prior default. Using this ensures that we always reset to the prior
    // dispatcher even if the thread calling this function panics.
    State::set_default(dispatcher.clone())
}

/// Sets this dispatch as the global default for the duration of the entire program.
/// Will be used as a fallback if no thread-local dispatch has been set in a thread
/// (using `with_default`).
///
/// Can only be set once; subsequent attempts to set the global default will fail.
/// Returns `Err` if the global default has already been set.
///
/// <div class="example-wrap" style="display:inline-block"><pre class="compile_fail" style="white-space:normal;font:inherit;">
/// <strong>Warning</strong>: In general, libraries should <em>not</em> call
/// <code>set_global_default()</code>! Doing so will cause conflicts when
/// executables that depend on the library try to set the default collector later.
/// </pre></div>
///
/// [span]: super::span
/// [`Event`]: super::event::Event
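///
/// # Examples
///
/// A minimal sketch, using `NoCollector` in place of a real collector (this
/// also works without the `std` or `alloc` features, since the collector is
/// stored in a static):
///
/// ```rust
/// use tracing_core::{collect::NoCollector, dispatch::{self, Dispatch}};
///
/// static COLLECTOR: NoCollector = NoCollector::new();
///
/// match dispatch::set_global_default(Dispatch::from_static(&COLLECTOR)) {
///     Ok(()) => { /* `COLLECTOR` is now the global default */ }
///     Err(_) => { /* a global default collector was already installed */ }
/// }
/// ```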
pub fn set_global_default(dispatcher: Dispatch) -> Result<(), SetGlobalDefaultError> {
    // if `compare_exchange` returns Result::Ok(_), then `new` has been set and
    // `current`—now the prior value—has been returned in the `Ok()` branch.
    if GLOBAL_INIT
        .compare_exchange(
            UNINITIALIZED,
            INITIALIZING,
            Ordering::SeqCst,
            Ordering::SeqCst,
        )
        .is_ok()
    {
        #[cfg(feature = "alloc")]
        let collector = {
            let collector = match dispatcher.collector {
                Kind::Global(s) => s,
                Kind::Scoped(s) => unsafe {
                    // safety: this leaks the collector onto the heap. the
                    // reference count will always be at least 1.
                    &*Arc::into_raw(s)
                },
            };
            Kind::Global(collector)
        };

        #[cfg(not(feature = "alloc"))]
        let collector = dispatcher.collector;

        unsafe {
            GLOBAL_DISPATCH = Dispatch { collector };
        }
        GLOBAL_INIT.store(INITIALIZED, Ordering::SeqCst);
        EXISTS.store(true, Ordering::Release);
        Ok(())
    } else {
        Err(SetGlobalDefaultError { _no_construct: () })
    }
}

/// Returns true if a `tracing` dispatcher has ever been set.
///
/// This may be used to completely elide trace points if tracing is not in use
/// at all or has yet to be initialized.
#[doc(hidden)]
#[inline(always)]
pub fn has_been_set() -> bool {
    EXISTS.load(Ordering::Relaxed)
}

/// Returned if setting the global dispatcher fails.
pub struct SetGlobalDefaultError {
    _no_construct: (),
}

impl fmt::Debug for SetGlobalDefaultError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("SetGlobalDefaultError")
            .field(&Self::MESSAGE)
            .finish()
    }
}

impl fmt::Display for SetGlobalDefaultError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.pad(Self::MESSAGE)
    }
}

#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl error::Error for SetGlobalDefaultError {}

impl SetGlobalDefaultError {
    const MESSAGE: &'static str = "a global default trace dispatcher has already been set";
}

/// Executes a closure with a reference to this thread's current [dispatcher].
///
/// Note that calls to `get_default` should not be nested; if this function is
/// called while inside of another `get_default`, that closure will be provided
/// with `Dispatch::none` rather than the previously set dispatcher.
///
/// [dispatcher]: super::dispatch::Dispatch
#[cfg(feature = "std")]
#[inline(always)]
pub fn get_default<T, F>(mut f: F) -> T
where
    F: FnMut(&Dispatch) -> T,
{
    if SCOPED_COUNT.load(Ordering::Acquire) == 0 {
        // fast path if no scoped dispatcher has been set; just use the global
        // default.
        return f(get_global());
    }

    get_default_slow(f)
}

#[cfg(feature = "std")]
#[inline(never)]
fn get_default_slow<T, F>(mut f: F) -> T
where
    F: FnMut(&Dispatch) -> T,
{
    // While this guard is active, additional calls to collector functions on
    // the default dispatcher will not be able to access the dispatch context.
    // Dropping the guard will allow the dispatch context to be re-entered.
    struct Entered<'a>(&'a Cell<bool>);
    impl Drop for Entered<'_> {
        #[inline]
        fn drop(&mut self) {
            self.0.set(true);
        }
    }

    CURRENT_STATE
        .try_with(|state| {
            if state.can_enter.replace(false) {
                let _guard = Entered(&state.can_enter);

                let mut default = state.default.borrow_mut();
                let default = default
                    // if the local default for this thread has never been set,
                    // populate it with the global default, so we don't have to
                    // keep getting the global on every `get_default_slow` call.
                    .get_or_insert_with(|| get_global().clone());

                return f(&*default);
            }

            f(&Dispatch::none())
        })
        .unwrap_or_else(|_| f(&Dispatch::none()))
}

/// Executes a closure with a reference to this thread's current [dispatcher].
///
/// Note that calls to `get_default` should not be nested; if this function is
/// called while inside of another `get_default`, that closure will be provided
/// with `Dispatch::none` rather than the previously set dispatcher.
///
/// [dispatcher]: super::dispatch::Dispatch
#[cfg(feature = "std")]
#[doc(hidden)]
#[inline(never)]
pub fn get_current<T>(f: impl FnOnce(&Dispatch) -> T) -> Option<T> {
    CURRENT_STATE
        .try_with(|state| {
            let entered = state.enter()?;
            Some(f(&entered.current()))
        })
        .ok()?
}

/// Executes a closure with a reference to the current [dispatcher].
///
/// [dispatcher]: super::dispatch::Dispatch
#[cfg(not(feature = "std"))]
#[doc(hidden)]
pub fn get_current<T>(f: impl FnOnce(&Dispatch) -> T) -> Option<T> {
    Some(f(&get_global()))
}

/// Executes a closure with a reference to the current [dispatcher].
///
/// [dispatcher]: super::dispatch::Dispatch
#[cfg(not(feature = "std"))]
pub fn get_default<T, F>(mut f: F) -> T
where
    F: FnMut(&Dispatch) -> T,
{
    f(get_global())
}

#[inline(always)]
pub(crate) fn get_global() -> &'static Dispatch {
    if GLOBAL_INIT.load(Ordering::Acquire) != INITIALIZED {
        return &NONE;
    }
    unsafe {
        // This is safe given the invariant that setting the global dispatcher
        // also sets `GLOBAL_INIT` to `INITIALIZED`.
        #[allow(static_mut_refs)]
        &GLOBAL_DISPATCH
    }
}

#[cfg(feature = "std")]
pub(crate) struct Registrar(Kind<Weak<dyn Collect + Send + Sync>>);

impl Dispatch {
    /// Returns a new `Dispatch` that discards events and spans.
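    ///
    /// # Examples
    ///
    /// A minimal sketch; the returned `Dispatch` forwards to the crate's
    /// "do nothing" `NoCollector`:
    ///
    /// ```rust
    /// use tracing_core::{collect::NoCollector, dispatch::Dispatch};
    ///
    /// let dispatch = Dispatch::none();
    /// assert!(dispatch.is::<NoCollector>());
    /// ```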
    #[inline]
    pub fn none() -> Self {
        Dispatch {
            #[cfg(feature = "alloc")]
            collector: Kind::Global(&NO_COLLECTOR),
            #[cfg(not(feature = "alloc"))]
            collector: &NO_COLLECTOR,
        }
    }

    /// Returns a `Dispatch` that forwards to the given [`Collect`].
    ///
    /// [`Collect`]: super::collect::Collect
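    ///
    /// # Examples
    ///
    /// A minimal sketch; `NoCollector` stands in for a user-defined collector
    /// implementation:
    ///
    /// ```rust
    /// use tracing_core::{collect::NoCollector, dispatch::Dispatch};
    ///
    /// // The collector is moved into the `Dispatch` and reference-counted.
    /// let dispatch = Dispatch::new(NoCollector::new());
    /// assert!(dispatch.is::<NoCollector>());
    /// ```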
    #[cfg(feature = "alloc")]
    #[cfg_attr(docsrs, doc(cfg(any(feature = "std", feature = "alloc"))))]
    pub fn new<C>(collector: C) -> Self
    where
        C: Collect + Send + Sync + 'static,
    {
        #[cfg(not(feature = "portable-atomic"))]
        let arc = Arc::new(collector);

        #[cfg(feature = "portable-atomic")]
        let arc = {
            use alloc::boxed::Box;

            // Workaround for a lack of support for unsized coercion in non-first-party types.
            // See https://github.com/rust-lang/rust/issues/18598
            let boxed: Box<dyn Collect + Send + Sync> = Box::<C>::new(collector);
            Arc::from(boxed)
        };

        let me = Dispatch {
            collector: Kind::Scoped(arc),
        };
        crate::callsite::register_dispatch(&me);
        me
    }

    /// Returns a `Dispatch` that forwards to the given static [collector].
    ///
    /// Unlike [`Dispatch::new`], this function is always available on all
    /// platforms, even when the `std` or `alloc` features are disabled.
    ///
    /// In order to use `from_static`, the `Collector` itself must be stored in
    /// a static. For example:
    ///
    /// ```rust
    /// struct MyCollector {
    ///    // ...
    /// }
    ///
    /// # use tracing_core::{span::{Id, Attributes, Current, Record}, Event, Metadata};
    /// impl tracing_core::Collect for MyCollector {
    ///     // ...
    /// #   fn new_span(&self, _: &Attributes) -> Id { Id::from_u64(0) }
    /// #   fn record(&self, _: &Id, _: &Record) {}
    /// #   fn event(&self, _: &Event) {}
    /// #   fn record_follows_from(&self, _: &Id, _: &Id) {}
    /// #   fn enabled(&self, _: &Metadata) -> bool { false }
    /// #   fn enter(&self, _: &Id) {}
    /// #   fn exit(&self, _: &Id) {}
    /// #   fn current_span(&self) -> Current { Current::unknown() }
    /// }
    ///
    /// static COLLECTOR: MyCollector = MyCollector {
    ///     // ...
    /// };
    ///
    /// fn main() {
    ///     use tracing_core::dispatch::{self, Dispatch};
    ///
    ///     let dispatch = Dispatch::from_static(&COLLECTOR);
    ///
    ///     dispatch::set_global_default(dispatch)
    ///         .expect("no global default collector should have been set previously!");
    /// }
    /// ```
    ///
    /// Constructing the collector in a static initializer may make some forms
    /// of runtime configuration more challenging. If this is the case, users
    /// with access to `liballoc` or the Rust standard library are encouraged to
    /// use [`Dispatch::new`] rather than `from_static`. `no_std` users who
    /// cannot allocate or do not have access to `liballoc` may want to consider
    /// the [`once_cell`] crate, or another library which allows lazy
    /// initialization of statics.
    ///
    /// [collector]: super::collect::Collect
    /// [`once_cell`]: https://crates.io/crates/once_cell
    pub fn from_static(collector: &'static (dyn Collect + Send + Sync)) -> Self {
        #[cfg(feature = "alloc")]
        let me = Self {
            collector: Kind::Global(collector),
        };
        #[cfg(not(feature = "alloc"))]
        let me = Self { collector };
        crate::callsite::register_dispatch(&me);
        me
    }

    /// Creates a [`WeakDispatch`] from this `Dispatch`.
    ///
    /// A [`WeakDispatch`] is similar to a [`Dispatch`], but it does not prevent
    /// the underlying [collector] from being dropped. Instead, it only permits
    /// access while other references to the collector exist. This is equivalent
    /// to the standard library's [`Arc::downgrade`] method, but for `Dispatch`
    /// rather than `Arc`.
    ///
    /// The primary use for creating a [`WeakDispatch`] is to allow a collector
    /// to hold a cyclical reference to itself without creating a memory leak.
    /// See [here] for details.
    ///
    /// [collector]: Collect
    /// [`Arc::downgrade`]: std::sync::Arc::downgrade
    /// [here]: Collect#avoiding-memory-leaks
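    ///
    /// # Examples
    ///
    /// A minimal sketch; a `WeakDispatch` on its own does not keep the
    /// collector alive:
    ///
    /// ```rust
    /// use tracing_core::{collect::NoCollector, dispatch::Dispatch};
    ///
    /// let strong = Dispatch::new(NoCollector::new());
    /// let weak = strong.downgrade();
    ///
    /// // `weak` can be upgraded while `strong` still exists...
    /// assert!(weak.upgrade().is_some());
    ///
    /// // ...but not once every `Dispatch` pointing to the collector is gone.
    /// drop(strong);
    /// assert!(weak.upgrade().is_none());
    /// ```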
    pub fn downgrade(&self) -> WeakDispatch {
        #[cfg(feature = "alloc")]
        let collector = match &self.collector {
            Kind::Global(dispatch) => Kind::Global(*dispatch),
            Kind::Scoped(dispatch) => Kind::Scoped(Arc::downgrade(dispatch)),
        };
        #[cfg(not(feature = "alloc"))]
        let collector = self.collector;

        WeakDispatch { collector }
    }

    #[cfg(feature = "std")]
    pub(crate) fn registrar(&self) -> Registrar {
        Registrar(match self.collector {
            Kind::Scoped(ref s) => Kind::Scoped(Arc::downgrade(s)),
            Kind::Global(s) => Kind::Global(s),
        })
    }

    #[inline(always)]
    #[cfg(feature = "alloc")]
    pub(crate) fn collector(&self) -> &(dyn Collect + Send + Sync) {
        match self.collector {
            Kind::Scoped(ref s) => Arc::deref(s),
            Kind::Global(s) => s,
        }
    }

    #[inline(always)]
    #[cfg(not(feature = "alloc"))]
    pub(crate) fn collector(&self) -> &(dyn Collect + Send + Sync) {
        self.collector
    }

    /// Registers a new callsite with this collector, returning whether or not
    /// the collector is interested in being notified about the callsite.
    ///
    /// This calls the [`register_callsite`] function on the [`Collect`]
    /// that this `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`register_callsite`]: super::collect::Collect::register_callsite
    #[inline]
    pub fn register_callsite(&self, metadata: &'static Metadata<'static>) -> collect::Interest {
        self.collector().register_callsite(metadata)
    }

    /// Returns the highest [verbosity level][level] that this [collector] will
    /// enable, or `None`, if the collector does not implement level-based
    /// filtering or chooses not to implement this method.
    ///
    /// This calls the [`max_level_hint`] function on the [`Collect`]
    /// that this `Dispatch` forwards to.
    ///
    /// [level]: super::Level
    /// [collector]: super::collect::Collect
    /// [`Collect`]: super::collect::Collect
    /// [`max_level_hint`]: super::collect::Collect::max_level_hint
    // TODO(eliza): consider making this a public API?
    #[inline]
    pub(crate) fn max_level_hint(&self) -> Option<LevelFilter> {
        self.collector().max_level_hint()
    }

    /// Record the construction of a new span, returning a new [ID] for the
    /// span being constructed.
    ///
    /// This calls the [`new_span`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [ID]: super::span::Id
    /// [`Collect`]: super::collect::Collect
    /// [`new_span`]: super::collect::Collect::new_span
    #[inline]
    pub fn new_span(&self, span: &span::Attributes<'_>) -> span::Id {
        self.collector().new_span(span)
    }

    /// Record a set of values on a span.
    ///
    /// This calls the [`record`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`record`]: super::collect::Collect::record
    #[inline]
    pub fn record(&self, span: &span::Id, values: &span::Record<'_>) {
        self.collector().record(span, values)
    }

    /// Adds an indication that `span` follows from the span with the id
    /// `follows`.
    ///
    /// This calls the [`record_follows_from`] function on the [`Collect`]
    /// that this `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`record_follows_from`]: super::collect::Collect::record_follows_from
    #[inline]
    pub fn record_follows_from(&self, span: &span::Id, follows: &span::Id) {
        self.collector().record_follows_from(span, follows)
    }

    /// Returns true if a span with the specified [metadata] would be
    /// recorded.
    ///
    /// This calls the [`enabled`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [metadata]: super::metadata::Metadata
    /// [`Collect`]: super::collect::Collect
    /// [`enabled`]: super::collect::Collect::enabled
    #[inline]
    pub fn enabled(&self, metadata: &Metadata<'_>) -> bool {
        self.collector().enabled(metadata)
    }

    /// Records that an [`Event`] has occurred.
    ///
    /// This calls the [`event`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [`Event`]: super::event::Event
    /// [`Collect`]: super::collect::Collect
    /// [`event`]: super::collect::Collect::event
    #[inline]
    pub fn event(&self, event: &Event<'_>) {
        let collector = self.collector();
        if collector.event_enabled(event) {
            collector.event(event);
        }
    }

    /// Records that a span has been entered.
    ///
    /// This calls the [`enter`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`enter`]: super::collect::Collect::enter
    pub fn enter(&self, span: &span::Id) {
        self.collector().enter(span);
    }

    /// Records that a span has been exited.
    ///
    /// This calls the [`exit`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`exit`]: super::collect::Collect::exit
    pub fn exit(&self, span: &span::Id) {
        self.collector().exit(span);
    }

    /// Notifies the [collector] that a [span ID] has been cloned.
    ///
    /// This function must only be called with span IDs that were returned by
    /// this `Dispatch`'s [`new_span`] function. The `tracing` crate upholds
    /// this guarantee and any other libraries implementing instrumentation APIs
    /// must as well.
    ///
    /// This calls the [`clone_span`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [span ID]: super::span::Id
    /// [collector]: super::collect::Collect
    /// [`clone_span`]: super::collect::Collect::clone_span
    /// [`new_span`]: super::collect::Collect::new_span
    #[inline]
    pub fn clone_span(&self, id: &span::Id) -> span::Id {
        self.collector().clone_span(id)
    }

    /// Notifies the collector that a [span ID] has been dropped.
    ///
    /// This function must only be called with span IDs that were returned by
    /// this `Dispatch`'s [`new_span`] function. The `tracing` crate upholds
    /// this guarantee and any other libraries implementing instrumentation APIs
    /// must as well.
    ///
    /// This calls the [`drop_span`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// <div class="example-wrap" style="display:inline-block"><pre class="compile_fail" style="white-space:normal;font:inherit;">
    ///
    /// **Deprecated**: The [`try_close`] method is functionally identical, but returns `true` if the span is now closed.
    /// It should be used instead of this method.
    ///
    /// </pre></div>
    ///
    /// [span ID]: super::span::Id
    /// [`Collect`]: super::collect::Collect
    /// [`drop_span`]: super::collect::Collect::drop_span
    /// [`new_span`]: super::collect::Collect::new_span
    /// [`try_close`]: Self::try_close
    #[inline]
    #[deprecated(since = "0.1.2", note = "use `Dispatch::try_close` instead")]
    pub fn drop_span(&self, id: span::Id) {
        #[allow(deprecated)]
        self.collector().drop_span(id);
    }

    /// Notifies the collector that a [span ID] has been dropped, and returns
    /// `true` if there are now 0 IDs referring to that span.
    ///
    /// This function must only be called with span IDs that were returned by
    /// this `Dispatch`'s [`new_span`] function. The `tracing` crate upholds
    /// this guarantee and any other libraries implementing instrumentation APIs
    /// must as well.
    ///
    /// This calls the [`try_close`] function on the [`Collect`] trait
    /// that this `Dispatch` forwards to.
    ///
    /// [span ID]: super::span::Id
    /// [`Collect`]: super::collect::Collect
    /// [`try_close`]: super::collect::Collect::try_close
    /// [`new_span`]: super::collect::Collect::new_span
    pub fn try_close(&self, id: span::Id) -> bool {
        self.collector().try_close(id)
    }

    /// Returns a type representing this collector's view of the current span.
    ///
    /// This calls the [`current`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`current`]: super::collect::Collect::current_span
    #[inline]
    pub fn current_span(&self) -> span::Current {
        self.collector().current_span()
    }

    /// Returns `true` if this `Dispatch` forwards to a collector of type
    /// `T`.
    #[inline]
    pub fn is<T: Any>(&self) -> bool {
        <dyn Collect>::is::<T>(self.collector())
    }

    /// Returns some reference to the [`Collect`] this `Dispatch` forwards to
    /// if it is of type `T`, or `None` if it isn't.
    ///
    /// [`Collect`]: super::collect::Collect
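    ///
    /// # Examples
    ///
    /// A minimal sketch using a statically stored `NoCollector`:
    ///
    /// ```rust
    /// use tracing_core::{collect::NoCollector, dispatch::Dispatch};
    ///
    /// static COLLECTOR: NoCollector = NoCollector::new();
    /// let dispatch = Dispatch::from_static(&COLLECTOR);
    ///
    /// // Downcasting to the concrete collector type succeeds here.
    /// assert!(dispatch.downcast_ref::<NoCollector>().is_some());
    /// ```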
    #[inline]
    pub fn downcast_ref<T: Any>(&self) -> Option<&T> {
        <dyn Collect>::downcast_ref(self.collector())
    }
}

impl Default for Dispatch {
    /// Returns the current default dispatcher.
    fn default() -> Self {
        get_default(|default| default.clone())
    }
}

impl fmt::Debug for Dispatch {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.collector {
            #[cfg(feature = "alloc")]
            Kind::Global(collector) => f
                .debug_tuple("Dispatch::Global")
                .field(&format_args!("{:p}", collector))
                .finish(),

            #[cfg(feature = "alloc")]
            Kind::Scoped(collector) => f
                .debug_tuple("Dispatch::Scoped")
                .field(&format_args!("{:p}", collector))
                .finish(),

            #[cfg(not(feature = "alloc"))]
            collector => f
                .debug_tuple("Dispatch::Global")
                .field(&format_args!("{:p}", collector))
                .finish(),
        }
    }
}

#[cfg(feature = "std")]
impl<C> From<C> for Dispatch
where
    C: Collect + Send + Sync + 'static,
{
    #[inline]
    fn from(collector: C) -> Self {
        Dispatch::new(collector)
    }
}

impl WeakDispatch {
    /// Attempts to upgrade this `WeakDispatch` to a [`Dispatch`].
    ///
    /// Returns `None` if the referenced `Dispatch` has already been dropped.
    ///
    /// ## Examples
    ///
    /// ```
    /// # use tracing_core::collect::NoCollector;
    /// # use tracing_core::dispatch::Dispatch;
    /// static COLLECTOR: NoCollector = NoCollector::new();
    /// let strong = Dispatch::new(COLLECTOR);
    /// let weak = strong.downgrade();
    ///
    /// // The strong `Dispatch` keeps the collector alive, so it can still be accessed.
    /// assert!(weak.upgrade().is_some());
    ///
    /// drop(strong); // But not any more.
    /// assert!(weak.upgrade().is_none());
    /// ```
    pub fn upgrade(&self) -> Option<Dispatch> {
        #[cfg(feature = "alloc")]
        let collector = match &self.collector {
            Kind::Global(dispatch) => Some(Kind::Global(*dispatch)),
            Kind::Scoped(dispatch) => dispatch.upgrade().map(Kind::Scoped),
        };
        #[cfg(not(feature = "alloc"))]
        let collector = Some(self.collector);

        collector.map(|collector| Dispatch { collector })
    }
}

impl fmt::Debug for WeakDispatch {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.collector {
            #[cfg(feature = "alloc")]
            Kind::Global(collector) => f
                .debug_tuple("WeakDispatch::Global")
                .field(&format_args!("{:p}", collector))
                .finish(),

            #[cfg(feature = "alloc")]
            Kind::Scoped(collector) => f
                .debug_tuple("WeakDispatch::Scoped")
                .field(&format_args!("{:p}", collector))
                .finish(),

            #[cfg(not(feature = "alloc"))]
            collector => f
                .debug_tuple("WeakDispatch::Global")
                .field(&format_args!("{:p}", collector))
                .finish(),
        }
    }
}

#[cfg(feature = "std")]
impl Registrar {
    pub(crate) fn upgrade(&self) -> Option<Dispatch> {
        match self.0 {
            Kind::Global(s) => Some(Dispatch {
                collector: Kind::Global(s),
            }),
            Kind::Scoped(ref s) => s.upgrade().map(|s| Dispatch {
                collector: Kind::Scoped(s),
            }),
        }
    }
}

// ===== impl State =====

#[cfg(feature = "std")]
impl State {
    /// Replaces the current default dispatcher on this thread with the provided
    /// dispatcher.
    ///
    /// Dropping the returned `DefaultGuard` will reset the default dispatcher to
    /// the previous value.
    #[inline]
    fn set_default(new_dispatch: Dispatch) -> DefaultGuard {
        let prior = CURRENT_STATE
            .try_with(|state| {
                state.can_enter.set(true);
                state
                    .default
                    .replace(Some(new_dispatch))
                    // if the scoped default was not set on this thread, set the
                    // `prior` default to the global default to populate the
                    // scoped default when unsetting *this* default
                    .unwrap_or_else(|| get_global().clone())
            })
            .ok();
        EXISTS.store(true, Ordering::Release);
        SCOPED_COUNT.fetch_add(1, Ordering::Release);
        DefaultGuard(prior)
    }

    #[inline]
    fn enter(&self) -> Option<Entered<'_>> {
        if self.can_enter.replace(false) {
            Some(Entered(self))
        } else {
            None
        }
    }
}

// ===== impl Entered =====

#[cfg(feature = "std")]
impl<'a> Entered<'a> {
    #[inline]
    fn current(&self) -> RefMut<'a, Dispatch> {
        let default = self.0.default.borrow_mut();
        RefMut::map(default, |default| {
            default.get_or_insert_with(|| get_global().clone())
        })
    }
}

#[cfg(feature = "std")]
impl Drop for Entered<'_> {
    #[inline]
    fn drop(&mut self) {
        self.0.can_enter.set(true);
    }
}

// ===== impl DefaultGuard =====

#[cfg(feature = "std")]
impl Drop for DefaultGuard {
    #[inline]
    fn drop(&mut self) {
        SCOPED_COUNT.fetch_sub(1, Ordering::Release);
        if let Some(dispatch) = self.0.take() {
            // Replace the dispatcher and then drop the old one outside
            // of the thread-local context. Dropping the dispatch may
            // lead to the drop of a collector which, in the process,
            // could then also attempt to access the same thread local
            // state -- causing a clash.
            let prev = CURRENT_STATE.try_with(|state| state.default.replace(Some(dispatch)));
            drop(prev)
        }
    }
}

#[cfg(test)]
mod test {

    use super::*;
    use crate::{
        callsite::Callsite,
        collect::Interest,
        metadata::{Kind, Level, Metadata},
    };

    #[test]
    fn dispatch_is() {
        let dispatcher = Dispatch::from_static(&NO_COLLECTOR);
        assert!(dispatcher.is::<NoCollector>());
    }

    #[test]
    fn dispatch_downcasts() {
        let dispatcher = Dispatch::from_static(&NO_COLLECTOR);
        assert!(dispatcher.downcast_ref::<NoCollector>().is_some());
    }

    struct TestCallsite;
    static TEST_CALLSITE: TestCallsite = TestCallsite;
    static TEST_META: Metadata<'static> = metadata! {
        name: "test",
        target: module_path!(),
        level: Level::DEBUG,
        fields: &[],
        callsite: &TEST_CALLSITE,
        kind: Kind::EVENT
    };

    impl Callsite for TestCallsite {
        fn set_interest(&self, _: Interest) {}
        fn metadata(&self) -> &Metadata<'_> {
            &TEST_META
        }
    }

    #[test]
    #[cfg(feature = "std")]
    fn events_dont_infinite_loop() {
        // This test ensures that an event triggered within a collector
        // won't cause an infinite loop of events.
        struct TestCollector;
        impl Collect for TestCollector {
            fn enabled(&self, _: &Metadata<'_>) -> bool {
                true
            }

            fn new_span(&self, _: &span::Attributes<'_>) -> span::Id {
                span::Id::from_u64(0xAAAA)
            }

            fn record(&self, _: &span::Id, _: &span::Record<'_>) {}

            fn record_follows_from(&self, _: &span::Id, _: &span::Id) {}

            fn event(&self, _: &Event<'_>) {
                static EVENTS: AtomicUsize = AtomicUsize::new(0);
                assert_eq!(
                    EVENTS.fetch_add(1, Ordering::Relaxed),
                    0,
                    "event method called twice!"
                );
                Event::dispatch(&TEST_META, &TEST_META.fields().value_set(&[]))
            }

            fn enter(&self, _: &span::Id) {}

            fn exit(&self, _: &span::Id) {}

            fn current_span(&self) -> span::Current {
                span::Current::unknown()
            }
        }

        with_default(&Dispatch::new(TestCollector), || {
            Event::dispatch(&TEST_META, &TEST_META.fields().value_set(&[]))
        })
    }

    #[test]
    #[cfg(feature = "std")]
    fn spans_dont_infinite_loop() {
        // This test ensures that a span created within a collector
        // won't cause an infinite loop of new spans.

        fn mk_span() {
            get_default(|current| {
                current.new_span(&span::Attributes::new(
                    &TEST_META,
                    &TEST_META.fields().value_set(&[]),
                ))
            });
        }

        struct TestCollector;
        impl Collect for TestCollector {
            fn enabled(&self, _: &Metadata<'_>) -> bool {
                true
            }

            fn new_span(&self, _: &span::Attributes<'_>) -> span::Id {
                static NEW_SPANS: AtomicUsize = AtomicUsize::new(0);
                assert_eq!(
                    NEW_SPANS.fetch_add(1, Ordering::Relaxed),
                    0,
                    "new_span method called twice!"
                );
                mk_span();
                span::Id::from_u64(0xAAAA)
            }

            fn record(&self, _: &span::Id, _: &span::Record<'_>) {}

            fn record_follows_from(&self, _: &span::Id, _: &span::Id) {}

            fn event(&self, _: &Event<'_>) {}

            fn enter(&self, _: &span::Id) {}

            fn exit(&self, _: &span::Id) {}

            fn current_span(&self) -> span::Current {
                span::Current::unknown()
            }
        }

        with_default(&Dispatch::new(TestCollector), mk_span)
    }

    #[test]
    fn default_no_collector() {
        let default_dispatcher = Dispatch::default();
        assert!(default_dispatcher.is::<NoCollector>());
    }

    #[cfg(feature = "std")]
    #[test]
    fn default_dispatch() {
        struct TestCollector;
        impl Collect for TestCollector {
            fn enabled(&self, _: &Metadata<'_>) -> bool {
                true
            }

            fn new_span(&self, _: &span::Attributes<'_>) -> span::Id {
                span::Id::from_u64(0xAAAA)
            }

            fn record(&self, _: &span::Id, _: &span::Record<'_>) {}

            fn record_follows_from(&self, _: &span::Id, _: &span::Id) {}

            fn event(&self, _: &Event<'_>) {}

            fn enter(&self, _: &span::Id) {}

            fn exit(&self, _: &span::Id) {}

            fn current_span(&self) -> span::Current {
                span::Current::unknown()
            }
        }
        let guard = set_default(&Dispatch::new(TestCollector));
        let default_dispatcher = Dispatch::default();
        assert!(default_dispatcher.is::<TestCollector>());

        drop(guard);
        let default_dispatcher = Dispatch::default();
        assert!(default_dispatcher.is::<NoCollector>());
    }
}