// tracing_core/dispatch.rs
1//! Dispatches trace events to [`Collect`]s.
2//!
3//! The _dispatcher_ is the component of the tracing system which is responsible
4//! for forwarding trace data from the instrumentation points that generate it
5//! to the collector that collects it.
6//!
7//! # Using the Trace Dispatcher
8//!
9//! Every thread in a program using `tracing` has a _default collector_. When
10//! events occur, or spans are created, they are dispatched to the thread's
11//! current collector.
12//!
13//! ## Setting the Default Collector
14//!
15//! By default, the current collector is an empty implementation that does
16//! nothing. Trace data provided to this "do nothing" implementation is
17//! immediately discarded, and is not available for any purpose.
18//!
19//! To use another collector implementation, it must be set as the default.
20//! There are two methods for doing so: [`with_default`] and
21//! [`set_global_default`]. `with_default` sets the default collector for the
22//! duration of a scope, while `set_global_default` sets a default collector
23//! for the entire process.
24//!
25//! To use either of these functions, we must first wrap our collector in a
26//! [`Dispatch`], a cloneable, type-erased reference to a collector. For
27//! example:
28//! ```rust
29//! # pub struct FooCollector;
30//! # use tracing_core::{
31//! # dispatch, Event, Metadata,
32//! # span::{Attributes, Current, Id, Record}
33//! # };
34//! # impl tracing_core::Collect for FooCollector {
35//! # fn new_span(&self, _: &Attributes) -> Id { Id::from_u64(0) }
36//! # fn record(&self, _: &Id, _: &Record) {}
37//! # fn event(&self, _: &Event) {}
38//! # fn record_follows_from(&self, _: &Id, _: &Id) {}
39//! # fn enabled(&self, _: &Metadata) -> bool { false }
40//! # fn enter(&self, _: &Id) {}
41//! # fn exit(&self, _: &Id) {}
42//! # fn current_span(&self) -> Current { Current::unknown() }
43//! # }
44//! # impl FooCollector { fn new() -> Self { FooCollector } }
45//! # #[cfg(feature = "alloc")]
46//! use dispatch::Dispatch;
47//!
48//! # #[cfg(feature = "alloc")]
49//! let my_collector = FooCollector::new();
50//! # #[cfg(feature = "alloc")]
51//! let my_dispatch = Dispatch::new(my_collector);
52//! ```
53//! Then, we can use [`with_default`] to set our `Dispatch` as the default for
54//! the duration of a block:
55//! ```rust
56//! # pub struct FooCollector;
57//! # use tracing_core::{
58//! # dispatch, Event, Metadata,
59//! # span::{Attributes, Current, Id, Record}
60//! # };
61//! # impl tracing_core::Collect for FooCollector {
62//! # fn new_span(&self, _: &Attributes) -> Id { Id::from_u64(0) }
63//! # fn record(&self, _: &Id, _: &Record) {}
64//! # fn event(&self, _: &Event) {}
65//! # fn record_follows_from(&self, _: &Id, _: &Id) {}
66//! # fn enabled(&self, _: &Metadata) -> bool { false }
67//! # fn enter(&self, _: &Id) {}
68//! # fn exit(&self, _: &Id) {}
69//! # fn current_span(&self) -> Current { Current::unknown() }
70//! # }
71//! # impl FooCollector { fn new() -> Self { FooCollector } }
72//! # let _my_collector = FooCollector::new();
73//! # #[cfg(feature = "std")]
74//! # let my_dispatch = dispatch::Dispatch::new(_my_collector);
75//! // no default collector
76//!
77//! # #[cfg(feature = "std")]
78//! dispatch::with_default(&my_dispatch, || {
79//! // my_collector is the default
80//! });
81//!
82//! // no default collector again
83//! ```
84//! It's important to note that `with_default` will not propagate the current
85//! thread's default collector to any threads spawned within the `with_default`
86//! block. To propagate the default collector to new threads, either use
87//! `with_default` from the new thread, or use `set_global_default`.
88//!
89//! As an alternative to `with_default`, we can use [`set_global_default`] to
90//! set a `Dispatch` as the default for all threads, for the lifetime of the
91//! program. For example:
92//! ```rust
93//! # pub struct FooCollector;
94//! # use tracing_core::{
95//! # dispatch, Event, Metadata,
96//! # span::{Attributes, Current, Id, Record}
97//! # };
98//! # impl tracing_core::Collect for FooCollector {
99//! # fn new_span(&self, _: &Attributes) -> Id { Id::from_u64(0) }
100//! # fn record(&self, _: &Id, _: &Record) {}
101//! # fn event(&self, _: &Event) {}
102//! # fn record_follows_from(&self, _: &Id, _: &Id) {}
103//! # fn enabled(&self, _: &Metadata) -> bool { false }
104//! # fn enter(&self, _: &Id) {}
105//! # fn exit(&self, _: &Id) {}
106//! # fn current_span(&self) -> Current { Current::unknown() }
107//! # }
108//! # impl FooCollector { fn new() -> Self { FooCollector } }
109//! # #[cfg(feature = "std")]
110//! # let my_collector = FooCollector::new();
111//! # #[cfg(feature = "std")]
112//! # let my_dispatch = dispatch::Dispatch::new(my_collector);
113//! // no default collector
114//!
115//! # #[cfg(feature = "std")]
116//! dispatch::set_global_default(my_dispatch)
117//! // `set_global_default` will return an error if the global default
118//! // collector has already been set.
119//! .expect("global default was already set!");
120//!
121//! // `my_collector` is now the default
122//! ```
123//!
124//! <div class="example-wrap" style="display:inline-block">
125//! <pre class="ignore" style="white-space:normal;font:inherit;">
126//!
127//! **Note**: the thread-local scoped dispatcher ([`with_default`]) requires the
128//! Rust standard library. `no_std` users should use [`set_global_default`] instead.
129//!
130//! </pre></div>
131//!
132//! ## Accessing the Default Collector
133//!
134//! A thread's current default collector can be accessed using the
135//! [`get_default`] function, which executes a closure with a reference to the
136//! currently default `Dispatch`. This is used primarily by `tracing`
137//! instrumentation.
138use crate::{
139 collect::{self, Collect, NoCollector},
140 span, Event, LevelFilter, Metadata,
141};
142
143use core::{
144 any::Any,
145 fmt,
146 sync::atomic::{AtomicBool, AtomicUsize, Ordering},
147};
148
149#[cfg(feature = "std")]
150use std::{
151 cell::{Cell, RefCell, RefMut},
152 error,
153};
154
155#[cfg(all(feature = "alloc", not(feature = "portable-atomic")))]
156use alloc::sync::{Arc, Weak};
157
158#[cfg(all(feature = "alloc", feature = "portable-atomic"))]
159use portable_atomic_util::{Arc, Weak};
160
161#[cfg(feature = "alloc")]
162use core::ops::Deref;
163
/// `Dispatch` trace data to a [`Collect`].
#[derive(Clone)]
pub struct Dispatch {
    /// With `alloc`, the collector may be either a `&'static` reference or a
    /// reference-counted `Arc` (see [`Kind`]).
    #[cfg(feature = "alloc")]
    collector: Kind<Arc<dyn Collect + Send + Sync>>,

    /// Without `alloc`, only `'static` collectors can be dispatched to.
    #[cfg(not(feature = "alloc"))]
    collector: &'static (dyn Collect + Send + Sync),
}
173
/// `WeakDispatch` is a version of [`Dispatch`] that holds a non-owning reference
/// to a [collector].
///
/// The collector may be accessed by calling [`WeakDispatch::upgrade`],
/// which returns an `Option<Dispatch>`. If all [`Dispatch`] clones that point
/// at the collector have been dropped, [`WeakDispatch::upgrade`] will return
/// `None`. Otherwise, it will return `Some(Dispatch)`.
///
/// A `WeakDispatch` may be created from a [`Dispatch`] by calling the
/// [`Dispatch::downgrade`] method. The primary use for creating a
/// [`WeakDispatch`] is to allow a collector to hold a cyclical reference to
/// itself without creating a memory leak. See [here] for details.
///
/// This type is analogous to the [`std::sync::Weak`] type, but for a
/// [`Dispatch`] rather than an [`Arc`].
///
/// [collector]: Collect
/// [`Arc`]: std::sync::Arc
/// [here]: Collect#avoiding-memory-leaks
#[derive(Clone)]
pub struct WeakDispatch {
    /// Mirrors `Dispatch::collector`, but holds a `Weak` (non-owning) handle
    /// in the `Scoped` case so it does not keep the collector alive.
    #[cfg(feature = "alloc")]
    collector: Kind<Weak<dyn Collect + Send + Sync>>,

    /// A `&'static` collector can never be dropped, so no weak handle is
    /// needed without `alloc`.
    #[cfg(not(feature = "alloc"))]
    collector: &'static (dyn Collect + Send + Sync),
}
201
/// How a dispatcher's collector is stored.
#[cfg(feature = "alloc")]
#[derive(Clone)]
enum Kind<T> {
    /// A collector stored in a `static`, borrowed for the program's lifetime.
    Global(&'static (dyn Collect + Send + Sync)),
    /// A reference-counted collector: `T` is `Arc<dyn Collect + …>` in
    /// [`Dispatch`] and `Weak<dyn Collect + …>` in [`WeakDispatch`]/`Registrar`.
    Scoped(T),
}
208
#[cfg(feature = "std")]
thread_local! {
    // Per-thread dispatch state: the thread's scoped default dispatcher (if
    // any) and the re-entrancy flag guarding against recursive dispatch.
    // `const` initialization keeps thread-local access cheap.
    static CURRENT_STATE: State = const {
        State {
            default: RefCell::new(None),
            can_enter: Cell::new(true),
        }
    };
}
218
/// Set to `true` once any dispatcher has been installed (see
/// [`set_global_default`] and [`has_been_set`]).
static EXISTS: AtomicBool = AtomicBool::new(false);
/// One-shot state machine guarding `GLOBAL_DISPATCH`:
/// `UNINITIALIZED` -> `INITIALIZING` -> `INITIALIZED`.
static GLOBAL_INIT: AtomicUsize = AtomicUsize::new(UNINITIALIZED);

/// Number of scoped (thread-local) defaults in existence; lets `get_default`
/// skip the thread-local lookup entirely while this is zero.
// NOTE(review): the increment/decrement sites are outside this chunk —
// presumably `State::set_default` / `DefaultGuard`'s `Drop` impl; confirm.
#[cfg(feature = "std")]
static SCOPED_COUNT: AtomicUsize = AtomicUsize::new(0);

// States for `GLOBAL_INIT`, above.
const UNINITIALIZED: usize = 0;
const INITIALIZING: usize = 1;
const INITIALIZED: usize = 2;
228
// The process-wide default dispatcher. Written at most once, by
// `set_global_default` while it holds the `INITIALIZING` state, and only read
// (in `get_global`) after `GLOBAL_INIT` has been observed as `INITIALIZED`,
// which is what makes the `static mut` access sound.
static mut GLOBAL_DISPATCH: Dispatch = Dispatch {
    #[cfg(feature = "alloc")]
    collector: Kind::Global(&NO_COLLECTOR),
    #[cfg(not(feature = "alloc"))]
    collector: &NO_COLLECTOR,
};
// A no-op dispatcher handed out while no global default has been installed.
static NONE: Dispatch = Dispatch {
    #[cfg(feature = "alloc")]
    collector: Kind::Global(&NO_COLLECTOR),
    #[cfg(not(feature = "alloc"))]
    collector: &NO_COLLECTOR,
};
// The collector backing both `NONE` and the pre-initialization global dispatch.
static NO_COLLECTOR: NoCollector = NoCollector::new();
242
/// The dispatch state of a thread.
#[cfg(feature = "std")]
struct State {
    /// This thread's current default dispatcher.
    ///
    /// `None` until either a scoped default is set or the global default is
    /// cached here (see `get_default_slow`).
    default: RefCell<Option<Dispatch>>,
    /// Whether or not we can currently begin dispatching a trace event.
    ///
    /// This is set to `false` when functions such as `enter`, `exit`, `event`,
    /// and `new_span` are called on this thread's default dispatcher, to
    /// prevent further trace events triggered inside those functions from
    /// creating an infinite recursion. When we finish handling a dispatch, this
    /// is set back to `true`.
    can_enter: Cell<bool>,
}
257
/// While this guard is active, additional calls to collector functions on
/// the default dispatcher will not be able to access the dispatch context.
/// Dropping the guard will allow the dispatch context to be re-entered.
// NOTE(review): the methods on this guard (`State::enter`, `Entered::current`,
// and its `Drop` impl) are defined outside this chunk — see `get_current`.
#[cfg(feature = "std")]
struct Entered<'a>(&'a State);
263
/// A guard that resets the current default dispatcher to the prior
/// default dispatcher when dropped.
// The wrapped value is the dispatcher that was the default before this guard
// was created (if any); presumably restored by a `Drop` impl outside this
// chunk — confirm.
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[derive(Debug)]
pub struct DefaultGuard(Option<Dispatch>);
270
/// Sets this dispatch as the default for the duration of a closure.
///
/// The default dispatcher is used when creating a new [span] or
/// [`Event`].
///
/// <div class="example-wrap" style="display:inline-block">
/// <pre class="ignore" style="white-space:normal;font:inherit;">
/// <strong>Note</strong>: This function requires the Rust standard library.
/// <!-- hack: this whitespace makes rustdoc interpret the next line as markdown again -->
///
/// `no_std` users should use [`set_global_default`] instead.
///
/// </pre></div>
///
/// [span]: super::span
/// [`Event`]: super::event::Event
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub fn with_default<T>(dispatcher: &Dispatch, f: impl FnOnce() -> T) -> T {
    // When this guard is dropped, the default dispatcher will be reset to the
    // prior default. Using this (rather than simply resetting after calling
    // `f`) ensures that we always reset to the prior dispatcher even if `f`
    // panics.
    let _guard = set_default(dispatcher);
    f()
}
297
/// Sets the dispatch as the default dispatch for the duration of the lifetime
/// of the returned DefaultGuard
///
/// <div class="example-wrap" style="display:inline-block">
/// <pre class="ignore" style="white-space:normal;font:inherit;">
///
/// **Note**: This function requires the Rust standard library.
/// `no_std` users should use [`set_global_default`] instead.
///
/// </pre></div>
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[must_use = "Dropping the guard unregisters the dispatcher."]
pub fn set_default(dispatcher: &Dispatch) -> DefaultGuard {
    // When this guard is dropped, the default dispatcher will be reset to the
    // prior default. Using this ensures that we always reset to the prior
    // dispatcher even if the thread calling this function panics.
    State::set_default(dispatcher.clone())
}
317
/// Sets this dispatch as the global default for the duration of the entire program.
/// Will be used as a fallback if no thread-local dispatch has been set in a thread
/// (using `with_default`.)
///
/// Can only be set once; subsequent attempts to set the global default will fail.
/// Returns `Err` if the global default has already been set.
///
///
/// <div class="example-wrap" style="display:inline-block"><pre class="compile_fail" style="white-space:normal;font:inherit;">
/// <strong>Warning</strong>: In general, libraries should <em>not</em> call
/// <code>set_global_default()</code>! Doing so will cause conflicts when
/// executables that depend on the library try to set the default collector later.
/// </pre></div>
///
/// [span]: super::span
/// [`Event`]: super::event::Event
pub fn set_global_default(dispatcher: Dispatch) -> Result<(), SetGlobalDefaultError> {
    // if `compare_exchange` returns Result::Ok(_), then `new` has been set and
    // `current`—now the prior value—has been returned in the `Ok()` branch.
    // Winning this exchange gives us exclusive ownership of the
    // `INITIALIZING` state, and thus exclusive write access to
    // `GLOBAL_DISPATCH` below.
    if GLOBAL_INIT
        .compare_exchange(
            UNINITIALIZED,
            INITIALIZING,
            Ordering::SeqCst,
            Ordering::SeqCst,
        )
        .is_ok()
    {
        // Convert the dispatcher into a `&'static` collector so it can live in
        // the `static mut` global.
        #[cfg(feature = "alloc")]
        let collector = {
            let collector = match dispatcher.collector {
                Kind::Global(s) => s,
                Kind::Scoped(s) => unsafe {
                    // safety: this leaks the collector onto the heap. the
                    // reference count will always be at least 1.
                    &*Arc::into_raw(s)
                },
            };
            Kind::Global(collector)
        };

        #[cfg(not(feature = "alloc"))]
        let collector = dispatcher.collector;

        unsafe {
            // SAFETY: we hold the `INITIALIZING` state, so no other thread can
            // write here, and readers only access `GLOBAL_DISPATCH` after
            // observing `INITIALIZED` (stored below).
            GLOBAL_DISPATCH = Dispatch { collector };
        }
        GLOBAL_INIT.store(INITIALIZED, Ordering::SeqCst);
        EXISTS.store(true, Ordering::Release);
        Ok(())
    } else {
        // Another call already initialized (or is initializing) the global.
        Err(SetGlobalDefaultError { _no_construct: () })
    }
}
372
/// Returns true if a `tracing` dispatcher has ever been set.
///
/// This may be used to completely elide trace points if tracing is not in use
/// at all or has yet to be initialized.
#[doc(hidden)]
#[inline(always)]
pub fn has_been_set() -> bool {
    // Relaxed is sufficient: this is only a cheap hint, not a synchronization
    // point for reading the dispatcher itself.
    EXISTS.load(Ordering::Relaxed)
}
382
/// Returned if setting the global dispatcher fails.
pub struct SetGlobalDefaultError {
    // Private zero-sized field prevents construction outside this module.
    _no_construct: (),
}
387
388impl fmt::Debug for SetGlobalDefaultError {
389 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
390 f.debug_tuple("SetGlobalDefaultError")
391 .field(&Self::MESSAGE)
392 .finish()
393 }
394}
395
396impl fmt::Display for SetGlobalDefaultError {
397 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
398 f.pad(Self::MESSAGE)
399 }
400}
401
// `std::error::Error` is only available with the standard library; the
// `Debug`/`Display` impls above cover `no_std` builds.
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl error::Error for SetGlobalDefaultError {}
405
impl SetGlobalDefaultError {
    /// Error text shared by the `Debug` and `Display` implementations.
    const MESSAGE: &'static str = "a global default trace dispatcher has already been set";
}
409
/// Executes a closure with a reference to this thread's current [dispatcher].
///
/// Note that calls to `get_default` should not be nested; if this function is
/// called while inside of another `get_default`, that closure will be provided
/// with `Dispatch::none` rather than the previously set dispatcher.
///
/// [dispatcher]: super::dispatch::Dispatch
#[cfg(feature = "std")]
#[inline(always)]
pub fn get_default<T, F>(mut f: F) -> T
where
    F: FnMut(&Dispatch) -> T,
{
    // Slow path: at least one scoped dispatcher exists somewhere, so the
    // thread-local state must be consulted.
    if SCOPED_COUNT.load(Ordering::Acquire) != 0 {
        return get_default_slow(f);
    }

    // Fast path: no scoped dispatcher has ever been set; the global default
    // (or the no-op dispatcher) applies unconditionally.
    f(get_global())
}
431
// The slow path of `get_default`: consult (and lazily populate) the
// thread-local dispatch state. Kept `#[inline(never)]` so the fast path in
// `get_default` stays small.
#[cfg(feature = "std")]
#[inline(never)]
fn get_default_slow<T, F>(mut f: F) -> T
where
    F: FnMut(&Dispatch) -> T,
{
    // While this guard is active, additional calls to collector functions on
    // the default dispatcher will not be able to access the dispatch context.
    // Dropping the guard will allow the dispatch context to be re-entered.
    struct Entered<'a>(&'a Cell<bool>);
    impl Drop for Entered<'_> {
        #[inline]
        fn drop(&mut self) {
            // Re-arm the re-entrancy flag once the closure has finished.
            self.0.set(true);
        }
    }

    CURRENT_STATE
        .try_with(|state| {
            // `replace(false)` both reads and clears the flag: if it was
            // `true`, we are the outermost dispatch call on this thread.
            if state.can_enter.replace(false) {
                let _guard = Entered(&state.can_enter);

                let mut default = state.default.borrow_mut();
                let default = default
                    // if the local default for this thread has never been set,
                    // populate it with the global default, so we don't have to
                    // keep getting the global on every `get_default_slow` call.
                    .get_or_insert_with(|| get_global().clone());

                return f(&*default);
            }

            // Re-entrant call (or nested `get_default`): hand out the no-op
            // dispatcher instead of recursing into the real one.
            f(&Dispatch::none())
        })
        // The thread-local has been destroyed (thread is shutting down).
        .unwrap_or_else(|_| f(&Dispatch::none()))
}
468
/// Executes a closure with a reference to this thread's current [dispatcher].
///
/// Note that calls to `get_default` should not be nested; if this function is
/// called while inside of another `get_default`, that closure will be provided
/// with `Dispatch::none` rather than the previously set dispatcher.
///
/// [dispatcher]: super::dispatch::Dispatch
#[cfg(feature = "std")]
#[doc(hidden)]
#[inline(never)]
pub fn get_current<T>(f: impl FnOnce(&Dispatch) -> T) -> Option<T> {
    // Returns `None` both when the dispatch context cannot be entered
    // (re-entrant call) and when the thread-local is unavailable (thread
    // shutdown, via the outer `.ok()?`).
    CURRENT_STATE
        .try_with(|state| {
            let entered = state.enter()?;
            Some(f(&entered.current()))
        })
        .ok()?
}
487
488/// Executes a closure with a reference to the current [dispatcher].
489///
490/// [dispatcher]: super::dispatcher::Dispatch
491#[cfg(not(feature = "std"))]
492#[doc(hidden)]
493pub fn get_current<T>(f: impl FnOnce(&Dispatch) -> T) -> Option<T> {
494 Some(f(&get_global()))
495}
496
/// Executes a closure with a reference to the current [dispatcher].
///
/// [dispatcher]: super::dispatch::Dispatch
#[cfg(not(feature = "std"))]
pub fn get_default<T, F>(mut f: F) -> T
where
    F: FnMut(&Dispatch) -> T,
{
    // No thread-local scoped dispatchers without `std`: the global default
    // (or the no-op dispatcher) is always the current one.
    f(get_global())
}
507
/// Returns the global default dispatcher, or the no-op dispatcher if the
/// global default has not (yet) been fully installed.
#[inline(always)]
pub(crate) fn get_global() -> &'static Dispatch {
    if GLOBAL_INIT.load(Ordering::Acquire) != INITIALIZED {
        // Uninitialized, or mid-initialization: fall back to the no-op
        // dispatcher rather than racing the writer.
        return &NONE;
    }
    unsafe {
        // This is safe given the invariant that setting the global dispatcher
        // also sets `GLOBAL_INIT` to `INITIALIZED`.
        #[allow(static_mut_refs)]
        &GLOBAL_DISPATCH
    }
}
520
/// A non-owning handle to a dispatcher's collector, used by the callsite
/// registry (see `Dispatch::registrar`) without keeping the collector alive.
#[cfg(feature = "std")]
pub(crate) struct Registrar(Kind<Weak<dyn Collect + Send + Sync>>);
523
impl Dispatch {
    /// Returns a new `Dispatch` that discards events and spans.
    #[inline]
    pub fn none() -> Self {
        Dispatch {
            #[cfg(feature = "alloc")]
            collector: Kind::Global(&NO_COLLECTOR),
            #[cfg(not(feature = "alloc"))]
            collector: &NO_COLLECTOR,
        }
    }

    /// Returns a `Dispatch` that forwards to the given [`Collect`].
    ///
    /// [`Collect`]: super::collect::Collect
    #[cfg(feature = "alloc")]
    #[cfg_attr(docsrs, doc(cfg(any(feature = "std", feature = "alloc"))))]
    pub fn new<C>(collector: C) -> Self
    where
        C: Collect + Send + Sync + 'static,
    {
        #[cfg(not(feature = "portable-atomic"))]
        let arc = Arc::new(collector);

        #[cfg(feature = "portable-atomic")]
        let arc = {
            use alloc::boxed::Box;

            // Workaround for a lack of support for unsized coercion in non-first-party types.
            // See https://github.com/rust-lang/rust/issues/18598
            let boxed: Box<dyn Collect + Send + Sync> = Box::<C>::new(collector);
            Arc::from(boxed)
        };

        let me = Dispatch {
            collector: Kind::Scoped(arc),
        };
        // Register the new dispatcher with the callsite registry (defined in
        // `crate::callsite`).
        crate::callsite::register_dispatch(&me);
        me
    }

    /// Returns a `Dispatch` that forwards to the given static [collector].
    ///
    /// Unlike [`Dispatch::new`], this function is always available on all
    /// platforms, even when the `std` or `alloc` features are disabled.
    ///
    /// In order to use `from_static`, the `Collector` itself must be stored in
    /// a static. For example:
    ///
    /// ```rust
    /// struct MyCollector {
    ///    // ...
    /// }
    ///
    /// # use tracing_core::{span::{Id, Attributes, Current, Record}, Event, Metadata};
    /// impl tracing_core::Collect for MyCollector {
    ///     // ...
    /// #   fn new_span(&self, _: &Attributes) -> Id { Id::from_u64(0) }
    /// #   fn record(&self, _: &Id, _: &Record) {}
    /// #   fn event(&self, _: &Event) {}
    /// #   fn record_follows_from(&self, _: &Id, _: &Id) {}
    /// #   fn enabled(&self, _: &Metadata) -> bool { false }
    /// #   fn enter(&self, _: &Id) {}
    /// #   fn exit(&self, _: &Id) {}
    /// #   fn current_span(&self) -> Current { Current::unknown() }
    /// }
    ///
    /// static COLLECTOR: MyCollector = MyCollector {
    ///     // ...
    /// };
    ///
    /// fn main() {
    ///     use tracing_core::dispatch::{self, Dispatch};
    ///
    ///     let dispatch = Dispatch::from_static(&COLLECTOR);
    ///
    ///     dispatch::set_global_default(dispatch)
    ///         .expect("no global default collector should have been set previously!");
    /// }
    /// ```
    ///
    /// Constructing the collector in a static initializer may make some forms
    /// of runtime configuration more challenging. If this is the case, users
    /// with access to `liballoc` or the Rust standard library are encouraged to
    /// use [`Dispatch::new`] rather than `from_static`. `no_std` users who
    /// cannot allocate or do not have access to `liballoc` may want to consider
    /// the [`once_cell`] crate, or another library which allows lazy
    /// initialization of statics.
    ///
    /// [collector]: super::collect::Collect
    /// [`once_cell`]: https://crates.io/crates/once_cell
    pub fn from_static(collector: &'static (dyn Collect + Send + Sync)) -> Self {
        #[cfg(feature = "alloc")]
        let me = Self {
            collector: Kind::Global(collector),
        };
        #[cfg(not(feature = "alloc"))]
        let me = Self { collector };
        // Register the new dispatcher with the callsite registry.
        crate::callsite::register_dispatch(&me);
        me
    }

    /// Creates a [`WeakDispatch`] from this `Dispatch`.
    ///
    /// A [`WeakDispatch`] is similar to a [`Dispatch`], but it does not prevent
    /// the underlying [collector] from being dropped. Instead, it only permits
    /// access while other references to the collector exist. This is equivalent
    /// to the standard library's [`Arc::downgrade`] method, but for `Dispatch`
    /// rather than `Arc`.
    ///
    /// The primary use for creating a [`WeakDispatch`] is to allow a collector
    /// to hold a cyclical reference to itself without creating a memory leak.
    /// See [here] for details.
    ///
    /// [collector]: Collect
    /// [`Arc::downgrade`]: std::sync::Arc::downgrade
    /// [here]: Collect#avoiding-memory-leaks
    pub fn downgrade(&self) -> WeakDispatch {
        #[cfg(feature = "alloc")]
        let collector = match &self.collector {
            // `&'static` collectors can never be dropped; copy the reference.
            Kind::Global(dispatch) => Kind::Global(*dispatch),
            Kind::Scoped(dispatch) => Kind::Scoped(Arc::downgrade(dispatch)),
        };
        #[cfg(not(feature = "alloc"))]
        let collector = self.collector;

        WeakDispatch { collector }
    }

    /// Returns a non-owning `Registrar` handle for the callsite registry.
    #[cfg(feature = "std")]
    pub(crate) fn registrar(&self) -> Registrar {
        Registrar(match self.collector {
            Kind::Scoped(ref s) => Kind::Scoped(Arc::downgrade(s)),
            Kind::Global(s) => Kind::Global(s),
        })
    }

    /// Borrows the collector this `Dispatch` forwards to.
    #[inline(always)]
    #[cfg(feature = "alloc")]
    pub(crate) fn collector(&self) -> &(dyn Collect + Send + Sync) {
        match self.collector {
            Kind::Scoped(ref s) => Arc::deref(s),
            Kind::Global(s) => s,
        }
    }

    /// Borrows the collector this `Dispatch` forwards to.
    #[inline(always)]
    #[cfg(not(feature = "alloc"))]
    pub(crate) fn collector(&self) -> &(dyn Collect + Send + Sync) {
        self.collector
    }

    /// Registers a new callsite with this collector, returning whether or not
    /// the collector is interested in being notified about the callsite.
    ///
    /// This calls the [`register_callsite`] function on the [`Collect`]
    /// that this `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`register_callsite`]: super::collect::Collect::register_callsite
    #[inline]
    pub fn register_callsite(&self, metadata: &'static Metadata<'static>) -> collect::Interest {
        self.collector().register_callsite(metadata)
    }

    /// Returns the highest [verbosity level][level] that this [collector] will
    /// enable, or `None`, if the collector does not implement level-based
    /// filtering or chooses not to implement this method.
    ///
    /// This calls the [`max_level_hint`] function on the [`Collect`]
    /// that this `Dispatch` forwards to.
    ///
    /// [level]: super::Level
    /// [collector]: super::collect::Collect
    /// [`Collect`]: super::collect::Collect
    /// [`max_level_hint`]: super::collect::Collect::max_level_hint
    // TODO(eliza): consider making this a public API?
    #[inline]
    pub(crate) fn max_level_hint(&self) -> Option<LevelFilter> {
        self.collector().max_level_hint()
    }

    /// Record the construction of a new span, returning a new [ID] for the
    /// span being constructed.
    ///
    /// This calls the [`new_span`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [ID]: super::span::Id
    /// [`Collect`]: super::collect::Collect
    /// [`new_span`]: super::collect::Collect::new_span
    #[inline]
    pub fn new_span(&self, span: &span::Attributes<'_>) -> span::Id {
        self.collector().new_span(span)
    }

    /// Record a set of values on a span.
    ///
    /// This calls the [`record`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`record`]: super::collect::Collect::record
    #[inline]
    pub fn record(&self, span: &span::Id, values: &span::Record<'_>) {
        self.collector().record(span, values)
    }

    /// Adds an indication that `span` follows from the span with the id
    /// `follows`.
    ///
    /// This calls the [`record_follows_from`] function on the [`Collect`]
    /// that this `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`record_follows_from`]: super::collect::Collect::record_follows_from
    #[inline]
    pub fn record_follows_from(&self, span: &span::Id, follows: &span::Id) {
        self.collector().record_follows_from(span, follows)
    }

    /// Returns true if a span with the specified [metadata] would be
    /// recorded.
    ///
    /// This calls the [`enabled`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [metadata]: super::metadata::Metadata
    /// [`Collect`]: super::collect::Collect
    /// [`enabled`]: super::collect::Collect::enabled
    #[inline]
    pub fn enabled(&self, metadata: &Metadata<'_>) -> bool {
        self.collector().enabled(metadata)
    }

    /// Records that an [`Event`] has occurred.
    ///
    /// This calls the [`event`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [`Event`]: super::event::Event
    /// [`Collect`]: super::collect::Collect
    /// [`event`]: super::collect::Collect::event
    #[inline]
    pub fn event(&self, event: &Event<'_>) {
        let collector = self.collector();
        // The event is only forwarded if the collector's per-event filter
        // (`event_enabled`) accepts it.
        if collector.event_enabled(event) {
            collector.event(event);
        }
    }

    /// Records that a span has been entered.
    ///
    /// This calls the [`enter`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`enter`]: super::collect::Collect::enter
    pub fn enter(&self, span: &span::Id) {
        self.collector().enter(span);
    }

    /// Records that a span has been exited.
    ///
    /// This calls the [`exit`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`exit`]: super::collect::Collect::exit
    pub fn exit(&self, span: &span::Id) {
        self.collector().exit(span);
    }

    /// Notifies the [collector] that a [span ID] has been cloned.
    ///
    /// This function must only be called with span IDs that were returned by
    /// this `Dispatch`'s [`new_span`] function. The `tracing` crate upholds
    /// this guarantee and any other libraries implementing instrumentation APIs
    /// must as well.
    ///
    /// This calls the [`clone_span`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [span ID]: super::span::Id
    /// [collector]: super::collect::Collect
    /// [`clone_span`]: super::collect::Collect::clone_span
    /// [`new_span`]: super::collect::Collect::new_span
    #[inline]
    pub fn clone_span(&self, id: &span::Id) -> span::Id {
        self.collector().clone_span(id)
    }

    /// Notifies the collector that a [span ID] has been dropped.
    ///
    /// This function must only be called with span IDs that were returned by
    /// this `Dispatch`'s [`new_span`] function. The `tracing` crate upholds
    /// this guarantee and any other libraries implementing instrumentation APIs
    /// must as well.
    ///
    /// This calls the [`drop_span`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// <div class="example-wrap" style="display:inline-block"><pre class="compile_fail" style="white-space:normal;font:inherit;">
    ///
    /// **Deprecated**: The [`try_close`] method is functionally identical, but returns `true` if the span is now closed.
    /// It should be used instead of this method.
    ///
    /// </pre></div>
    ///
    /// [span ID]: super::span::Id
    /// [`Collect`]: super::collect::Collect
    /// [`drop_span`]: super::collect::Collect::drop_span
    /// [`new_span`]: super::collect::Collect::new_span
    /// [`try_close`]: Self::try_close
    #[inline]
    #[deprecated(since = "0.1.2", note = "use `Dispatch::try_close` instead")]
    pub fn drop_span(&self, id: span::Id) {
        // Forwarding to the (also deprecated) collector method; suppress the
        // deprecation warning that this call would otherwise trigger.
        #[allow(deprecated)]
        self.collector().drop_span(id);
    }

    /// Notifies the collector that a [span ID] has been dropped, and returns
    /// `true` if there are now 0 IDs referring to that span.
    ///
    /// This function must only be called with span IDs that were returned by
    /// this `Dispatch`'s [`new_span`] function. The `tracing` crate upholds
    /// this guarantee and any other libraries implementing instrumentation APIs
    /// must as well.
    ///
    /// This calls the [`try_close`] function on the [`Collect`] trait
    /// that this `Dispatch` forwards to.
    ///
    /// [span ID]: super::span::Id
    /// [`Collect`]: super::collect::Collect
    /// [`try_close`]: super::collect::Collect::try_close
    /// [`new_span`]: super::collect::Collect::new_span
    pub fn try_close(&self, id: span::Id) -> bool {
        self.collector().try_close(id)
    }

    /// Returns a type representing this collector's view of the current span.
    ///
    /// This calls the [`current`] function on the [`Collect`] that this
    /// `Dispatch` forwards to.
    ///
    /// [`Collect`]: super::collect::Collect
    /// [`current`]: super::collect::Collect::current_span
    #[inline]
    pub fn current_span(&self) -> span::Current {
        self.collector().current_span()
    }

    /// Returns `true` if this `Dispatch` forwards to a collector of type
    /// `T`.
    #[inline]
    pub fn is<T: Any>(&self) -> bool {
        <dyn Collect>::is::<T>(self.collector())
    }

    /// Returns some reference to the [`Collect`] this `Dispatch` forwards to
    /// if it is of type `T`, or `None` if it isn't.
    ///
    /// [`Collect`]: super::collect::Collect
    #[inline]
    pub fn downcast_ref<T: Any>(&self) -> Option<&T> {
        <dyn Collect>::downcast_ref(self.collector())
    }
}
892
893impl Default for Dispatch {
894 /// Returns the current default dispatcher
895 fn default() -> Self {
896 get_default(|default| default.clone())
897 }
898}
899
900impl fmt::Debug for Dispatch {
901 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
902 match &self.collector {
903 #[cfg(feature = "alloc")]
904 Kind::Global(collector) => f
905 .debug_tuple("Dispatch::Global")
906 .field(&format_args!("{:p}", collector))
907 .finish(),
908
909 #[cfg(feature = "alloc")]
910 Kind::Scoped(collector) => f
911 .debug_tuple("Dispatch::Scoped")
912 .field(&format_args!("{:p}", collector))
913 .finish(),
914
915 #[cfg(not(feature = "alloc"))]
916 collector => f
917 .debug_tuple("Dispatch::Global")
918 .field(&format_args!("{:p}", collector))
919 .finish(),
920 }
921 }
922}
923
#[cfg(feature = "std")]
impl<C> From<C> for Dispatch
where
    C: Collect + Send + Sync + 'static,
{
    /// Wraps `collector` in a new `Dispatch`; equivalent to calling
    /// [`Dispatch::new`].
    #[inline]
    fn from(collector: C) -> Self {
        Self::new(collector)
    }
}
934
935impl WeakDispatch {
936 /// Attempts to upgrade this `WeakDispatch` to a [`Dispatch`].
937 ///
938 /// Returns `None` if the referenced `Dispatch` has already been dropped.
939 ///
940 /// ## Examples
941 ///
942 /// ```
943 /// # use tracing_core::collect::NoCollector;
944 /// # use tracing_core::dispatch::Dispatch;
945 /// static COLLECTOR: NoCollector = NoCollector::new();
946 /// let strong = Dispatch::new(COLLECTOR);
947 /// let weak = strong.downgrade();
948 ///
949 /// // The strong here keeps it alive, so we can still access the object.
950 /// assert!(weak.upgrade().is_some());
951 ///
952 /// drop(strong); // But not any more.
953 /// assert!(weak.upgrade().is_none());
954 /// ```
955 pub fn upgrade(&self) -> Option<Dispatch> {
956 #[cfg(feature = "alloc")]
957 let collector = match &self.collector {
958 Kind::Global(dispatch) => Some(Kind::Global(*dispatch)),
959 Kind::Scoped(dispatch) => dispatch.upgrade().map(Kind::Scoped),
960 };
961 #[cfg(not(feature = "alloc"))]
962 let collector = Some(self.collector);
963
964 collector.map(|collector| Dispatch { collector })
965 }
966}
967
968impl fmt::Debug for WeakDispatch {
969 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
970 match &self.collector {
971 #[cfg(feature = "alloc")]
972 Kind::Global(collector) => f
973 .debug_tuple("WeakDispatch::Global")
974 .field(&format_args!("{:p}", collector))
975 .finish(),
976
977 #[cfg(feature = "alloc")]
978 Kind::Scoped(collector) => f
979 .debug_tuple("WeakDispatch::Scoped")
980 .field(&format_args!("{:p}", collector))
981 .finish(),
982
983 #[cfg(not(feature = "alloc"))]
984 collector => f
985 .debug_tuple("WeakDispatch::Global")
986 .field(&format_args!("{:p}", collector))
987 .finish(),
988 }
989 }
990}
991
#[cfg(feature = "std")]
impl Registrar {
    /// Attempts to produce a strong `Dispatch` from this registrar's handle.
    ///
    /// A global (static) collector always upgrades; a scoped collector
    /// upgrades only while the underlying collector is still alive.
    pub(crate) fn upgrade(&self) -> Option<Dispatch> {
        match &self.0 {
            Kind::Global(collector) => Some(Dispatch {
                collector: Kind::Global(*collector),
            }),
            Kind::Scoped(weak) => weak.upgrade().map(|scoped| Dispatch {
                collector: Kind::Scoped(scoped),
            }),
        }
    }
}
1005
1006// ===== impl State =====
1007
#[cfg(feature = "std")]
impl State {
    /// Replaces the current default dispatcher on this thread with the provided
    /// dispatcher.
    ///
    /// Dropping the returned `ResetGuard` will reset the default dispatcher to
    /// the previous value.
    #[inline]
    fn set_default(new_dispatch: Dispatch) -> DefaultGuard {
        let prior = CURRENT_STATE
            .try_with(|state| {
                state.can_enter.set(true);
                state
                    .default
                    .replace(Some(new_dispatch))
                    // if the scoped default was not set on this thread, set the
                    // `prior` default to the global default to populate the
                    // scoped default when unsetting *this* default
                    .unwrap_or_else(|| get_global().clone())
            })
            .ok();
        // Record that a dispatcher has been installed, and count this scoped
        // guard; the count is decremented again in `DefaultGuard::drop`.
        EXISTS.store(true, Ordering::Release);
        SCOPED_COUNT.fetch_add(1, Ordering::Release);
        DefaultGuard(prior)
    }

    /// Attempts to mark this thread's state as "entered".
    ///
    /// Returns `None` if the state is already entered and the returned guard
    /// has not yet been dropped — this is what prevents re-entrant dispatch
    /// (see the `*_dont_infinite_loop` tests below).
    #[inline]
    fn enter(&self) -> Option<Entered<'_>> {
        if self.can_enter.replace(false) {
            Some(Entered(self))
        } else {
            None
        }
    }
}
1043
1044// ===== impl Entered =====
1045
#[cfg(feature = "std")]
impl<'a> Entered<'a> {
    /// Mutably borrows this thread's current default dispatcher, lazily
    /// populating the slot with a clone of the global default if no scoped
    /// default has been set.
    #[inline]
    fn current(&self) -> RefMut<'a, Dispatch> {
        let slot = self.0.default.borrow_mut();
        RefMut::map(slot, |dispatch| {
            dispatch.get_or_insert_with(|| get_global().clone())
        })
    }
}
1056
#[cfg(feature = "std")]
impl Drop for Entered<'_> {
    #[inline]
    fn drop(&mut self) {
        // Re-arm the flag cleared by `State::enter`, allowing this thread to
        // enter the dispatcher again.
        self.0.can_enter.set(true);
    }
}
1064
1065// ===== impl DefaultGuard =====
1066
#[cfg(feature = "std")]
impl Drop for DefaultGuard {
    #[inline]
    fn drop(&mut self) {
        // One fewer scoped default is live; this pairs with the `fetch_add`
        // in `State::set_default`.
        SCOPED_COUNT.fetch_sub(1, Ordering::Release);
        if let Some(dispatch) = self.0.take() {
            // Replace the dispatcher and then drop the old one outside
            // of the thread-local context. Dropping the dispatch may
            // lead to the drop of a collector which, in the process,
            // could then also attempt to access the same thread local
            // state -- causing a clash.
            let prev = CURRENT_STATE.try_with(|state| state.default.replace(Some(dispatch)));
            drop(prev)
        }
    }
}
1083
1084#[cfg(test)]
1085mod test {
1086
1087 use super::*;
1088 use crate::{
1089 callsite::Callsite,
1090 collect::Interest,
1091 metadata::{Kind, Level, Metadata},
1092 };
1093
1094 #[test]
1095 fn dispatch_is() {
1096 let dispatcher = Dispatch::from_static(&NO_COLLECTOR);
1097 assert!(dispatcher.is::<NoCollector>());
1098 }
1099
1100 #[test]
1101 fn dispatch_downcasts() {
1102 let dispatcher = Dispatch::from_static(&NO_COLLECTOR);
1103 assert!(dispatcher.downcast_ref::<NoCollector>().is_some());
1104 }
1105
    /// A no-op callsite backing `TEST_META`.
    struct TestCallsite;
    static TEST_CALLSITE: TestCallsite = TestCallsite;
    // Static metadata for a DEBUG-level event with no fields, shared by the
    // tests in this module.
    static TEST_META: Metadata<'static> = metadata! {
        name: "test",
        target: module_path!(),
        level: Level::DEBUG,
        fields: &[],
        callsite: &TEST_CALLSITE,
        kind: Kind::EVENT
    };
1116
    impl Callsite for TestCallsite {
        // Interest registration is irrelevant for these tests.
        fn set_interest(&self, _: Interest) {}
        fn metadata(&self) -> &Metadata<'_> {
            &TEST_META
        }
    }
1123
    #[test]
    #[cfg(feature = "std")]
    fn events_dont_infinite_loop() {
        // This test ensures that an event triggered within a collector
        // won't cause an infinite loop of events.
        struct TestCollector;
        impl Collect for TestCollector {
            fn enabled(&self, _: &Metadata<'_>) -> bool {
                true
            }

            fn new_span(&self, _: &span::Attributes<'_>) -> span::Id {
                span::Id::from_u64(0xAAAA)
            }

            fn record(&self, _: &span::Id, _: &span::Record<'_>) {}

            fn record_follows_from(&self, _: &span::Id, _: &span::Id) {}

            fn event(&self, _: &Event<'_>) {
                // Fail if this collector ever observes a second event: the
                // re-entrant dispatch below must be suppressed.
                static EVENTS: AtomicUsize = AtomicUsize::new(0);
                assert_eq!(
                    EVENTS.fetch_add(1, Ordering::Relaxed),
                    0,
                    "event method called twice!"
                );
                // Dispatch an event from *inside* the collector's `event`
                // callback; the dispatcher must not route it back here.
                Event::dispatch(&TEST_META, &TEST_META.fields().value_set(&[]))
            }

            fn enter(&self, _: &span::Id) {}

            fn exit(&self, _: &span::Id) {}

            fn current_span(&self) -> span::Current {
                span::Current::unknown()
            }
        }

        with_default(&Dispatch::new(TestCollector), || {
            Event::dispatch(&TEST_META, &TEST_META.fields().value_set(&[]))
        })
    }
1166
    #[test]
    #[cfg(feature = "std")]
    fn spans_dont_infinite_loop() {
        // This test ensures that a span created within a collector
        // won't cause an infinite loop of new spans.

        // Creates a span through the current default dispatcher.
        fn mk_span() {
            get_default(|current| {
                current.new_span(&span::Attributes::new(
                    &TEST_META,
                    &TEST_META.fields().value_set(&[]),
                ))
            });
        }

        struct TestCollector;
        impl Collect for TestCollector {
            fn enabled(&self, _: &Metadata<'_>) -> bool {
                true
            }

            fn new_span(&self, _: &span::Attributes<'_>) -> span::Id {
                // Fail if this collector ever observes a second span: the
                // re-entrant `mk_span` call below must be suppressed.
                static NEW_SPANS: AtomicUsize = AtomicUsize::new(0);
                assert_eq!(
                    NEW_SPANS.fetch_add(1, Ordering::Relaxed),
                    0,
                    "new_span method called twice!"
                );
                // Create a span from *inside* `new_span`; the dispatcher must
                // not route it back here.
                mk_span();
                span::Id::from_u64(0xAAAA)
            }

            fn record(&self, _: &span::Id, _: &span::Record<'_>) {}

            fn record_follows_from(&self, _: &span::Id, _: &span::Id) {}

            fn event(&self, _: &Event<'_>) {}

            fn enter(&self, _: &span::Id) {}

            fn exit(&self, _: &span::Id) {}

            fn current_span(&self) -> span::Current {
                span::Current::unknown()
            }
        }

        with_default(&Dispatch::new(TestCollector), mk_span)
    }
1216
1217 #[test]
1218 fn default_no_collector() {
1219 let default_dispatcher = Dispatch::default();
1220 assert!(default_dispatcher.is::<NoCollector>());
1221 }
1222
    #[cfg(feature = "std")]
    #[test]
    fn default_dispatch() {
        // `Dispatch::default()` should reflect the scoped default while the
        // guard is live, and fall back to `NoCollector` once it is dropped.
        struct TestCollector;
        impl Collect for TestCollector {
            fn enabled(&self, _: &Metadata<'_>) -> bool {
                true
            }

            fn new_span(&self, _: &span::Attributes<'_>) -> span::Id {
                span::Id::from_u64(0xAAAA)
            }

            fn record(&self, _: &span::Id, _: &span::Record<'_>) {}

            fn record_follows_from(&self, _: &span::Id, _: &span::Id) {}

            fn event(&self, _: &Event<'_>) {}

            fn enter(&self, _: &span::Id) {}

            fn exit(&self, _: &span::Id) {}

            fn current_span(&self) -> span::Current {
                span::Current::unknown()
            }
        }
        let guard = set_default(&Dispatch::new(TestCollector));
        let default_dispatcher = Dispatch::default();
        assert!(default_dispatcher.is::<TestCollector>());

        drop(guard);
        let default_dispatcher = Dispatch::default();
        assert!(default_dispatcher.is::<NoCollector>());
    }
1258}