wasm_bindgen_futures/queue.rs

use alloc::collections::VecDeque;
use alloc::rc::Rc;
use core::cell::{Cell, RefCell};
use js_sys::Promise;
use wasm_bindgen::prelude::*;

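// Raw bindings to the JS side: the global `queueMicrotask` function itself,
// plus a getter on the global object so we can check at runtime whether
// `queueMicrotask` actually exists before calling it.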
#[wasm_bindgen]
extern "C" {
    #[wasm_bindgen]
    fn queueMicrotask(closure: &Closure<dyn FnMut(JsValue)>);

    type Global;

    #[wasm_bindgen(method, getter, js_name = queueMicrotask)]
    fn hasQueueMicrotask(this: &Global) -> JsValue;
}

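// State shared between the `Queue` handle and the scheduled `run_all` closure.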
struct QueueState {
    // The queue of Tasks which are to be run in order. In practice this is all the
    // synchronous work of futures, and each `Task` represents calling `poll` on
    // a future "at the right time".
    tasks: RefCell<VecDeque<Rc<crate::task::Task>>>,

    // This flag indicates whether we've scheduled `run_all` to run in the future.
    // This is used to ensure that it's only scheduled once.
    is_scheduled: Cell<bool>,
}

impl QueueState {
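    // Run every task that is queued right now; tasks queued while this runs
    // are left for the next scheduled tick.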
    fn run_all(&self) {
        // "consume" the schedule
        let _was_scheduled = self.is_scheduled.replace(false);
        debug_assert!(_was_scheduled);

        // Stop when all tasks that have been scheduled before this tick have been run.
        // Tasks that are scheduled while running tasks will run on the next tick.
        let mut task_count_left = self.tasks.borrow().len();
        while task_count_left > 0 {
            task_count_left -= 1;
            let task = match self.tasks.borrow_mut().pop_front() {
                Some(task) => task,
                None => break,
            };
            task.run();
        }

        // All of the Tasks have been run, so it's now possible to schedule the
        // next tick again
    }
}

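// A handle to the shared queue state plus the JS machinery used to schedule
// `run_all` as a microtask: a pre-resolved promise used as a fallback
// scheduler, the closure handed to JS, and a cached flag recording whether
// `queueMicrotask` is available.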
pub(crate) struct Queue {
    state: Rc<QueueState>,
    promise: Promise,
    closure: Closure<dyn FnMut(JsValue)>,
    has_queue_microtask: bool,
}

impl Queue {
    // Schedule a task to run on the next tick
    pub(crate) fn schedule_task(&self, task: Rc<crate::task::Task>) {
        self.state.tasks.borrow_mut().push_back(task);
        // Use queueMicrotask to execute as soon as possible. If it does not
        // exist, fall back to the promise resolution.
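        // Only schedule `run_all` once per tick; the flag is reset by
        // `run_all` itself when it runs.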
        if !self.state.is_scheduled.replace(true) {
            if self.has_queue_microtask {
                queueMicrotask(&self.closure);
            } else {
                let _ = self.promise.then(&self.closure);
            }
        }
    }

    // Append a task to the currently running queue, or schedule it
    #[cfg(not(target_feature = "atomics"))]
    pub(crate) fn push_task(&self, task: Rc<crate::task::Task>) {
        // It would make sense to run this task on the same tick. For now, we
        // make the simplifying choice of always scheduling tasks for a future tick.
        self.schedule_task(task)
    }
}

impl Queue {
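    // Construct the queue: set up the shared state, detect whether the global
    // `queueMicrotask` function is available, and prepare both the resolved
    // promise fallback and the closure that will drive `run_all`.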
    fn new() -> Self {
        let state = Rc::new(QueueState {
            is_scheduled: Cell::new(false),
            tasks: RefCell::new(VecDeque::new()),
        });

        let has_queue_microtask = js_sys::global()
            .unchecked_into::<Global>()
            .hasQueueMicrotask()
            .is_function();

        Self {
            promise: Promise::resolve(&JsValue::undefined()),

            closure: {
                let state = Rc::clone(&state);

                // This closure will only be called on the next microtask event
                // tick
                Closure::new(move |_| state.run_all())
            },

            state,
            has_queue_microtask,
        }
    }

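    // Give `f` access to the thread's `Queue`, creating it lazily on first
    // use.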
    pub(crate) fn with<R>(f: impl FnOnce(&Self) -> R) -> R {
        use once_cell::unsync::Lazy;

        struct Wrapper<T>(Lazy<T>);

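        // Without the `atomics` target feature the Wasm module runs on a
        // single thread, so it is sound to pretend this wrapper is
        // thread-safe in order to store it in a non-thread-local `static`.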
        #[cfg(not(target_feature = "atomics"))]
        unsafe impl<T> Sync for Wrapper<T> {}

        #[cfg(not(target_feature = "atomics"))]
        unsafe impl<T> Send for Wrapper<T> {}

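        // With `atomics` enabled the queue is thread-local instead, so each
        // thread drives its own set of tasks and the unsafe impls above are
        // not needed.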
        #[cfg_attr(target_feature = "atomics", thread_local)]
        static QUEUE: Wrapper<Queue> = Wrapper(Lazy::new(Queue::new));

        f(&QUEUE.0)
    }
}