Deep Dive into React Scheduler: Working Principles and Implementation
Introduction
The React Scheduler is the package that underpins React’s concurrent rendering: it decides which updates run first, when work must yield back to the browser, and how long-running work gets broken up. This article explores how the Scheduler works internally and how it enables React’s concurrent features.
Core Concepts
The React Scheduler is built on several key concepts:
- Priority Levels: every task is scheduled at one of five priorities, from Immediate down to Idle
- Task Queue: pending tasks live in a min-heap, so the most urgent work is always at the top
- Time Slicing: long-running work is split into small units so the main thread is never blocked for long
- Cooperative Scheduling: the scheduler voluntarily yields between units of work so the browser can handle input and paint
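These concepts are all visible in the public API of the standalone scheduler package that React ships. As a rough usage sketch (assuming the scheduler package is installed; the logging is purely illustrative):

import {
  unstable_scheduleCallback as scheduleCallback,
  unstable_NormalPriority as NormalPriority,
  unstable_UserBlockingPriority as UserBlockingPriority,
} from 'scheduler';

// Two callbacks scheduled back to back: the user-blocking one is given a
// much shorter timeout, so the scheduler runs it first.
scheduleCallback(NormalPriority, () => {
  console.log('normal-priority work');
});
scheduleCallback(UserBlockingPriority, () => {
  console.log('user-blocking work runs first');
});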
Scheduler Implementation
Here is the public surface the scheduler package exposes; the sections that follow walk through how each piece is implemented:
const Scheduler = {
unstable_scheduleCallback,
unstable_cancelCallback,
unstable_shouldYield,
unstable_requestPaint,
unstable_now,
unstable_getCurrentPriorityLevel,
unstable_ImmediatePriority,
unstable_UserBlockingPriority,
unstable_NormalPriority,
unstable_LowPriority,
unstable_IdlePriority,
};
// Priority levels, from most to least urgent
const ImmediatePriority = 1;    // must run as soon as possible (treated as already expired)
const UserBlockingPriority = 2; // the result of a direct user interaction
const NormalPriority = 3;       // the default priority for updates
const LowPriority = 4;          // work that can wait noticeably longer
const IdlePriority = 5;         // work that may never run if the app stays busy
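Each priority level maps to a timeout that bounds how long a task may wait before it is treated as expired; getTimeoutForPriority (shown later) reads these constants. The values below match the scheduler source at the time of writing:

// Timeouts (in ms) used to compute a task's expiration time.
const maxSigned31BitInt = 1073741823; // roughly 12 days: effectively "never expires"

const IMMEDIATE_PRIORITY_TIMEOUT = -1; // negative, so the task counts as already expired
const USER_BLOCKING_PRIORITY_TIMEOUT = 250;
const NORMAL_PRIORITY_TIMEOUT = 5000;
const LOW_PRIORITY_TIMEOUT = 10000;
const IDLE_PRIORITY_TIMEOUT = maxSigned31BitInt;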
Task Scheduling
New work enters through unstable_scheduleCallback, which wraps the callback in a task object and pushes it into one of two priority queues, one for ready tasks and one for delayed tasks:
function unstable_scheduleCallback(priorityLevel, callback, options) {
  const currentTime = getCurrentTime();

  // Delayed tasks start in the future; everything else starts now.
  let startTime = currentTime;
  if (options?.delay) {
    startTime = currentTime + options.delay;
  }

  // The timeout bounds how long the task can be postponed before it expires.
  const timeout = getTimeoutForPriority(priorityLevel);
  const expirationTime = startTime + timeout;

  const newTask = {
    id: taskIdCounter++,
    callback,
    priorityLevel,
    startTime,
    expirationTime,
    sortIndex: -1,
  };

  if (startTime > currentTime) {
    // This is a delayed task: park it in the timerQueue, sorted by start time.
    // (The full implementation also arranges a host timeout so the task is
    // moved into the taskQueue once its start time arrives.)
    newTask.sortIndex = startTime;
    push(timerQueue, newTask);
  } else {
    // Ready task: sort by expiration so the most urgent work is popped first.
    newTask.sortIndex = expirationTime;
    push(taskQueue, newTask);
    // Kick off a host callback (a macrotask) if one isn't already pending.
    if (!isHostCallbackScheduled && !isPerformingWork) {
      isHostCallbackScheduled = true;
      requestHostCallback(flushWork);
    }
  }

  return newTask;
}
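From the caller’s side, the returned task handle can be used to cancel work that has become unnecessary. A hedged usage sketch (the scheduler exposes cancellation as unstable_cancelCallback, which simply clears the task's callback rather than removing it from the heap):

import {
  unstable_scheduleCallback as scheduleCallback,
  unstable_cancelCallback as cancelCallback,
  unstable_LowPriority as LowPriority,
} from 'scheduler';

// Schedule low-priority work that should only start after 200ms.
const task = scheduleCallback(
  LowPriority,
  () => console.log('deferred work'),
  { delay: 200 },
);

// If the work becomes unnecessary, cancel it; the scheduler skips tasks
// whose callback has been cleared when they reach the front of the queue.
cancelCallback(task);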
Time Slicing
Inside each host callback the scheduler runs workLoop, which keeps processing tasks until either the queue drains or it is time to yield back to the browser:
function workLoop(hasTimeRemaining, initialTime) {
let currentTime = initialTime;
currentTask = peek(taskQueue);
while (currentTask !== null) {
if (
currentTask.expirationTime > currentTime &&
(!hasTimeRemaining || shouldYieldToHost())
) {
// This task hasn't expired, but the current time slice is used up: yield
break;
}
const callback = currentTask.callback;
if (typeof callback === 'function') {
currentTask.callback = null;
currentPriorityLevel = currentTask.priorityLevel;
const didUserCallbackTimeout =
currentTask.expirationTime <= currentTime;
const continuationCallback = callback(didUserCallbackTimeout);
currentTime = getCurrentTime();
if (typeof continuationCallback === 'function') {
currentTask.callback = continuationCallback;
} else {
if (currentTask === peek(taskQueue)) {
pop(taskQueue);
}
}
} else {
pop(taskQueue);
}
currentTask = peek(taskQueue);
}
return currentTask !== null;
}
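From a caller’s perspective, time slicing works through continuations: a callback that cannot finish within one slice checks unstable_shouldYield and returns a new function, which workLoop stores back on the task. A hedged sketch of a long task written in this style (items and processItem are placeholders):

import {
  unstable_scheduleCallback as scheduleCallback,
  unstable_shouldYield as shouldYield,
  unstable_NormalPriority as NormalPriority,
} from 'scheduler';

function processAll(items, processItem) {
  let i = 0;
  const work = () => {
    while (i < items.length) {
      processItem(items[i++]);
      if (shouldYield()) {
        // Hand back a continuation; workLoop keeps the task alive and the
        // remaining items are processed in a later time slice.
        return work;
      }
    }
    // Returning a non-function tells workLoop the task is finished.
    return null;
  };
  scheduleCallback(NormalPriority, work);
}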
Priority Management
Priorities influence scheduling through the timeout they map to, while shouldYieldToHost decides when the work loop has to give the main thread back:
function getTimeoutForPriority(priority) {
switch (priority) {
case ImmediatePriority:
return IMMEDIATE_PRIORITY_TIMEOUT;
case UserBlockingPriority:
return USER_BLOCKING_PRIORITY_TIMEOUT;
case IdlePriority:
return IDLE_PRIORITY_TIMEOUT;
case LowPriority:
return LOW_PRIORITY_TIMEOUT;
case NormalPriority:
default:
return NORMAL_PRIORITY_TIMEOUT;
}
}
function shouldYieldToHost() {
  const currentTime = getCurrentTime();
  if (currentTime >= deadline) {
    // The current time slice is used up. Yield immediately if the browser
    // needs to paint or has pending user input.
    if (needsPaint || scheduling.isInputPending()) {
      return true;
    }
    return currentTime >= maxYieldInterval;
  } else {
    // There is still time left in the slice.
    return false;
  }
}
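The isInputPending call is feature-detected: navigator.scheduling.isInputPending is only implemented in Chromium-based browsers, so elsewhere the scheduler falls back to a plain deadline check. A hedged sketch of that guard (inputAwareShouldYield stands in for the version shown above):

let shouldYieldToHost;
if (
  typeof navigator !== 'undefined' &&
  navigator.scheduling !== undefined &&
  navigator.scheduling.isInputPending !== undefined
) {
  // The browser can report pending input, so use the input-aware version.
  shouldYieldToHost = inputAwareShouldYield;
} else {
  // Fallback: yield as soon as the current time slice's deadline passes.
  shouldYieldToHost = function () {
    return getCurrentTime() >= deadline;
  };
}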
Task Queue Implementation
Both the taskQueue and the timerQueue are binary min-heaps stored in plain arrays, keyed on each task’s sortIndex:
function push(heap, node) {
const index = heap.length;
heap.push(node);
siftUp(heap, node, index);
}
function pop(heap) {
if (heap.length === 0) {
return null;
}
const first = heap[0];
const last = heap.pop();
if (last !== first) {
heap[0] = last;
siftDown(heap, last, 0);
}
return first;
}
function siftUp(heap, node, i) {
let index = i;
while (index > 0) {
const parentIndex = (index - 1) >>> 1;
const parent = heap[parentIndex];
if (compare(parent, node) > 0) {
heap[parentIndex] = node;
heap[index] = parent;
index = parentIndex;
} else {
return;
}
}
}
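The snippets above also rely on peek, compare, and siftDown, which are not shown. For completeness, here is a sketch that follows the shape of the scheduler's min-heap module (ties on sortIndex are broken by task id, i.e. insertion order):

function peek(heap) {
  // The smallest element (most urgent task) is always at the root.
  return heap.length === 0 ? null : heap[0];
}

function compare(a, b) {
  // Compare sort index first, then task id as a tie-breaker.
  const diff = a.sortIndex - b.sortIndex;
  return diff !== 0 ? diff : a.id - b.id;
}

function siftDown(heap, node, i) {
  let index = i;
  const length = heap.length;
  const halfLength = length >>> 1;
  while (index < halfLength) {
    const leftIndex = (index + 1) * 2 - 1;
    const left = heap[leftIndex];
    const rightIndex = leftIndex + 1;
    const right = heap[rightIndex];
    // Swap with the smaller child until the node is smaller than both.
    if (compare(left, node) < 0) {
      if (rightIndex < length && compare(right, left) < 0) {
        heap[index] = right;
        heap[rightIndex] = node;
        index = rightIndex;
      } else {
        heap[index] = left;
        heap[leftIndex] = node;
        index = leftIndex;
      }
    } else if (rightIndex < length && compare(right, node) < 0) {
      heap[index] = right;
      heap[rightIndex] = node;
      index = rightIndex;
    } else {
      return;
    }
  }
}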
Cooperative Scheduling
To get back onto the main thread after yielding, the scheduler posts a message to itself through a MessageChannel; each message handler runs one time slice of work:
const performWorkUntilDeadline = () => {
  if (scheduledHostCallback !== null) {
    const currentTime = getCurrentTime();
    // Give this slice a fixed budget (yieldInterval, about 5ms by default).
    deadline = currentTime + yieldInterval;
    const hasTimeRemaining = true;
    try {
      const hasMoreWork = scheduledHostCallback(
        hasTimeRemaining,
        currentTime,
      );
      if (!hasMoreWork) {
        isMessageLoopRunning = false;
        scheduledHostCallback = null;
      } else {
        // More work remains: post another message so the next slice runs
        // after the browser has had a chance to handle events and paint.
        port.postMessage(null);
      }
    } catch (error) {
      // If the callback throws, keep the loop alive for the remaining work,
      // then rethrow the error.
      port.postMessage(null);
      throw error;
    }
  } else {
    isMessageLoopRunning = false;
  }
};

// The scheduler posts to port2 and listens on port1. Each message is handled
// as a macrotask, so the browser can interleave rendering and input between
// consecutive slices.
const channel = new MessageChannel();
const port = channel.port2;
channel.port1.onmessage = performWorkUntilDeadline;

requestHostCallback = function (callback) {
  scheduledHostCallback = callback;
  if (!isMessageLoopRunning) {
    isMessageLoopRunning = true;
    port.postMessage(null);
  }
};
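Delayed tasks in the timerQueue are not driven by this message loop; a plain setTimeout moves them into the taskQueue once their start time arrives. A hedged sketch of that path, simplified from the scheduler's own requestHostTimeout/handleTimeout helpers (advanceTimers does the actual moving):

// Nothing needs to happen before the earliest timer fires, so setTimeout
// is precise enough for delayed tasks.
requestHostTimeout = function (callback, ms) {
  taskTimeoutID = setTimeout(() => {
    callback(getCurrentTime());
  }, ms);
};

// Called when the earliest timer fires: move every task whose start time has
// passed from the timerQueue into the taskQueue, then start flushing work.
// (The full version also re-arms a timeout for the next pending timer.)
function handleTimeout(currentTime) {
  isHostTimeoutScheduled = false;
  advanceTimers(currentTime);
  if (!isHostCallbackScheduled && peek(taskQueue) !== null) {
    isHostCallbackScheduled = true;
    requestHostCallback(flushWork);
  }
}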
Integration with React
When an update is scheduled on a fiber, the reconciler decides whether to flush it synchronously or to hand it to the scheduler as an interruptible, concurrent task:
// Simplified illustration: pushTask stands in for inserting the task into the
// queue (React's reconciler actually delegates to the scheduler package's
// unstable_scheduleCallback).
function scheduleCallback(priorityLevel, callback) {
const currentTime = getCurrentTime();
const startTime = currentTime;
const timeout = getTimeoutForPriority(priorityLevel);
const expirationTime = startTime + timeout;
const newTask = {
callback,
priorityLevel,
startTime,
expirationTime,
next: null,
previous: null,
};
pushTask(newTask);
return newTask;
}
function scheduleUpdateOnFiber(fiber, lane, eventTime) {
const root = markUpdateLaneFromFiberToRoot(fiber, lane);
if (root === null) {
return null;
}
// Mark that the root has a pending update
markRootUpdated(root, lane, eventTime);
if (root === workInProgressRoot) {
// Received an update to a tree that's in the middle of rendering
if ((workInProgressRootRenderLanes & lane) === NoLanes) {
// This update doesn't have sufficient priority
// Add it to the list of pending lanes
workInProgressRootUpdatedLanes |= lane;
}
}
const priorityLevel = getCurrentPriorityLevel();
if (lane === SyncLane) {
scheduleSyncCallback(performSyncWorkOnRoot.bind(null, root));
} else {
scheduleCallback(
priorityLevel,
performConcurrentWorkOnRoot.bind(null, root),
);
}
return root;
}
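The callback handed to the scheduler, performConcurrentWorkOnRoot, participates in time slicing like any other task: if the render is interrupted, it returns a continuation bound to the same root. A heavily simplified, hedged sketch of that shape (the real function does much more):

function performConcurrentWorkOnRoot(root) {
  const originalCallbackNode = root.callbackNode;

  // Render as much of the tree as fits in this time slice; the render loop
  // checks shouldYield() between fibers and may stop early.
  // ... render and, if the tree is complete, commit ...

  if (root.callbackNode === originalCallbackNode) {
    // Work remains and this task still owns the root: return a continuation
    // so the scheduler resumes it in the next time slice.
    return performConcurrentWorkOnRoot.bind(null, root);
  }
  // Either the root finished or a newer task has taken over.
  return null;
}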
Performance Considerations
The scheduler includes several performance optimizations:
- Task Batching:
function batchedUpdates(fn) {
const prevIsBatchingUpdates = isBatchingUpdates;
isBatchingUpdates = true;
try {
return fn();
} finally {
isBatchingUpdates = prevIsBatchingUpdates;
if (!isBatchingUpdates) {
flushSyncCallbackQueue();
}
}
}
- Priority Inheritance:
function runWithPriority(priorityLevel, fn) {
const previousPriorityLevel = currentPriorityLevel;
try {
currentPriorityLevel = priorityLevel;
return fn();
} finally {
currentPriorityLevel = previousPriorityLevel;
}
}
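Priority inheritance is also observable from the outside: the scheduler package exports runWithPriority as unstable_runWithPriority, and code running inside the wrapped function can read the ambient priority. A hedged usage sketch:

import {
  unstable_runWithPriority as runWithPriority,
  unstable_getCurrentPriorityLevel as getCurrentPriorityLevel,
  unstable_scheduleCallback as scheduleCallback,
  unstable_UserBlockingPriority as UserBlockingPriority,
} from 'scheduler';

runWithPriority(UserBlockingPriority, () => {
  // Inside the wrapped function the current priority is user-blocking, so
  // work scheduled with the ambient level inherits it.
  scheduleCallback(getCurrentPriorityLevel(), () => {
    console.log('runs with the inherited user-blocking priority');
  });
});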
Best Practices
- Proper Priority Usage:
// Updates wrapped in startTransition are scheduled at a lower priority and
// can be interrupted by more urgent updates such as typing or clicks.
React.startTransition(() => {
  setState(newValue);
});
- Avoiding Starvation:
function scheduleWork(callback) {
  const priorityLevel = getCurrentPriorityLevel();
  const timeout = getTimeoutForPriority(priorityLevel);
  // MAX_TIMEOUT is an application-defined cap: if a task's timeout is long
  // enough that it risks being starved, promote it to normal priority so it
  // still completes in a reasonable time.
  if (timeout > MAX_TIMEOUT) {
    scheduleCallback(NormalPriority, callback);
  } else {
    scheduleCallback(priorityLevel, callback);
  }
}
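At the component level the same ideas surface through React’s concurrent APIs. A hedged example using useTransition to keep typing responsive while the larger list update is scheduled at transition priority (the item data and the filtering logic are placeholders):

import React, { useState, useTransition } from 'react';

function SearchBox({ items }) {
  const [query, setQuery] = useState('');
  const [results, setResults] = useState(items);
  const [isPending, startTransition] = useTransition();

  function handleChange(event) {
    const next = event.target.value;
    // Urgent: keep the controlled input in sync immediately.
    setQuery(next);
    // Non-urgent: recompute the results at transition priority, so the
    // scheduler can interrupt this work if the user keeps typing.
    startTransition(() => {
      setResults(items.filter((item) => item.includes(next)));
    });
  }

  return (
    <div>
      <input value={query} onChange={handleChange} />
      {isPending ? (
        <p>Updating…</p>
      ) : (
        <ul>
          {results.map((item) => (
            <li key={item}>{item}</li>
          ))}
        </ul>
      )}
    </div>
  );
}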
Conclusion
The React Scheduler is a sophisticated system that enables React’s concurrent features by:
- Managing task priorities effectively
- Implementing cooperative scheduling with the browser
- Providing mechanisms for time-slicing and task interruption
- Enabling smooth user interactions through priority-based scheduling
Understanding the scheduler’s implementation helps developers better utilize React’s concurrent features and build more responsive applications. As React continues to evolve, the scheduler remains a crucial part of React’s architecture, enabling new features and improvements in application performance.