// SPDX-License-Identifier: GPL-2.0-only
#include <linux/interval_tree.h>
#include <linux/interval_tree_generic.h>
#include <linux/compiler.h>
#include <linux/export.h>

#define START(node) ((node)->start)
#define LAST(node)  ((node)->last)

INTERVAL_TREE_DEFINE(struct interval_tree_node, rb,
		     unsigned long, __subtree_last,
		     START, LAST,, interval_tree)

EXPORT_SYMBOL_GPL(interval_tree_insert);
EXPORT_SYMBOL_GPL(interval_tree_remove);
EXPORT_SYMBOL_GPL(interval_tree_iter_first);
EXPORT_SYMBOL_GPL(interval_tree_iter_next);
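
/*
 * Illustrative usage sketch (not compiled here): a caller embeds a
 * struct interval_tree_node, fills in start/last, then inserts it and
 * iterates over intersecting ranges. "itree", "my_node" and "handle_range"
 * are hypothetical caller-side names.
 *
 *	struct rb_root_cached itree = RB_ROOT_CACHED;
 *	struct interval_tree_node my_node = { .start = 100, .last = 199 };
 *	struct interval_tree_node *node;
 *
 *	interval_tree_insert(&my_node, &itree);
 *	for (node = interval_tree_iter_first(&itree, 0, ULONG_MAX); node;
 *	     node = interval_tree_iter_next(node, 0, ULONG_MAX))
 *		handle_range(node->start, node->last);
 */
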
#ifdef CONFIG_INTERVAL_TREE_SPAN_ITER
/*
 * Roll nodes[1] into nodes[0] by advancing nodes[1] to the end of a contiguous
 * span of nodes. This makes nodes[0]->last the end of that contiguous used
 * span of indexes that started at the original nodes[1]->start.
 *
 * If there is an interior hole, nodes[1] is now the first node starting the
 * next used span. A hole span is between nodes[0]->last and nodes[1]->start.
 *
 * If there is a trailing hole, nodes[1] is now NULL. A hole span is between
 * nodes[0]->last and last_index.
 *
 * If the contiguous used range spans to last_index, nodes[1] is set to NULL.
 */
static void
interval_tree_span_iter_next_gap(struct interval_tree_span_iter *state)
{
	struct interval_tree_node *cur = state->nodes[1];

	state->nodes[0] = cur;
	do {
		if (cur->last > state->nodes[0]->last)
			state->nodes[0] = cur;
		cur = interval_tree_iter_next(cur, state->first_index,
					      state->last_index);
	} while (cur && (state->nodes[0]->last >= cur->start ||
			 state->nodes[0]->last + 1 == cur->start));
	state->nodes[1] = cur;
}
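
/*
 * Worked example (illustrative): with used ranges [0,5], [3,7] and [8,10] in
 * the tree, starting from nodes[1] == [0,5] the loop above rolls through
 * [3,7] (overlapping: 5 >= 3) and [8,10] (adjacent: 7 + 1 == 8), leaving
 * nodes[0]->last == 10. A node starting at index 12 or later terminates the
 * loop and becomes nodes[1], making index 11 the start of an interior hole.
 */
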
void interval_tree_span_iter_first(struct interval_tree_span_iter *iter,
				   struct rb_root_cached *itree,
				   unsigned long first_index,
				   unsigned long last_index)
{
	iter->first_index = first_index;
	iter->last_index = last_index;
	iter->nodes[0] = NULL;
	iter->nodes[1] =
		interval_tree_iter_first(itree, first_index, last_index);
	if (!iter->nodes[1]) {
		/* No nodes intersect the span, whole span is hole */
		iter->start_hole = first_index;
		iter->last_hole = last_index;
		iter->is_hole = 1;
		return;
	}
	if (iter->nodes[1]->start > first_index) {
		/* Leading hole on first iteration */
		iter->start_hole = first_index;
		iter->last_hole = iter->nodes[1]->start - 1;
		iter->is_hole = 1;
		interval_tree_span_iter_next_gap(iter);
		return;
	}

	/* Starting inside a used range */
	iter->start_used = first_index;
	iter->is_hole = 0;
	interval_tree_span_iter_next_gap(iter);
	iter->last_used = iter->nodes[0]->last;
	if (iter->last_used >= last_index) {
		iter->last_used = last_index;
		iter->nodes[0] = NULL;
		iter->nodes[1] = NULL;
	}
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_first);
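
/*
 * Worked example (illustrative): for a tree holding only the range [5,9],
 * starting the iterator over [0,15] reports the leading hole [0,4]. A
 * subsequent interval_tree_span_iter_next() reports the used span [5,9],
 * then the trailing hole [10,15], after which is_hole becomes -1 (done).
 */
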
void interval_tree_span_iter_next(struct interval_tree_span_iter *iter)
{
	if (!iter->nodes[0] && !iter->nodes[1]) {
		iter->is_hole = -1;
		return;
	}

	if (iter->is_hole) {
		iter->start_used = iter->last_hole + 1;
		iter->last_used = iter->nodes[0]->last;
		if (iter->last_used >= iter->last_index) {
			iter->last_used = iter->last_index;
			iter->nodes[0] = NULL;
			iter->nodes[1] = NULL;
		}
		iter->is_hole = 0;
		return;
	}

	if (!iter->nodes[1]) {
		/* Trailing hole */
		iter->start_hole = iter->nodes[0]->last + 1;
		iter->last_hole = iter->last_index;
		iter->nodes[0] = NULL;
		iter->is_hole = 1;
		return;
	}

	/* must have both nodes[0] and [1], interior hole */
	iter->start_hole = iter->nodes[0]->last + 1;
	iter->last_hole = iter->nodes[1]->start - 1;
	iter->is_hole = 1;
	interval_tree_span_iter_next_gap(iter);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_next);
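
/*
 * Usage sketch (illustrative only): the interval_tree_for_each_span()
 * helper in <linux/interval_tree.h> wraps the first/done/next calls into a
 * loop. "span", "itree", "handle_hole" and "handle_used" are hypothetical
 * caller-side names.
 *
 *	struct interval_tree_span_iter span;
 *
 *	interval_tree_for_each_span(&span, &itree, 0, ULONG_MAX) {
 *		if (span.is_hole)
 *			handle_hole(span.start_hole, span.last_hole);
 *		else
 *			handle_used(span.start_used, span.last_used);
 *	}
 */
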
/*
 * Advance the iterator index to a specific position. The returned used/hole is
 * updated to start at new_index. This is faster than calling
 * interval_tree_span_iter_first() as it can avoid full searches in several
 * cases where the iterator is already set.
 */
void interval_tree_span_iter_advance(struct interval_tree_span_iter *iter,
				     struct rb_root_cached *itree,
				     unsigned long new_index)
{
	if (iter->is_hole == -1)
		return;

	iter->first_index = new_index;
	if (new_index > iter->last_index) {
		iter->is_hole = -1;
		return;
	}

	/* Rely on the union aliasing hole/used */
	if (iter->start_hole <= new_index && new_index <= iter->last_hole) {
		iter->start_hole = new_index;
		return;
	}
	if (new_index == iter->last_hole + 1)
		interval_tree_span_iter_next(iter);
	else
		interval_tree_span_iter_first(iter, itree, new_index,
					      iter->last_index);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_advance);
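
/*
 * Illustrative note: a caller that consumed only part of the current span
 * can jump ahead without restarting, e.g. ("span", "itree" and "done" are
 * hypothetical, as above):
 *
 *	interval_tree_span_iter_advance(&span, &itree, span.start_used + done);
 *
 * Advancing within the current span, or to exactly one index past its end,
 * avoids a fresh tree search; any other target falls back to
 * interval_tree_span_iter_first().
 */
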
#endif