* [PATCH for-8.1 v2 1/4] util/interval-tree: Use qatomic_read for left/right while searching
From: Richard Henderson @ 2023-07-22 21:44 UTC (permalink / raw)
To: qemu-devel; +Cc: peter.maydell, qemu-stable
Fixes a race condition, generally seen only in unoptimized builds, in
which the subtree pointer is re-read after the protecting if condition,
so the value tested may not be the value used.
Cc: qemu-stable@nongnu.org
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
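As an illustrative sketch of the problem (not code from this file):
without qatomic_read(), an unoptimized build loads the field once for
the NULL test and again for the use, so a concurrent writer can slip
in between:

    if (node->rb.rb_left) {                        /* load #1 */
        left = rb_to_itree(node->rb.rb_left);      /* load #2: may see a
                                                      new, even NULL,
                                                      value */
    }

Reading the pointer once, atomically, removes the window:

    RBNode *tmp = qatomic_read(&node->rb.rb_left); /* single load */
    if (tmp) {
        left = rb_to_itree(tmp);                   /* uses the value
                                                      that was tested */
    }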
util/interval-tree.c | 15 +++++++++------
1 file changed, 9 insertions(+), 6 deletions(-)
diff --git a/util/interval-tree.c b/util/interval-tree.c
index 4c0baf108f..5a0ad21b2d 100644
--- a/util/interval-tree.c
+++ b/util/interval-tree.c
@@ -745,8 +745,9 @@ static IntervalTreeNode *interval_tree_subtree_search(IntervalTreeNode *node,
* Loop invariant: start <= node->subtree_last
* (Cond2 is satisfied by one of the subtree nodes)
*/
- if (node->rb.rb_left) {
- IntervalTreeNode *left = rb_to_itree(node->rb.rb_left);
+ RBNode *tmp = qatomic_read(&node->rb.rb_left);
+ if (tmp) {
+ IntervalTreeNode *left = rb_to_itree(tmp);
if (start <= left->subtree_last) {
/*
@@ -765,8 +766,9 @@ static IntervalTreeNode *interval_tree_subtree_search(IntervalTreeNode *node,
if (start <= node->last) { /* Cond2 */
return node; /* node is leftmost match */
}
- if (node->rb.rb_right) {
- node = rb_to_itree(node->rb.rb_right);
+ tmp = qatomic_read(&node->rb.rb_right);
+ if (tmp) {
+ node = rb_to_itree(tmp);
if (start <= node->subtree_last) {
continue;
}
@@ -814,8 +816,9 @@ IntervalTreeNode *interval_tree_iter_first(IntervalTreeRoot *root,
IntervalTreeNode *interval_tree_iter_next(IntervalTreeNode *node,
uint64_t start, uint64_t last)
{
- RBNode *rb = node->rb.rb_right, *prev;
+ RBNode *rb, *prev;
+ rb = qatomic_read(&node->rb.rb_right);
while (true) {
/*
* Loop invariants:
@@ -840,7 +843,7 @@ IntervalTreeNode *interval_tree_iter_next(IntervalTreeNode *node,
}
prev = &node->rb;
node = rb_to_itree(rb);
- rb = node->rb.rb_right;
+ rb = qatomic_read(&node->rb.rb_right);
} while (prev == rb);
/* Check if the node intersects [start;last] */
--
2.34.1
* [PATCH for-8.2? v2 3/4] util/interval-tree: Introduce pc_parent
From: Richard Henderson @ 2023-07-22 21:44 UTC (permalink / raw)
To: qemu-devel; +Cc: peter.maydell
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
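As a sketch of the intended use (pc_is_black() already exists in this
file): pc_parent() decodes the parent from an rb_parent_color value the
caller has already loaded, so the word is read once and both parent and
color are derived from the same snapshot:

    uintptr_t pc = node->rb_parent_color;  /* one load ...            */
    RBNode *parent = pc_parent(pc);        /* ... parent bits         */
    bool black = pc_is_black(pc);          /* ... color bit, same value */

This prepares for the next patch, which makes that single load atomic.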
util/interval-tree.c | 13 +++++++++----
1 file changed, 9 insertions(+), 4 deletions(-)
diff --git a/util/interval-tree.c b/util/interval-tree.c
index 759562db7d..d86c0752db 100644
--- a/util/interval-tree.c
+++ b/util/interval-tree.c
@@ -68,9 +68,14 @@ typedef struct RBAugmentCallbacks {
void (*rotate)(RBNode *old, RBNode *new);
} RBAugmentCallbacks;
+static inline RBNode *pc_parent(uintptr_t pc)
+{
+ return (RBNode *)(pc & ~1);
+}
+
static inline RBNode *rb_parent(const RBNode *n)
{
- return (RBNode *)(n->rb_parent_color & ~1);
+ return pc_parent(n->rb_parent_color);
}
static inline RBNode *rb_red_parent(const RBNode *n)
@@ -532,7 +537,7 @@ static void rb_erase_augmented(RBNode *node, RBRoot *root,
* so as to bypass rb_erase_color() later on.
*/
pc = node->rb_parent_color;
- parent = rb_parent(node);
+ parent = pc_parent(pc);
rb_change_child(node, child, parent, root);
if (child) {
child->rb_parent_color = pc;
@@ -544,7 +549,7 @@ static void rb_erase_augmented(RBNode *node, RBRoot *root,
} else if (!child) {
/* Still case 1, but this time the child is node->rb_left */
pc = node->rb_parent_color;
- parent = rb_parent(node);
+ parent = pc_parent(pc);
tmp->rb_parent_color = pc;
rb_change_child(node, tmp, parent, root);
rebalance = NULL;
@@ -600,7 +605,7 @@ static void rb_erase_augmented(RBNode *node, RBRoot *root,
rb_set_parent(tmp, successor);
pc = node->rb_parent_color;
- tmp = rb_parent(node);
+ tmp = pc_parent(pc);
rb_change_child(node, successor, tmp, root);
if (child2) {
--
2.34.1
* [PATCH for-8.2? v2 4/4] util/interval-tree: Use qatomic_read/set for rb_parent_color
From: Richard Henderson @ 2023-07-22 21:44 UTC (permalink / raw)
To: qemu-devel; +Cc: peter.maydell
While less susceptible to optimization problems than left and right,
interval_tree_iter_next also reads rb_parent(), so make sure that
stores and loads are atomic.
This goes further than strictly required, making all loads atomic
rather than only the ones on the iteration side. But it barely affects
code generation on the rebalance side, and it is cleaner to handle
everything the same way.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
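For reference, a sketch of the encoding that rb_pc()/rb_set_pc() guard:
rb_parent_color packs the parent pointer and the node's color into one
word, with the color in the low bit (free because RBNode is
pointer-aligned):

    pc     = (uintptr_t)parent | color;  /* color: RB_RED=0, RB_BLACK=1 */
    parent = (RBNode *)(pc & ~1);        /* pc_parent() */
    color  = (RBColor)(pc & 1);          /* pc_color()  */

With the loads and stores wrapped in qatomic_read()/qatomic_set(), a
concurrent reader always sees a consistent parent/color pair taken from
a single word.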
util/interval-tree.c | 47 ++++++++++++++++++++++++--------------------
1 file changed, 26 insertions(+), 21 deletions(-)
diff --git a/util/interval-tree.c b/util/interval-tree.c
index d86c0752db..f2866aa7d3 100644
--- a/util/interval-tree.c
+++ b/util/interval-tree.c
@@ -48,12 +48,6 @@
*
* It also guarantees that if the lookup returns an element it is the 'correct'
* one. But not returning an element does _NOT_ mean it's not present.
- *
- * NOTE:
- *
- * Stores to __rb_parent_color are not important for simple lookups so those
- * are left undone as of now. Nor did I check for loops involving parent
- * pointers.
*/
typedef enum RBColor
@@ -68,6 +62,16 @@ typedef struct RBAugmentCallbacks {
void (*rotate)(RBNode *old, RBNode *new);
} RBAugmentCallbacks;
+static inline uintptr_t rb_pc(const RBNode *n)
+{
+ return qatomic_read(&n->rb_parent_color);
+}
+
+static inline void rb_set_pc(RBNode *n, uintptr_t pc)
+{
+ qatomic_set(&n->rb_parent_color, pc);
+}
+
static inline RBNode *pc_parent(uintptr_t pc)
{
return (RBNode *)(pc & ~1);
@@ -75,12 +79,12 @@ static inline RBNode *pc_parent(uintptr_t pc)
static inline RBNode *rb_parent(const RBNode *n)
{
- return pc_parent(n->rb_parent_color);
+ return pc_parent(rb_pc(n));
}
static inline RBNode *rb_red_parent(const RBNode *n)
{
- return (RBNode *)n->rb_parent_color;
+ return (RBNode *)rb_pc(n);
}
static inline RBColor pc_color(uintptr_t pc)
@@ -100,27 +104,27 @@ static inline bool pc_is_black(uintptr_t pc)
static inline RBColor rb_color(const RBNode *n)
{
- return pc_color(n->rb_parent_color);
+ return pc_color(rb_pc(n));
}
static inline bool rb_is_red(const RBNode *n)
{
- return pc_is_red(n->rb_parent_color);
+ return pc_is_red(rb_pc(n));
}
static inline bool rb_is_black(const RBNode *n)
{
- return pc_is_black(n->rb_parent_color);
+ return pc_is_black(rb_pc(n));
}
static inline void rb_set_black(RBNode *n)
{
- n->rb_parent_color |= RB_BLACK;
+ rb_set_pc(n, rb_pc(n) | RB_BLACK);
}
static inline void rb_set_parent_color(RBNode *n, RBNode *p, RBColor color)
{
- n->rb_parent_color = (uintptr_t)p | color;
+ rb_set_pc(n, (uintptr_t)p | color);
}
static inline void rb_set_parent(RBNode *n, RBNode *p)
@@ -186,9 +190,10 @@ static inline void rb_change_child(RBNode *old, RBNode *new,
static inline void rb_rotate_set_parents(RBNode *old, RBNode *new,
RBRoot *root, RBColor color)
{
- RBNode *parent = rb_parent(old);
+ uintptr_t pc = rb_pc(old);
+ RBNode *parent = pc_parent(pc);
- new->rb_parent_color = old->rb_parent_color;
+ rb_set_pc(new, pc);
rb_set_parent_color(old, new, color);
rb_change_child(old, new, parent, root);
}
@@ -536,11 +541,11 @@ static void rb_erase_augmented(RBNode *node, RBRoot *root,
* and node must be black due to 4). We adjust colors locally
* so as to bypass rb_erase_color() later on.
*/
- pc = node->rb_parent_color;
+ pc = rb_pc(node);
parent = pc_parent(pc);
rb_change_child(node, child, parent, root);
if (child) {
- child->rb_parent_color = pc;
+ rb_set_pc(child, pc);
rebalance = NULL;
} else {
rebalance = pc_is_black(pc) ? parent : NULL;
@@ -548,9 +553,9 @@ static void rb_erase_augmented(RBNode *node, RBRoot *root,
tmp = parent;
} else if (!child) {
/* Still case 1, but this time the child is node->rb_left */
- pc = node->rb_parent_color;
+ pc = rb_pc(node);
parent = pc_parent(pc);
- tmp->rb_parent_color = pc;
+ rb_set_pc(tmp, pc);
rb_change_child(node, tmp, parent, root);
rebalance = NULL;
tmp = parent;
@@ -604,7 +609,7 @@ static void rb_erase_augmented(RBNode *node, RBRoot *root,
qatomic_set(&successor->rb_left, tmp);
rb_set_parent(tmp, successor);
- pc = node->rb_parent_color;
+ pc = rb_pc(node);
tmp = pc_parent(pc);
rb_change_child(node, successor, tmp, root);
@@ -614,7 +619,7 @@ static void rb_erase_augmented(RBNode *node, RBRoot *root,
} else {
rebalance = rb_is_black(successor) ? parent : NULL;
}
- successor->rb_parent_color = pc;
+ rb_set_pc(successor, pc);
tmp = successor;
}
--
2.34.1