mirror of https://gitee.com/openkylin/linux.git
augmented rbtree: add new RB_DECLARE_CALLBACKS_MAX macro
Add RB_DECLARE_CALLBACKS_MAX, which generates augmented rbtree callbacks
for the case where the augmented value is a scalar whose definition
follows a max(f(node)) pattern. This actually covers all present uses of
RB_DECLARE_CALLBACKS, and saves some (source) code duplication in the
various RBCOMPUTE function definitions.

[walken@google.com: fix mm/vmalloc.c]
  Link: http://lkml.kernel.org/r/CANN689FXgK13wDYNh1zKxdipeTuALG4eKvKpsdZqKFJ-rvtGiQ@mail.gmail.com
[walken@google.com: re-add check to check_augmented()]
  Link: http://lkml.kernel.org/r/20190727022027.GA86863@google.com
Link: http://lkml.kernel.org/r/20190703040156.56953-3-walken@google.com
Signed-off-by: Michel Lespinasse <walken@google.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: David Howells <dhowells@redhat.com>
Cc: Davidlohr Bueso <dbueso@suse.de>
Cc: Uladzislau Rezki <urezki@gmail.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent 444b8a83f1
commit 315cc066b8
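A minimal usage sketch (not part of the patch; the example_node structure, its fields, and the EXAMPLE_NODE_LAST macro are invented here for illustration): with the new macro, a user defines only the per-node scalar and RB_DECLARE_CALLBACKS_MAX generates the propagate/copy/rotate callbacks.

#include <linux/rbtree_augmented.h>

/* Hypothetical node: keyed by 'start', augmented with the maximum 'last'
 * value found anywhere in the node's subtree. */
struct example_node {
        unsigned long start;
        unsigned long last;
        unsigned long subtree_last;     /* maintained by the generated callbacks */
        struct rb_node rb;
};

/* Per-node scalar; its subtree maximum is cached in subtree_last. */
#define EXAMPLE_NODE_LAST(node) ((node)->last)

/* Generates a static struct rb_augment_callbacks named example_augment. */
RB_DECLARE_CALLBACKS_MAX(static, example_augment,
                         struct example_node, rb,
                         unsigned long, subtree_last, EXAMPLE_NODE_LAST)

The generated example_augment callbacks are then passed to rb_insert_augmented()/rb_erase_augmented() exactly as before; previously the same setup required hand-writing a compute_subtree_last()-style helper and passing it to RB_DECLARE_CALLBACKS, as the removals in the diff below show.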
arch/x86/mm/pat_rbtree.c
@@ -54,23 +54,10 @@ static u64 get_subtree_max_end(struct rb_node *node)
 	return ret;
 }
 
-static u64 compute_subtree_max_end(struct memtype *data)
-{
-	u64 max_end = data->end, child_max_end;
-
-	child_max_end = get_subtree_max_end(data->rb.rb_right);
-	if (child_max_end > max_end)
-		max_end = child_max_end;
-
-	child_max_end = get_subtree_max_end(data->rb.rb_left);
-	if (child_max_end > max_end)
-		max_end = child_max_end;
-
-	return max_end;
-}
+#define NODE_END(node) ((node)->end)
 
-RB_DECLARE_CALLBACKS(static, memtype_rb_augment_cb, struct memtype, rb,
-		     u64, subtree_max_end, compute_subtree_max_end)
+RB_DECLARE_CALLBACKS_MAX(static, memtype_rb_augment_cb,
+			 struct memtype, rb, u64, subtree_max_end, NODE_END)
 
 /* Find the first (lowest start addr) overlapping range from rb tree */
 static struct memtype *memtype_rb_lowest_match(struct rb_root *root,
drivers/block/drbd/drbd_interval.c
@@ -13,33 +13,10 @@ sector_t interval_end(struct rb_node *node)
 	return this->end;
 }
 
-/**
- * compute_subtree_last - compute end of @node
- *
- * The end of an interval is the highest (start + (size >> 9)) value of this
- * node and of its children. Called for @node and its parents whenever the end
- * may have changed.
- */
-static inline sector_t
-compute_subtree_last(struct drbd_interval *node)
-{
-	sector_t max = node->sector + (node->size >> 9);
-
-	if (node->rb.rb_left) {
-		sector_t left = interval_end(node->rb.rb_left);
-		if (left > max)
-			max = left;
-	}
-	if (node->rb.rb_right) {
-		sector_t right = interval_end(node->rb.rb_right);
-		if (right > max)
-			max = right;
-	}
-	return max;
-}
+#define NODE_END(node) ((node)->sector + ((node)->size >> 9))
 
-RB_DECLARE_CALLBACKS(static, augment_callbacks, struct drbd_interval, rb,
-		     sector_t, end, compute_subtree_last);
+RB_DECLARE_CALLBACKS_MAX(static, augment_callbacks,
+			 struct drbd_interval, rb, sector_t, end, NODE_END);
 
 /**
  * drbd_insert_interval - insert a new interval into a tree
include/linux/interval_tree_generic.h
@@ -30,26 +30,8 @@
 									      \
 /* Callbacks for augmented rbtree insert and remove */			      \
 									      \
-static inline ITTYPE ITPREFIX ## _compute_subtree_last(ITSTRUCT *node)	      \
-{									      \
-	ITTYPE max = ITLAST(node), subtree_last;			      \
-	if (node->ITRB.rb_left) {					      \
-		subtree_last = rb_entry(node->ITRB.rb_left,		      \
-			ITSTRUCT, ITRB)->ITSUBTREE;			      \
-		if (max < subtree_last)					      \
-			max = subtree_last;				      \
-	}								      \
-	if (node->ITRB.rb_right) {					      \
-		subtree_last = rb_entry(node->ITRB.rb_right,		      \
-			ITSTRUCT, ITRB)->ITSUBTREE;			      \
-		if (max < subtree_last)					      \
-			max = subtree_last;				      \
-	}								      \
-	return max;							      \
-}									      \
-									      \
-RB_DECLARE_CALLBACKS(static, ITPREFIX ## _augment, ITSTRUCT, ITRB,	      \
-		     ITTYPE, ITSUBTREE, ITPREFIX ## _compute_subtree_last)   \
+RB_DECLARE_CALLBACKS_MAX(static, ITPREFIX ## _augment,			      \
+			 ITSTRUCT, ITRB, ITTYPE, ITSUBTREE, ITLAST)	      \
 									      \
 /* Insert / remove interval nodes from the tree */			      \
 									      \
include/linux/rbtree_augmented.h
@@ -61,7 +61,7 @@ rb_insert_augmented_cached(struct rb_node *node,
 }
 
 /*
- * Template for declaring augmented rbtree callbacks
+ * Template for declaring augmented rbtree callbacks (generic case)
  *
  * RBSTATIC: 'static' or empty
  * RBNAME: name of the rb_augment_callbacks structure
@@ -107,6 +107,40 @@ RBSTATIC const struct rb_augment_callbacks RBNAME = {	\
 	.rotate = RBNAME ## _rotate					\
 };
 
+/*
+ * Template for declaring augmented rbtree callbacks,
+ * computing RBAUGMENTED scalar as max(RBCOMPUTE(node)) for all subtree nodes.
+ *
+ * RBSTATIC: 'static' or empty
+ * RBNAME: name of the rb_augment_callbacks structure
+ * RBSTRUCT: struct type of the tree nodes
+ * RBFIELD: name of struct rb_node field within RBSTRUCT
+ * RBTYPE: type of the RBAUGMENTED field
+ * RBAUGMENTED: name of RBTYPE field within RBSTRUCT holding data for subtree
+ * RBCOMPUTE: name of function that returns the per-node RBTYPE scalar
+ */
+
+#define RB_DECLARE_CALLBACKS_MAX(RBSTATIC, RBNAME, RBSTRUCT, RBFIELD,	      \
+				 RBTYPE, RBAUGMENTED, RBCOMPUTE)	      \
+static inline RBTYPE RBNAME ## _compute_max(RBSTRUCT *node)		      \
+{									      \
+	RBSTRUCT *child;						      \
+	RBTYPE max = RBCOMPUTE(node);					      \
+	if (node->RBFIELD.rb_left) {					      \
+		child = rb_entry(node->RBFIELD.rb_left, RBSTRUCT, RBFIELD);  \
+		if (child->RBAUGMENTED > max)				      \
+			max = child->RBAUGMENTED;			      \
+	}								      \
+	if (node->RBFIELD.rb_right) {					      \
+		child = rb_entry(node->RBFIELD.rb_right, RBSTRUCT, RBFIELD); \
+		if (child->RBAUGMENTED > max)				      \
+			max = child->RBAUGMENTED;			      \
+	}								      \
+	return max;							      \
+}									      \
+RB_DECLARE_CALLBACKS(RBSTATIC, RBNAME, RBSTRUCT, RBFIELD,		      \
+		     RBTYPE, RBAUGMENTED, RBNAME ## _compute_max)
+
 #define	RB_RED		0
 #define	RB_BLACK	1
 
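For reference, a hand expansion written here for illustration (not part of the patch): instantiating the new macro the way lib/rbtree_test.c does below, with NODE_VAL(node) being (node)->val, would generate roughly the following helper before delegating to RB_DECLARE_CALLBACKS.

/* Approximate expansion of
 * RB_DECLARE_CALLBACKS_MAX(static, augment_callbacks,
 *                          struct test_node, rb, u32, augmented, NODE_VAL) */
static inline u32 augment_callbacks_compute_max(struct test_node *node)
{
        struct test_node *child;
        u32 max = NODE_VAL(node);

        if (node->rb.rb_left) {
                child = rb_entry(node->rb.rb_left, struct test_node, rb);
                if (child->augmented > max)
                        max = child->augmented;
        }
        if (node->rb.rb_right) {
                child = rb_entry(node->rb.rb_right, struct test_node, rb);
                if (child->augmented > max)
                        max = child->augmented;
        }
        return max;
}

This is equivalent to the augment_recompute() helper that the patch removes from the test, which is why the open-coded RBCOMPUTE functions can be deleted everywhere.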
lib/rbtree_test.c
@@ -77,26 +77,10 @@ static inline void erase_cached(struct test_node *node, struct rb_root_cached *root)
 }
 
 
-static inline u32 augment_recompute(struct test_node *node)
-{
-	u32 max = node->val, child_augmented;
-	if (node->rb.rb_left) {
-		child_augmented = rb_entry(node->rb.rb_left, struct test_node,
-					   rb)->augmented;
-		if (max < child_augmented)
-			max = child_augmented;
-	}
-	if (node->rb.rb_right) {
-		child_augmented = rb_entry(node->rb.rb_right, struct test_node,
-					   rb)->augmented;
-		if (max < child_augmented)
-			max = child_augmented;
-	}
-	return max;
-}
-
-RB_DECLARE_CALLBACKS(static, augment_callbacks, struct test_node, rb,
-		     u32, augmented, augment_recompute)
+#define NODE_VAL(node) ((node)->val)
+
+RB_DECLARE_CALLBACKS_MAX(static, augment_callbacks,
+			 struct test_node, rb, u32, augmented, NODE_VAL)
 
 static void insert_augmented(struct test_node *node,
 			     struct rb_root_cached *root)
@@ -238,7 +222,20 @@ static void check_augmented(int nr_nodes)
 	check(nr_nodes);
 	for (rb = rb_first(&root.rb_root); rb; rb = rb_next(rb)) {
 		struct test_node *node = rb_entry(rb, struct test_node, rb);
-		WARN_ON_ONCE(node->augmented != augment_recompute(node));
+		u32 subtree, max = node->val;
+		if (node->rb.rb_left) {
+			subtree = rb_entry(node->rb.rb_left, struct test_node,
+					   rb)->augmented;
+			if (max < subtree)
+				max = subtree;
+		}
+		if (node->rb.rb_right) {
+			subtree = rb_entry(node->rb.rb_right, struct test_node,
+					   rb)->augmented;
+			if (max < subtree)
+				max = subtree;
+		}
+		WARN_ON_ONCE(node->augmented != max);
 	}
 }
 
mm/mmap.c
@@ -289,9 +289,9 @@ SYSCALL_DEFINE1(brk, unsigned long, brk)
 	return retval;
 }
 
-static long vma_compute_subtree_gap(struct vm_area_struct *vma)
+static inline unsigned long vma_compute_gap(struct vm_area_struct *vma)
 {
-	unsigned long max, prev_end, subtree_gap;
+	unsigned long gap, prev_end;
 
 	/*
 	 * Note: in the rare case of a VM_GROWSDOWN above a VM_GROWSUP, we
@@ -299,14 +299,21 @@ static long vma_compute_subtree_gap(struct vm_area_struct *vma)
 	 * an unmapped area; whereas when expanding we only require one.
 	 * That's a little inconsistent, but keeps the code here simpler.
 	 */
-	max = vm_start_gap(vma);
-	if (vma->vm_prev) {
-		prev_end = vm_end_gap(vma->vm_prev);
-		if (max > prev_end)
-			max -= prev_end;
-		else
-			max = 0;
+	gap = vm_start_gap(vma);
+	if (vma->vm_prev) {
+		prev_end = vm_end_gap(vma->vm_prev);
+		if (gap > prev_end)
+			gap -= prev_end;
+		else
+			gap = 0;
 	}
+	return gap;
+}
+
+#ifdef CONFIG_DEBUG_VM_RB
+static unsigned long vma_compute_subtree_gap(struct vm_area_struct *vma)
+{
+	unsigned long max = vma_compute_gap(vma), subtree_gap;
 	if (vma->vm_rb.rb_left) {
 		subtree_gap = rb_entry(vma->vm_rb.rb_left,
 			struct vm_area_struct, vm_rb)->rb_subtree_gap;
@@ -322,7 +329,6 @@ static long vma_compute_subtree_gap(struct vm_area_struct *vma)
 	return max;
 }
 
-#ifdef CONFIG_DEBUG_VM_RB
 static int browse_rb(struct mm_struct *mm)
 {
 	struct rb_root *root = &mm->mm_rb;
@@ -428,8 +434,9 @@ static void validate_mm(struct mm_struct *mm)
 #define validate_mm(mm) do { } while (0)
 #endif
 
-RB_DECLARE_CALLBACKS(static, vma_gap_callbacks, struct vm_area_struct, vm_rb,
-		     unsigned long, rb_subtree_gap, vma_compute_subtree_gap)
+RB_DECLARE_CALLBACKS_MAX(static, vma_gap_callbacks,
+			 struct vm_area_struct, vm_rb,
+			 unsigned long, rb_subtree_gap, vma_compute_gap)
 
 /*
  * Update augmented rbtree rb_subtree_gap values after vma->vm_start or
@@ -439,8 +446,8 @@ RB_DECLARE_CALLBACKS(static, vma_gap_callbacks, struct vm_area_struct, vm_rb,
 static void vma_gap_update(struct vm_area_struct *vma)
 {
 	/*
-	 * As it turns out, RB_DECLARE_CALLBACKS() already created a callback
-	 * function that does exactly what we want.
+	 * As it turns out, RB_DECLARE_CALLBACKS_MAX() already created
+	 * a callback function that does exactly what we want.
 	 */
 	vma_gap_callbacks_propagate(&vma->vm_rb, NULL);
 }
mm/vmalloc.c
@@ -396,9 +396,8 @@ compute_subtree_max_size(struct vmap_area *va)
 		get_subtree_max_size(va->rb_node.rb_right));
 }
 
-RB_DECLARE_CALLBACKS(static, free_vmap_area_rb_augment_cb,
-	struct vmap_area, rb_node, unsigned long, subtree_max_size,
-	compute_subtree_max_size)
+RB_DECLARE_CALLBACKS_MAX(static, free_vmap_area_rb_augment_cb,
+	struct vmap_area, rb_node, unsigned long, subtree_max_size, va_size)
 
 static void purge_vmap_area_lazy(void);
 static BLOCKING_NOTIFIER_HEAD(vmap_notify_list);
tools/include/linux/rbtree_augmented.h
@@ -63,7 +63,7 @@ rb_insert_augmented_cached(struct rb_node *node,
 }
 
 /*
- * Template for declaring augmented rbtree callbacks
+ * Template for declaring augmented rbtree callbacks (generic case)
  *
  * RBSTATIC: 'static' or empty
  * RBNAME: name of the rb_augment_callbacks structure
@@ -109,6 +109,40 @@ RBSTATIC const struct rb_augment_callbacks RBNAME = {	\
 	.rotate = RBNAME ## _rotate					\
 };
 
+/*
+ * Template for declaring augmented rbtree callbacks,
+ * computing RBAUGMENTED scalar as max(RBCOMPUTE(node)) for all subtree nodes.
+ *
+ * RBSTATIC: 'static' or empty
+ * RBNAME: name of the rb_augment_callbacks structure
+ * RBSTRUCT: struct type of the tree nodes
+ * RBFIELD: name of struct rb_node field within RBSTRUCT
+ * RBTYPE: type of the RBAUGMENTED field
+ * RBAUGMENTED: name of RBTYPE field within RBSTRUCT holding data for subtree
+ * RBCOMPUTE: name of function that returns the per-node RBTYPE scalar
+ */
+
+#define RB_DECLARE_CALLBACKS_MAX(RBSTATIC, RBNAME, RBSTRUCT, RBFIELD,	      \
+				 RBTYPE, RBAUGMENTED, RBCOMPUTE)	      \
+static inline RBTYPE RBNAME ## _compute_max(RBSTRUCT *node)		      \
+{									      \
+	RBSTRUCT *child;						      \
+	RBTYPE max = RBCOMPUTE(node);					      \
+	if (node->RBFIELD.rb_left) {					      \
+		child = rb_entry(node->RBFIELD.rb_left, RBSTRUCT, RBFIELD);  \
+		if (child->RBAUGMENTED > max)				      \
+			max = child->RBAUGMENTED;			      \
+	}								      \
+	if (node->RBFIELD.rb_right) {					      \
+		child = rb_entry(node->RBFIELD.rb_right, RBSTRUCT, RBFIELD); \
+		if (child->RBAUGMENTED > max)				      \
+			max = child->RBAUGMENTED;			      \
+	}								      \
+	return max;							      \
+}									      \
+RB_DECLARE_CALLBACKS(RBSTATIC, RBNAME, RBSTRUCT, RBFIELD,		      \
+		     RBTYPE, RBAUGMENTED, RBNAME ## _compute_max)
+
 #define	RB_RED		0
 #define	RB_BLACK	1
 