
drm: Use drm_mm_insert_node_in_range_generic() for everyone

Remove a superfluous helper as drm_mm_insert_node is equivalent to
insert_node_in_range with a range of [0, U64_MAX].

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Reviewed-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
Link: http://patchwork.freedesktop.org/patch/msgid/20161222083641.2691-37-chris@chris-wilson.co.uk
Chris Wilson 2016-12-22 08:36:39 +00:00 committed by Daniel Vetter
parent 3db93756b5
commit adb040b86b
2 changed files with 67 additions and 189 deletions
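
The change hinges on the equivalence stated in the commit message. A minimal sketch of that equivalence, written against the drm_mm API as it exists at this point in history (example_insert and its parameters are illustrative only, not part of the patch):

#include <drm/drm_mm.h>

/*
 * Illustrative only: the unranged insert is the same operation as the
 * ranged insert over the whole address space [0, U64_MAX], which is why
 * drm_mm_insert_node_generic() can become a thin inline wrapper.
 */
static int example_insert(struct drm_mm *mm, struct drm_mm_node *node,
			  u64 size, u64 alignment, unsigned long color)
{
	/*
	 * Previously: drm_mm_insert_node_generic(mm, node, size, alignment,
	 *                                        color, DRM_MM_SEARCH_DEFAULT,
	 *                                        DRM_MM_CREATE_DEFAULT);
	 * After this patch that call simply expands to:
	 */
	return drm_mm_insert_node_in_range_generic(mm, node,
						   size, alignment, color,
						   0, U64_MAX,
						   DRM_MM_SEARCH_DEFAULT,
						   DRM_MM_CREATE_DEFAULT);
}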

drivers/gpu/drm/drm_mm.c

@@ -92,11 +92,6 @@
* some basic allocator dumpers for debugging.
*/
static struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
u64 size,
u64 alignment,
unsigned long color,
enum drm_mm_search_flags flags);
static struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
u64 size,
u64 alignment,
@@ -230,6 +225,7 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
struct drm_mm_node *node,
u64 size, u64 alignment,
unsigned long color,
u64 range_start, u64 range_end,
enum drm_mm_allocator_flags flags)
{
struct drm_mm *mm = hole_node->mm;
@@ -238,11 +234,14 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
u64 adj_start = hole_start;
u64 adj_end = hole_end;
DRM_MM_BUG_ON(node->allocated);
DRM_MM_BUG_ON(!drm_mm_hole_follows(hole_node) || node->allocated);
if (mm->color_adjust)
mm->color_adjust(hole_node, color, &adj_start, &adj_end);
adj_start = max(adj_start, range_start);
adj_end = min(adj_end, range_end);
if (flags & DRM_MM_CREATE_TOP)
adj_start = adj_end - size;
@@ -258,9 +257,6 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
}
}
DRM_MM_BUG_ON(adj_start < hole_start);
DRM_MM_BUG_ON(adj_end > hole_end);
if (adj_start == hole_start) {
hole_node->hole_follows = 0;
list_del(&hole_node->hole_stack);
@@ -276,7 +272,10 @@ static void drm_mm_insert_helper(struct drm_mm_node *hole_node,
drm_mm_interval_tree_add_node(hole_node, node);
DRM_MM_BUG_ON(node->start < range_start);
DRM_MM_BUG_ON(node->start < adj_start);
DRM_MM_BUG_ON(node->start + node->size > adj_end);
DRM_MM_BUG_ON(node->start + node->size > range_end);
node->hole_follows = 0;
if (__drm_mm_hole_node_start(node) < hole_end) {
@@ -359,107 +358,6 @@ int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node)
}
EXPORT_SYMBOL(drm_mm_reserve_node);
/**
* drm_mm_insert_node_generic - search for space and insert @node
* @mm: drm_mm to allocate from
* @node: preallocate node to insert
* @size: size of the allocation
* @alignment: alignment of the allocation
* @color: opaque tag value to use for this node
* @sflags: flags to fine-tune the allocation search
* @aflags: flags to fine-tune the allocation behavior
*
* The preallocated node must be cleared to 0.
*
* Returns:
* 0 on success, -ENOSPC if there's no suitable hole.
*/
int drm_mm_insert_node_generic(struct drm_mm *mm, struct drm_mm_node *node,
u64 size, u64 alignment,
unsigned long color,
enum drm_mm_search_flags sflags,
enum drm_mm_allocator_flags aflags)
{
struct drm_mm_node *hole_node;
if (WARN_ON(size == 0))
return -EINVAL;
hole_node = drm_mm_search_free_generic(mm, size, alignment,
color, sflags);
if (!hole_node)
return -ENOSPC;
drm_mm_insert_helper(hole_node, node, size, alignment, color, aflags);
return 0;
}
EXPORT_SYMBOL(drm_mm_insert_node_generic);
static void drm_mm_insert_helper_range(struct drm_mm_node *hole_node,
struct drm_mm_node *node,
u64 size, u64 alignment,
unsigned long color,
u64 start, u64 end,
enum drm_mm_allocator_flags flags)
{
struct drm_mm *mm = hole_node->mm;
u64 hole_start = drm_mm_hole_node_start(hole_node);
u64 hole_end = drm_mm_hole_node_end(hole_node);
u64 adj_start = hole_start;
u64 adj_end = hole_end;
DRM_MM_BUG_ON(!drm_mm_hole_follows(hole_node) || node->allocated);
if (mm->color_adjust)
mm->color_adjust(hole_node, color, &adj_start, &adj_end);
adj_start = max(adj_start, start);
adj_end = min(adj_end, end);
if (flags & DRM_MM_CREATE_TOP)
adj_start = adj_end - size;
if (alignment) {
u64 rem;
div64_u64_rem(adj_start, alignment, &rem);
if (rem) {
if (flags & DRM_MM_CREATE_TOP)
adj_start -= rem;
else
adj_start += alignment - rem;
}
}
if (adj_start == hole_start) {
hole_node->hole_follows = 0;
list_del(&hole_node->hole_stack);
}
node->start = adj_start;
node->size = size;
node->mm = mm;
node->color = color;
node->allocated = 1;
list_add(&node->node_list, &hole_node->node_list);
drm_mm_interval_tree_add_node(hole_node, node);
DRM_MM_BUG_ON(node->start < start);
DRM_MM_BUG_ON(node->start < adj_start);
DRM_MM_BUG_ON(node->start + node->size > adj_end);
DRM_MM_BUG_ON(node->start + node->size > end);
node->hole_follows = 0;
if (__drm_mm_hole_node_start(node) < hole_end) {
list_add(&node->hole_stack, &mm->hole_stack);
node->hole_follows = 1;
}
save_stack(node);
}
/**
* drm_mm_insert_node_in_range_generic - ranged search for space and insert @node
* @mm: drm_mm to allocate from
@@ -495,9 +393,9 @@ int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, struct drm_mm_node *n
if (!hole_node)
return -ENOSPC;
drm_mm_insert_helper_range(hole_node, node,
size, alignment, color,
start, end, aflags);
drm_mm_insert_helper(hole_node, node,
size, alignment, color,
start, end, aflags);
return 0;
}
EXPORT_SYMBOL(drm_mm_insert_node_in_range_generic);
@@ -558,48 +456,6 @@ static int check_free_hole(u64 start, u64 end, u64 size, u64 alignment)
return end >= start + size;
}
static struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
u64 size,
u64 alignment,
unsigned long color,
enum drm_mm_search_flags flags)
{
struct drm_mm_node *entry;
struct drm_mm_node *best;
u64 adj_start;
u64 adj_end;
u64 best_size;
DRM_MM_BUG_ON(mm->scan_active);
best = NULL;
best_size = ~0UL;
__drm_mm_for_each_hole(entry, mm, adj_start, adj_end,
flags & DRM_MM_SEARCH_BELOW) {
u64 hole_size = adj_end - adj_start;
if (mm->color_adjust) {
mm->color_adjust(entry, color, &adj_start, &adj_end);
if (adj_end <= adj_start)
continue;
}
if (!check_free_hole(adj_start, adj_end, size, alignment))
continue;
if (!(flags & DRM_MM_SEARCH_BEST))
return entry;
if (hole_size < best_size) {
best = entry;
best_size = hole_size;
}
}
return best;
}
static struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
u64 size,
u64 alignment,

include/drm/drm_mm.h

@@ -285,40 +285,6 @@ static inline u64 drm_mm_hole_node_end(const struct drm_mm_node *hole_node)
* Basic range manager support (drm_mm.c)
*/
int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node);
int drm_mm_insert_node_generic(struct drm_mm *mm,
struct drm_mm_node *node,
u64 size,
u64 alignment,
unsigned long color,
enum drm_mm_search_flags sflags,
enum drm_mm_allocator_flags aflags);
/**
* drm_mm_insert_node - search for space and insert @node
* @mm: drm_mm to allocate from
* @node: preallocate node to insert
* @size: size of the allocation
* @alignment: alignment of the allocation
* @flags: flags to fine-tune the allocation
*
* This is a simplified version of drm_mm_insert_node_generic() with @color set
* to 0.
*
* The preallocated node must be cleared to 0.
*
* Returns:
* 0 on success, -ENOSPC if there's no suitable hole.
*/
static inline int drm_mm_insert_node(struct drm_mm *mm,
struct drm_mm_node *node,
u64 size,
u64 alignment,
enum drm_mm_search_flags flags)
{
return drm_mm_insert_node_generic(mm, node, size, alignment, 0, flags,
DRM_MM_CREATE_DEFAULT);
}
int drm_mm_insert_node_in_range_generic(struct drm_mm *mm,
struct drm_mm_node *node,
u64 size,
@@ -328,6 +294,7 @@ int drm_mm_insert_node_in_range_generic(struct drm_mm *mm,
u64 end,
enum drm_mm_search_flags sflags,
enum drm_mm_allocator_flags aflags);
/**
* drm_mm_insert_node_in_range - ranged search for space and insert @node
* @mm: drm_mm to allocate from
@@ -359,6 +326,61 @@ static inline int drm_mm_insert_node_in_range(struct drm_mm *mm,
DRM_MM_CREATE_DEFAULT);
}
/**
* drm_mm_insert_node_generic - search for space and insert @node
* @mm: drm_mm to allocate from
* @node: preallocate node to insert
* @size: size of the allocation
* @alignment: alignment of the allocation
* @color: opaque tag value to use for this node
* @sflags: flags to fine-tune the allocation search
* @aflags: flags to fine-tune the allocation behavior
*
* The preallocated node must be cleared to 0.
*
* Returns:
* 0 on success, -ENOSPC if there's no suitable hole.
*/
static inline int
drm_mm_insert_node_generic(struct drm_mm *mm, struct drm_mm_node *node,
u64 size, u64 alignment,
unsigned long color,
enum drm_mm_search_flags sflags,
enum drm_mm_allocator_flags aflags)
{
return drm_mm_insert_node_in_range_generic(mm, node,
size, alignment, color,
0, U64_MAX,
sflags, aflags);
}
/**
* drm_mm_insert_node - search for space and insert @node
* @mm: drm_mm to allocate from
* @node: preallocate node to insert
* @size: size of the allocation
* @alignment: alignment of the allocation
* @flags: flags to fine-tune the allocation
*
* This is a simplified version of drm_mm_insert_node_generic() with @color set
* to 0.
*
* The preallocated node must be cleared to 0.
*
* Returns:
* 0 on success, -ENOSPC if there's no suitable hole.
*/
static inline int drm_mm_insert_node(struct drm_mm *mm,
struct drm_mm_node *node,
u64 size,
u64 alignment,
enum drm_mm_search_flags flags)
{
return drm_mm_insert_node_generic(mm, node,
size, alignment, 0,
flags, DRM_MM_CREATE_DEFAULT);
}
void drm_mm_remove_node(struct drm_mm_node *node);
void drm_mm_replace_node(struct drm_mm_node *old, struct drm_mm_node *new);
void drm_mm_init(struct drm_mm *mm, u64 start, u64 size);
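
Since the simplified wrappers survive as static inlines in the header, existing callers need no change. A hedged example of a caller (hypothetical driver code, not part of this patch):

/*
 * Hypothetical caller: behaviour is unchanged, the wrapper now forwards to
 * drm_mm_insert_node_in_range_generic() with the full [0, U64_MAX] range.
 */
static int reserve_space(struct drm_mm *mm, struct drm_mm_node *node,
			 u64 size, u64 align)
{
	return drm_mm_insert_node(mm, node, size, align,
				  DRM_MM_SEARCH_DEFAULT);
}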