author    Chris Wilson <chris@chris-wilson.co.uk>    2016-11-23 14:11:14 +0000
committer Daniel Vetter <daniel.vetter@ffwll.ch>     2016-11-24 09:11:27 +0100
commit    522e85dd8677e9cca40c3ae773f171e6a9eece31 (patch)
tree      fb7a4b57120aedb8f251e97d939e75cd3adc8fee /include/drm
parent    6a8a66eda17ea6b4970f0c4724958eeababc6ae8 (diff)
drm: Define drm_mm_for_each_node_in_range()
Some clients would like to iterate over every node within a certain
range. Make a nice little macro for them to hide the mixing of the
rbtree search and linear walk.

v2: Blurb

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Cc: Daniel Vetter <daniel.vetter@ffwll.ch>
Cc: dri-devel@lists.freedesktop.org
Reviewed-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
Link: http://patchwork.freedesktop.org/patch/msgid/20161123141118.23876-1-chris@chris-wilson.co.uk
Diffstat (limited to 'include/drm')
-rw-r--r--  include/drm/drm_mm.h  |  22  +++++++++++++++++++---
1 file changed, 19 insertions(+), 3 deletions(-)
diff --git a/include/drm/drm_mm.h b/include/drm/drm_mm.h
index 41ddafe..6add455 100644
--- a/include/drm/drm_mm.h
+++ b/include/drm/drm_mm.h
@@ -308,10 +308,26 @@ void drm_mm_takedown(struct drm_mm *mm);
bool drm_mm_clean(struct drm_mm *mm);
struct drm_mm_node *
-drm_mm_interval_first(struct drm_mm *mm, u64 start, u64 last);
+__drm_mm_interval_first(struct drm_mm *mm, u64 start, u64 last);
-struct drm_mm_node *
-drm_mm_interval_next(struct drm_mm_node *node, u64 start, u64 last);
+/**
+ * drm_mm_for_each_node_in_range - iterator to walk over a range of
+ * allocated nodes
+ * @node: drm_mm_node structure to assign to in each iteration step
+ * @mm: drm_mm allocator to walk
+ * @start: starting offset, the first node will overlap this
+ * @end: ending offset, the last node will start before this (but may overlap)
+ *
+ * This iterator walks over all nodes in the range allocator that lie
+ * between @start and @end. It is implemented similarly to list_for_each(),
+ * but using the internal interval tree to accelerate the search for the
+ * starting node, and so not safe against removal of elements. It assumes
+ * that @end is within (or is the upper limit of) the drm_mm allocator.
+ */
+#define drm_mm_for_each_node_in_range(node, mm, start, end) \
+	for (node = __drm_mm_interval_first((mm), (start), (end)-1); \
+	     node && node->start < (end); \
+	     node = list_next_entry(node, node_list))
void drm_mm_init_scan(struct drm_mm *mm,
u64 size,
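
For reference, a minimal usage sketch of the new iterator. The macro and the
drm_mm_node fields (start, size) come from the patch and include/drm/drm_mm.h;
the surrounding function show_nodes_in_range() is hypothetical and not part of
this commit. As the kernel-doc notes, the walk is not safe against removal of
nodes during iteration.

	#include <linux/printk.h>
	#include <drm/drm_mm.h>

	/* Hypothetical helper: print every allocated node that overlaps
	 * the range [start, end) of a drm_mm address space. */
	static void show_nodes_in_range(struct drm_mm *mm, u64 start, u64 end)
	{
		struct drm_mm_node *node;

		/* Interval-tree lookup finds the first overlapping node,
		 * then the walk continues linearly along node_list;
		 * nodes must not be removed while iterating. */
		drm_mm_for_each_node_in_range(node, mm, start, end)
			pr_info("node [%llx + %llx]\n", node->start, node->size);
	}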