Lines Matching defs:mm (each entry gives the source line number, then the matching line of code)

52 static struct drm_mm_node *drm_mm_kmalloc(struct drm_mm *mm, int atomic)
59 mtx_lock(&mm->unused_lock);
60 if (list_empty(&mm->unused_nodes))
64 list_entry(mm->unused_nodes.next,
67 --mm->num_unused;
69 mtx_unlock(&mm->unused_lock);
79 int drm_mm_pre_get(struct drm_mm *mm)
83 mtx_lock(&mm->unused_lock);
84 while (mm->num_unused < MM_UNUSED_TARGET) {
85 mtx_unlock(&mm->unused_lock);
87 mtx_lock(&mm->unused_lock);
90 int ret = (mm->num_unused < 2) ? -ENOMEM : 0;
91 mtx_unlock(&mm->unused_lock);
94 ++mm->num_unused;
95 list_add_tail(&node->node_list, &mm->unused_nodes);
97 mtx_unlock(&mm->unused_lock);
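
drm_mm_pre_get() (lines 79-97 above) tops up the mm->unused_nodes cache, under unused_lock, so that a later allocation made from atomic context can take a cached node instead of calling malloc(). A minimal sketch of that pattern, assuming the drm_mm_search_free() and drm_mm_get_block_atomic() wrappers declared in drm_mm.h of this vintage; the header paths, struct my_driver, and its mm_lock are hypothetical, and later sketches in this listing assume the same headers:

#include <dev/drm2/drmP.h>              /* header paths assumed */
#include <dev/drm2/drm_mm.h>

struct my_driver {                      /* hypothetical driver-private state */
        struct mtx mm_lock;             /* assumed initialized elsewhere */
};

static struct drm_mm_node *
alloc_in_atomic_context(struct my_driver *priv, struct drm_mm *mm,
    unsigned long size, unsigned alignment)
{
        struct drm_mm_node *hole, *node = NULL;

        /* Refill the per-mm node cache while sleeping is still allowed. */
        if (drm_mm_pre_get(mm))
                return NULL;

        mtx_lock(&priv->mm_lock);       /* hypothetical driver lock */
        hole = drm_mm_search_free(mm, size, alignment, 0);
        if (hole != NULL)
                /* the atomic variant draws from mm->unused_nodes */
                node = drm_mm_get_block_atomic(hole, size, alignment);
        mtx_unlock(&priv->mm_lock);

        return node;
}
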
121 struct drm_mm *mm = hole_node->mm;
129 if (mm->color_adjust)
130 mm->color_adjust(hole_node, color, &adj_start, &adj_end);
145 node->mm = mm;
156 list_add(&node->hole_stack, &mm->hole_stack);
169 node = drm_mm_kmalloc(hole_node->mm, atomic);
184 int drm_mm_insert_node_generic(struct drm_mm *mm, struct drm_mm_node *node,
190 hole_node = drm_mm_search_free_generic(mm, size, alignment,
200 int drm_mm_insert_node(struct drm_mm *mm, struct drm_mm_node *node,
203 return drm_mm_insert_node_generic(mm, node, size, alignment, 0);
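
drm_mm_insert_node() (lines 200-203) is the embedded-node path: the caller supplies the struct drm_mm_node and the allocator only fills in its start and size, so neither malloc() nor the unused-node cache is involved. A sketch of that usage; struct my_object and my_object_bind() are hypothetical:

struct my_object {                      /* hypothetical driver object */
        struct drm_mm_node vram_node;   /* embedded, caller-owned node */
};

static int
my_object_bind(struct drm_mm *mm, struct my_object *obj,
    unsigned long size, unsigned alignment)
{
        int ret;

        /* clear the node before its first insert */
        memset(&obj->vram_node, 0, sizeof(obj->vram_node));
        ret = drm_mm_insert_node(mm, &obj->vram_node, size, alignment);
        if (ret != 0)
                return ret;     /* -ENOSPC when no suitable hole exists */

        /* obj->vram_node.start now holds the assigned offset */
        return 0;
}
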
213 struct drm_mm *mm = hole_node->mm;
226 if (mm->color_adjust)
227 mm->color_adjust(hole_node, color, &adj_start, &adj_end);
242 node->mm = mm;
254 list_add(&node->hole_stack, &mm->hole_stack);
269 node = drm_mm_kmalloc(hole_node->mm, atomic);
285 int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, struct drm_mm_node *node,
291 hole_node = drm_mm_search_free_in_range_generic(mm,
304 int drm_mm_insert_node_in_range(struct drm_mm *mm, struct drm_mm_node *node,
308 return drm_mm_insert_node_in_range_generic(mm, node, size, alignment, 0, start, end);
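
The _in_range variants (lines 285-308) behave like the plain insert but confine the search to the caller-supplied [start, end) window. A sketch; the 256 MB window and bind_in_low_window() are illustrative only:

/* Place a caller-embedded node inside the first 256 MB of the range. */
static int
bind_in_low_window(struct drm_mm *mm, struct drm_mm_node *node,
    unsigned long size, unsigned alignment)
{
        return drm_mm_insert_node_in_range(mm, node, size, alignment,
            0, 256UL * 1024 * 1024);
}
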
317 struct drm_mm *mm = node->mm;
336 list_add(&prev_node->hole_stack, &mm->hole_stack);
338 list_move(&prev_node->hole_stack, &mm->hole_stack);
353 struct drm_mm *mm = node->mm;
357 mtx_lock(&mm->unused_lock);
358 if (mm->num_unused < MM_UNUSED_TARGET) {
359 list_add(&node->node_list, &mm->unused_nodes);
360 ++mm->num_unused;
363 mtx_unlock(&mm->unused_lock);
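
Lines 353-363 are from the node-release path: once detached, the node is parked on mm->unused_nodes (up to MM_UNUSED_TARGET) rather than freed, so it can back a future atomic allocation. For caller-embedded nodes the counterpart to drm_mm_insert_node() is drm_mm_remove_node(); a sketch, assuming that prototype from drm_mm.h:

static void
my_object_unbind(struct drm_mm_node *node)
{
        /* return the range to the hole list; the node stays caller-owned */
        drm_mm_remove_node(node);
        memset(node, 0, sizeof(*node)); /* ready for a later re-insert */
}
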
382 struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm,
392 BUG_ON(mm->scanned_blocks);
397 list_for_each_entry(entry, &mm->hole_stack, hole_stack) {
401 if (mm->color_adjust) {
402 mm->color_adjust(entry, color, &adj_start, &adj_end);
424 struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm,
436 BUG_ON(mm->scanned_blocks);
441 list_for_each_entry(entry, &mm->hole_stack, hole_stack) {
449 if (mm->color_adjust) {
450 mm->color_adjust(entry, color, &adj_start, &adj_end);
479 new->mm = old->mm;
498 void drm_mm_init_scan(struct drm_mm *mm,
503 mm->scan_color = color;
504 mm->scan_alignment = alignment;
505 mm->scan_size = size;
506 mm->scanned_blocks = 0;
507 mm->scan_hit_start = 0;
508 mm->scan_hit_end = 0;
509 mm->scan_check_range = 0;
510 mm->prev_scanned_node = NULL;
523 void drm_mm_init_scan_with_range(struct drm_mm *mm,
530 mm->scan_color = color;
531 mm->scan_alignment = alignment;
532 mm->scan_size = size;
533 mm->scanned_blocks = 0;
534 mm->scan_hit_start = 0;
535 mm->scan_hit_end = 0;
536 mm->scan_start = start;
537 mm->scan_end = end;
538 mm->scan_check_range = 1;
539 mm->prev_scanned_node = NULL;
551 struct drm_mm *mm = node->mm;
556 mm->scanned_blocks++;
568 node->node_list.next = &mm->prev_scanned_node->node_list;
569 mm->prev_scanned_node = node;
574 if (mm->scan_check_range) {
575 if (adj_start < mm->scan_start)
576 adj_start = mm->scan_start;
577 if (adj_end > mm->scan_end)
578 adj_end = mm->scan_end;
581 if (mm->color_adjust)
582 mm->color_adjust(prev_node, mm->scan_color,
586 mm->scan_size, mm->scan_alignment)) {
587 mm->scan_hit_start = hole_start;
588 mm->scan_hit_end = hole_end;
612 struct drm_mm *mm = node->mm;
615 mm->scanned_blocks--;
626 return (drm_mm_hole_node_end(node) > mm->scan_hit_start &&
627 node->start < mm->scan_hit_end);
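
Lines 498-627 implement the eviction scan: drm_mm_init_scan() (or the _with_range variant) arms the search, drm_mm_scan_add_block() feeds in currently allocated nodes until it reports that removing them would open a big-enough hole, and drm_mm_scan_remove_block() unwinds the scan and tells the caller which blocks actually sit inside that hole. A sketch of the protocol, assuming those three prototypes from drm_mm.h; struct evict_candidate stands in for a driver-maintained LRU list and is hypothetical:

struct evict_candidate {                /* hypothetical driver bookkeeping */
        struct drm_mm_node *node;
        int evict;                      /* set when the block is in the hole */
};

static int
evict_for_hole(struct drm_mm *mm, struct evict_candidate *cand, int ncand,
    unsigned long size, unsigned alignment)
{
        int i, added = 0, found = 0;

        drm_mm_init_scan(mm, size, alignment, 0 /* color */);

        /* Phase 1: add candidates (e.g. in LRU order) until a hole of the
         * requested size and alignment is reported. */
        for (i = 0; i < ncand; i++) {
                added++;
                if (drm_mm_scan_add_block(cand[i].node)) {
                        found = 1;
                        break;
                }
        }

        /* Phase 2: every added block must go back through
         * drm_mm_scan_remove_block(), in reverse order, before the mm may be
         * used again.  A nonzero return marks a block inside the hole; no
         * other drm_mm operation is allowed yet, so only record the verdict. */
        for (i = added - 1; i >= 0; i--)
                cand[i].evict = drm_mm_scan_remove_block(cand[i].node);

        if (!found)
                return -ENOSPC;

        /* Phase 3: now it is safe to evict the marked blocks (a real driver
         * would unbind the backing objects first). */
        for (i = 0; i < added; i++)
                if (cand[i].evict)
                        drm_mm_remove_node(cand[i].node);

        return 0;
}
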
631 int drm_mm_clean(struct drm_mm * mm)
633 struct list_head *head = &mm->head_node.node_list;
639 int drm_mm_init(struct drm_mm * mm, unsigned long start, unsigned long size)
641 INIT_LIST_HEAD(&mm->hole_stack);
642 INIT_LIST_HEAD(&mm->unused_nodes);
643 mm->num_unused = 0;
644 mm->scanned_blocks = 0;
645 mtx_init(&mm->unused_lock, "drm_unused", NULL, MTX_DEF);
648 INIT_LIST_HEAD(&mm->head_node.node_list);
649 INIT_LIST_HEAD(&mm->head_node.hole_stack);
650 mm->head_node.hole_follows = 1;
651 mm->head_node.scanned_block = 0;
652 mm->head_node.scanned_prev_free = 0;
653 mm->head_node.scanned_next_free = 0;
654 mm->head_node.mm = mm;
655 mm->head_node.start = start + size;
656 mm->head_node.size = start - mm->head_node.start;
657 list_add_tail(&mm->head_node.hole_stack, &mm->hole_stack);
659 mm->color_adjust = NULL;
665 void drm_mm_takedown(struct drm_mm * mm)
669 if (!list_empty(&mm->head_node.node_list)) {
674 mtx_lock(&mm->unused_lock);
675 list_for_each_entry_safe(entry, next, &mm->unused_nodes, node_list) {
678 --mm->num_unused;
680 mtx_unlock(&mm->unused_lock);
682 BUG_ON(mm->num_unused != 0);
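
drm_mm_init() (lines 639-659) sets up the manager for the range [start, start + size): note how head_node is given start = start + size and a size that wraps around (lines 655-656), so the hole that "follows" this boundary node covers the whole managed range. drm_mm_takedown() (lines 665-682) drains the unused-node cache but, per line 669, refuses to run while allocations remain. A lifecycle sketch; vram_mm_setup() and vram_mm_teardown() are hypothetical wrappers:

static int
vram_mm_setup(struct drm_mm *mm, unsigned long start, unsigned long size)
{
        /* start/size describe the managed range, e.g. a VRAM aperture */
        return drm_mm_init(mm, start, size);
}

static void
vram_mm_teardown(struct drm_mm *mm)
{
        /* every node must have been removed first; drm_mm_clean() reports
         * whether only the head node is left, and drm_mm_takedown()
         * complains and bails out otherwise */
        if (!drm_mm_clean(mm))
                printf("drm_mm: allocations still outstanding\n");
        drm_mm_takedown(mm);
}
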
686 void drm_mm_debug_table(struct drm_mm *mm, const char *prefix)
692 hole_start = drm_mm_hole_node_start(&mm->head_node);
693 hole_end = drm_mm_hole_node_end(&mm->head_node);
701 drm_mm_for_each_node(entry, mm) {
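
drm_mm_debug_table() (lines 686 onward) walks every node and the hole that follows it, printing both; useful for eyeballing fragmentation. The prefix only tags the output lines:

        drm_mm_debug_table(mm, "vram"); /* "vram" is an illustrative prefix */
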