Lines matching refs:vma (references to struct i915_vma *vma in drivers/gpu/drm/i915/i915_vma.c; each entry shows the source line number, the matching line, and the enclosing function)
48 static inline void assert_vma_held_evict(const struct i915_vma *vma) in assert_vma_held_evict() argument
55 if (kref_read(&vma->vm->ref)) in assert_vma_held_evict()
56 assert_object_held_shared(vma->obj); in assert_vma_held_evict()
66 static void i915_vma_free(struct i915_vma *vma) in i915_vma_free() argument
68 return kmem_cache_free(slab_vmas, vma); in i915_vma_free()
75 static void vma_print_allocator(struct i915_vma *vma, const char *reason) in vma_print_allocator() argument
79 if (!vma->node.stack) { in vma_print_allocator()
80 drm_dbg(vma->obj->base.dev, in vma_print_allocator()
82 vma->node.start, vma->node.size, reason); in vma_print_allocator()
86 stack_depot_snprint(vma->node.stack, buf, sizeof(buf), 0); in vma_print_allocator()
87 drm_dbg(vma->obj->base.dev, in vma_print_allocator()
89 vma->node.start, vma->node.size, reason, buf); in vma_print_allocator()
94 static void vma_print_allocator(struct i915_vma *vma, const char *reason) in vma_print_allocator() argument
107 struct i915_vma *vma = active_to_vma(ref); in __i915_vma_active() local
109 if (!i915_vma_tryget(vma)) in __i915_vma_active()
116 if (!i915_vma_is_ggtt(vma)) { in __i915_vma_active()
124 intel_gt_pm_get_untracked(vma->vm->gt); in __i915_vma_active()
132 struct i915_vma *vma = active_to_vma(ref); in __i915_vma_retire() local
134 if (!i915_vma_is_ggtt(vma)) { in __i915_vma_retire()
139 intel_gt_pm_put_async_untracked(vma->vm->gt); in __i915_vma_retire()
142 i915_vma_put(vma); in __i915_vma_retire()
151 struct i915_vma *vma; in vma_create() local
158 vma = i915_vma_alloc(); in vma_create()
159 if (vma == NULL) in vma_create()
162 vma->ops = &vm->vma_ops; in vma_create()
163 vma->obj = obj; in vma_create()
164 vma->size = obj->base.size; in vma_create()
165 vma->display_alignment = I915_GTT_MIN_ALIGNMENT; in vma_create()
167 i915_active_init(&vma->active, __i915_vma_active, __i915_vma_retire, 0); in vma_create()
172 might_lock(&vma->active.mutex); in vma_create()
176 INIT_LIST_HEAD(&vma->closed_link); in vma_create()
177 INIT_LIST_HEAD(&vma->obj_link); in vma_create()
178 RB_CLEAR_NODE(&vma->obj_node); in vma_create()
181 vma->gtt_view = *view; in vma_create()
187 vma->size = view->partial.size; in vma_create()
188 vma->size <<= PAGE_SHIFT; in vma_create()
189 GEM_BUG_ON(vma->size > obj->base.size); in vma_create()
191 vma->size = intel_rotation_info_size(&view->rotated); in vma_create()
192 vma->size <<= PAGE_SHIFT; in vma_create()
194 vma->size = intel_remapped_info_size(&view->remapped); in vma_create()
195 vma->size <<= PAGE_SHIFT; in vma_create()
199 if (unlikely(vma->size > vm->total)) in vma_create()
202 GEM_BUG_ON(!IS_ALIGNED(vma->size, I915_GTT_PAGE_SIZE)); in vma_create()
210 vma->vm = vm; in vma_create()
211 list_add_tail(&vma->vm_link, &vm->unbound_list); in vma_create()
213 spin_lock(&obj->vma.lock); in vma_create()
215 if (unlikely(overflows_type(vma->size, u32))) in vma_create()
218 vma->fence_size = i915_gem_fence_size(vm->i915, vma->size, in vma_create()
221 if (unlikely(vma->fence_size < vma->size || /* overflow */ in vma_create()
222 vma->fence_size > vm->total)) in vma_create()
225 GEM_BUG_ON(!IS_ALIGNED(vma->fence_size, I915_GTT_MIN_ALIGNMENT)); in vma_create()
227 vma->fence_alignment = i915_gem_fence_alignment(vm->i915, vma->size, in vma_create()
230 GEM_BUG_ON(!is_power_of_2(vma->fence_alignment)); in vma_create()
232 __set_bit(I915_VMA_GGTT_BIT, __i915_vma_flags(vma)); in vma_create()
236 p = &obj->vma.tree.rb_node; in vma_create()
256 rb_link_node(&vma->obj_node, rb, p); in vma_create()
257 rb_insert_color(&vma->obj_node, &obj->vma.tree); in vma_create()
259 if (i915_vma_is_ggtt(vma)) in vma_create()
266 list_add(&vma->obj_link, &obj->vma.list); in vma_create()
268 list_add_tail(&vma->obj_link, &obj->vma.list); in vma_create()
270 spin_unlock(&obj->vma.lock); in vma_create()
273 return vma; in vma_create()
276 spin_unlock(&obj->vma.lock); in vma_create()
277 list_del_init(&vma->vm_link); in vma_create()
280 i915_vma_free(vma); in vma_create()
291 rb = obj->vma.tree.rb_node; in i915_vma_lookup()
293 struct i915_vma *vma = rb_entry(rb, struct i915_vma, obj_node); in i915_vma_lookup() local
296 cmp = i915_vma_compare(vma, vm, view); in i915_vma_lookup()
298 return vma; in i915_vma_lookup()
327 struct i915_vma *vma; in i915_vma_instance() local
332 spin_lock(&obj->vma.lock); in i915_vma_instance()
333 vma = i915_vma_lookup(obj, vm, view); in i915_vma_instance()
334 spin_unlock(&obj->vma.lock); in i915_vma_instance()
337 if (unlikely(!vma)) in i915_vma_instance()
338 vma = vma_create(obj, vm, view); in i915_vma_instance()
340 GEM_BUG_ON(!IS_ERR(vma) && i915_vma_compare(vma, vm, view)); in i915_vma_instance()
341 return vma; in i915_vma_instance()
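
The i915_vma_instance() entries above show the lookup-or-create path: a lookup in obj->vma.tree under obj->vma.lock, falling back to vma_create() when no matching vma exists. A minimal usage sketch, assuming a caller that already holds references to the object and the address space; the helper name example_pin_vma and the PIN_USER flag are illustrative only, not taken from this listing:

	/* Illustrative sketch: look up (or create) the vma for obj in vm, then pin it.
	 * i915_vma_instance() never returns NULL; it returns a vma or an ERR_PTR. */
	static int example_pin_vma(struct drm_i915_gem_object *obj,
				   struct i915_address_space *vm)
	{
		struct i915_vma *vma;
		int err;

		vma = i915_vma_instance(obj, vm, NULL); /* NULL view: default GTT view */
		if (IS_ERR(vma))
			return PTR_ERR(vma);

		/* i915_vma_pin() wraps the ww-locked i915_vma_pin_ww() path shown below. */
		err = i915_vma_pin(vma, 0, 0, PIN_USER);
		if (err)
			return err;

		/* ... use the binding ... */

		i915_vma_unpin(vma);
		return 0;
	}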
405 int i915_vma_wait_for_bind(struct i915_vma *vma) in i915_vma_wait_for_bind() argument
409 if (rcu_access_pointer(vma->active.excl.fence)) { in i915_vma_wait_for_bind()
413 fence = dma_fence_get_rcu_safe(&vma->active.excl.fence); in i915_vma_wait_for_bind()
425 static int i915_vma_verify_bind_complete(struct i915_vma *vma) in i915_vma_verify_bind_complete() argument
427 struct dma_fence *fence = i915_active_fence_get(&vma->active.excl); in i915_vma_verify_bind_complete()
448 struct i915_vma *vma) in i915_vma_resource_init_from_vma() argument
450 struct drm_i915_gem_object *obj = vma->obj; in i915_vma_resource_init_from_vma()
452 i915_vma_resource_init(vma_res, vma->vm, vma->pages, &vma->page_sizes, in i915_vma_resource_init_from_vma()
455 vma->ops, vma->private, __i915_vma_offset(vma), in i915_vma_resource_init_from_vma()
456 __i915_vma_size(vma), vma->size, vma->guard); in i915_vma_resource_init_from_vma()
472 int i915_vma_bind(struct i915_vma *vma, in i915_vma_bind() argument
482 lockdep_assert_held(&vma->vm->mutex); in i915_vma_bind()
483 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_bind()
484 GEM_BUG_ON(vma->size > i915_vma_size(vma)); in i915_vma_bind()
486 if (GEM_DEBUG_WARN_ON(range_overflows(vma->node.start, in i915_vma_bind()
487 vma->node.size, in i915_vma_bind()
488 vma->vm->total))) { in i915_vma_bind()
501 vma_flags = atomic_read(&vma->flags); in i915_vma_bind()
510 GEM_BUG_ON(!atomic_read(&vma->pages_count)); in i915_vma_bind()
513 if (work && bind_flags & vma->vm->bind_async_flags) in i915_vma_bind()
514 ret = i915_vma_resource_bind_dep_await(vma->vm, in i915_vma_bind()
516 vma->node.start, in i915_vma_bind()
517 vma->node.size, in i915_vma_bind()
523 ret = i915_vma_resource_bind_dep_sync(vma->vm, vma->node.start, in i915_vma_bind()
524 vma->node.size, true); in i915_vma_bind()
530 if (vma->resource || !vma_res) { in i915_vma_bind()
535 i915_vma_resource_init_from_vma(vma_res, vma); in i915_vma_bind()
536 vma->resource = vma_res; in i915_vma_bind()
538 trace_i915_vma_bind(vma, bind_flags); in i915_vma_bind()
539 if (work && bind_flags & vma->vm->bind_async_flags) { in i915_vma_bind()
542 work->vma_res = i915_vma_resource_get(vma->resource); in i915_vma_bind()
555 prev = i915_active_set_exclusive(&vma->active, &work->base.dma); in i915_vma_bind()
564 work->obj = i915_gem_object_get(vma->obj); in i915_vma_bind()
566 ret = i915_gem_object_wait_moving_fence(vma->obj, true); in i915_vma_bind()
568 i915_vma_resource_free(vma->resource); in i915_vma_bind()
569 vma->resource = NULL; in i915_vma_bind()
573 vma->ops->bind_vma(vma->vm, NULL, vma->resource, pat_index, in i915_vma_bind()
577 atomic_or(bind_flags, &vma->flags); in i915_vma_bind()
581 void __iomem *i915_vma_pin_iomap(struct i915_vma *vma) in i915_vma_pin_iomap() argument
586 if (WARN_ON_ONCE(vma->obj->flags & I915_BO_ALLOC_GPU_ONLY)) in i915_vma_pin_iomap()
589 GEM_BUG_ON(!i915_vma_is_ggtt(vma)); in i915_vma_pin_iomap()
590 GEM_BUG_ON(!i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND)); in i915_vma_pin_iomap()
591 GEM_BUG_ON(i915_vma_verify_bind_complete(vma)); in i915_vma_pin_iomap()
593 ptr = READ_ONCE(vma->iomap); in i915_vma_pin_iomap()
601 if (i915_gem_object_is_lmem(vma->obj)) { in i915_vma_pin_iomap()
602 ptr = i915_gem_object_lmem_io_map(vma->obj, 0, in i915_vma_pin_iomap()
603 vma->obj->base.size); in i915_vma_pin_iomap()
604 } else if (i915_vma_is_map_and_fenceable(vma)) { in i915_vma_pin_iomap()
605 ptr = io_mapping_map_wc(&i915_vm_to_ggtt(vma->vm)->iomap, in i915_vma_pin_iomap()
606 i915_vma_offset(vma), in i915_vma_pin_iomap()
607 i915_vma_size(vma)); in i915_vma_pin_iomap()
610 i915_gem_object_pin_map(vma->obj, I915_MAP_WC); in i915_vma_pin_iomap()
623 if (unlikely(cmpxchg(&vma->iomap, NULL, ptr))) { in i915_vma_pin_iomap()
625 __i915_gem_object_release_map(vma->obj); in i915_vma_pin_iomap()
628 ptr = vma->iomap; in i915_vma_pin_iomap()
632 __i915_vma_pin(vma); in i915_vma_pin_iomap()
634 err = i915_vma_pin_fence(vma); in i915_vma_pin_iomap()
638 i915_vma_set_ggtt_write(vma); in i915_vma_pin_iomap()
644 __i915_vma_unpin(vma); in i915_vma_pin_iomap()
649 void i915_vma_flush_writes(struct i915_vma *vma) in i915_vma_flush_writes() argument
651 if (i915_vma_unset_ggtt_write(vma)) in i915_vma_flush_writes()
652 intel_gt_flush_ggtt_writes(vma->vm->gt); in i915_vma_flush_writes()
655 void i915_vma_unpin_iomap(struct i915_vma *vma) in i915_vma_unpin_iomap() argument
657 GEM_BUG_ON(vma->iomap == NULL); in i915_vma_unpin_iomap()
661 i915_vma_flush_writes(vma); in i915_vma_unpin_iomap()
663 i915_vma_unpin_fence(vma); in i915_vma_unpin_iomap()
664 i915_vma_unpin(vma); in i915_vma_unpin_iomap()
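
i915_vma_pin_iomap() and i915_vma_unpin_iomap() bracket CPU access through an I/O mapping of the vma (an LMEM I/O mapping, the GGTT aperture, or a WC CPU map as a fallback, per the branches above). A minimal sketch of the calling pattern, assuming the vma is already bound and pinned in the GGTT (PIN_GLOBAL); the value written is a placeholder:

	void __iomem *map;

	/* pin_iomap() requires an existing GGTT binding; it only adds its own
	 * pin (and, where possible, a fence) on top of the caller's pin. */
	map = i915_vma_pin_iomap(vma);
	if (IS_ERR(map))
		return PTR_ERR(map);

	iowrite32(0xdeadbeef, map);	/* placeholder write through the mapping */

	/* Flushes pending GGTT writes and drops the fence and pin taken by
	 * pin_iomap(); the caller's own pin on the vma remains. */
	i915_vma_unpin_iomap(vma);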
669 struct i915_vma *vma; in i915_vma_unpin_and_release() local
672 vma = fetch_and_zero(p_vma); in i915_vma_unpin_and_release()
673 if (!vma) in i915_vma_unpin_and_release()
676 obj = vma->obj; in i915_vma_unpin_and_release()
679 i915_vma_unpin(vma); in i915_vma_unpin_and_release()
687 bool i915_vma_misplaced(const struct i915_vma *vma, in i915_vma_misplaced() argument
690 if (!drm_mm_node_allocated(&vma->node)) in i915_vma_misplaced()
693 if (test_bit(I915_VMA_ERROR_BIT, __i915_vma_flags(vma))) in i915_vma_misplaced()
696 if (i915_vma_size(vma) < size) in i915_vma_misplaced()
700 if (alignment && !IS_ALIGNED(i915_vma_offset(vma), alignment)) in i915_vma_misplaced()
703 if (flags & PIN_MAPPABLE && !i915_vma_is_map_and_fenceable(vma)) in i915_vma_misplaced()
707 i915_vma_offset(vma) < (flags & PIN_OFFSET_MASK)) in i915_vma_misplaced()
711 i915_vma_offset(vma) != (flags & PIN_OFFSET_MASK)) in i915_vma_misplaced()
715 vma->guard < (flags & PIN_OFFSET_MASK)) in i915_vma_misplaced()
721 void __i915_vma_set_map_and_fenceable(struct i915_vma *vma) in __i915_vma_set_map_and_fenceable() argument
725 GEM_BUG_ON(!i915_vma_is_ggtt(vma)); in __i915_vma_set_map_and_fenceable()
726 GEM_BUG_ON(!vma->fence_size); in __i915_vma_set_map_and_fenceable()
728 fenceable = (i915_vma_size(vma) >= vma->fence_size && in __i915_vma_set_map_and_fenceable()
729 IS_ALIGNED(i915_vma_offset(vma), vma->fence_alignment)); in __i915_vma_set_map_and_fenceable()
731 mappable = i915_ggtt_offset(vma) + vma->fence_size <= in __i915_vma_set_map_and_fenceable()
732 i915_vm_to_ggtt(vma->vm)->mappable_end; in __i915_vma_set_map_and_fenceable()
735 set_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(vma)); in __i915_vma_set_map_and_fenceable()
737 clear_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(vma)); in __i915_vma_set_map_and_fenceable()
740 bool i915_gem_valid_gtt_space(struct i915_vma *vma, unsigned long color) in i915_gem_valid_gtt_space() argument
742 struct drm_mm_node *node = &vma->node; in i915_gem_valid_gtt_space()
752 if (!i915_vm_has_cache_coloring(vma->vm)) in i915_gem_valid_gtt_space()
788 i915_vma_insert(struct i915_vma *vma, struct i915_gem_ww_ctx *ww, in i915_vma_insert() argument
795 GEM_BUG_ON(i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND | I915_VMA_LOCAL_BIND)); in i915_vma_insert()
796 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in i915_vma_insert()
799 size = max(size, vma->size); in i915_vma_insert()
800 alignment = max_t(typeof(alignment), alignment, vma->display_alignment); in i915_vma_insert()
802 size = max_t(typeof(size), size, vma->fence_size); in i915_vma_insert()
804 alignment, vma->fence_alignment); in i915_vma_insert()
811 guard = vma->guard; /* retain guard across rebinds */ in i915_vma_insert()
826 end = vma->vm->total; in i915_vma_insert()
828 end = min_t(u64, end, i915_vm_to_ggtt(vma->vm)->mappable_end); in i915_vma_insert()
833 alignment = max(alignment, i915_vm_obj_min_alignment(vma->vm, vma->obj)); in i915_vma_insert()
841 drm_dbg(vma->obj->base.dev, in i915_vma_insert()
849 if (i915_vm_has_cache_coloring(vma->vm)) in i915_vma_insert()
850 color = vma->obj->pat_index; in i915_vma_insert()
866 ret = i915_gem_gtt_reserve(vma->vm, ww, &vma->node, in i915_vma_insert()
883 vma->page_sizes.sg > I915_GTT_PAGE_SIZE && in i915_vma_insert()
884 !HAS_64K_PAGES(vma->vm->i915)) { in i915_vma_insert()
892 rounddown_pow_of_two(vma->page_sizes.sg | in i915_vma_insert()
900 GEM_BUG_ON(i915_vma_is_ggtt(vma)); in i915_vma_insert()
904 if (vma->page_sizes.sg & I915_GTT_PAGE_SIZE_64K) in i915_vma_insert()
908 ret = i915_gem_gtt_insert(vma->vm, ww, &vma->node, in i915_vma_insert()
914 GEM_BUG_ON(vma->node.start < start); in i915_vma_insert()
915 GEM_BUG_ON(vma->node.start + vma->node.size > end); in i915_vma_insert()
917 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_insert()
918 GEM_BUG_ON(!i915_gem_valid_gtt_space(vma, color)); in i915_vma_insert()
920 list_move_tail(&vma->vm_link, &vma->vm->bound_list); in i915_vma_insert()
921 vma->guard = guard; in i915_vma_insert()
927 i915_vma_detach(struct i915_vma *vma) in i915_vma_detach() argument
929 GEM_BUG_ON(!drm_mm_node_allocated(&vma->node)); in i915_vma_detach()
930 GEM_BUG_ON(i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND | I915_VMA_LOCAL_BIND)); in i915_vma_detach()
937 list_move_tail(&vma->vm_link, &vma->vm->unbound_list); in i915_vma_detach()
940 static bool try_qad_pin(struct i915_vma *vma, unsigned int flags) in try_qad_pin() argument
944 bound = atomic_read(&vma->flags); in try_qad_pin()
962 } while (!atomic_try_cmpxchg(&vma->flags, &bound, bound + 1)); in try_qad_pin()
1300 __i915_vma_get_pages(struct i915_vma *vma) in __i915_vma_get_pages() argument
1310 GEM_BUG_ON(!i915_gem_object_has_pinned_pages(vma->obj)); in __i915_vma_get_pages()
1312 switch (vma->gtt_view.type) { in __i915_vma_get_pages()
1314 GEM_BUG_ON(vma->gtt_view.type); in __i915_vma_get_pages()
1317 pages = vma->obj->mm.pages; in __i915_vma_get_pages()
1322 intel_rotate_pages(&vma->gtt_view.rotated, vma->obj); in __i915_vma_get_pages()
1327 intel_remap_pages(&vma->gtt_view.remapped, vma->obj); in __i915_vma_get_pages()
1331 pages = intel_partial_pages(&vma->gtt_view, vma->obj); in __i915_vma_get_pages()
1336 drm_err(&vma->vm->i915->drm, in __i915_vma_get_pages()
1338 vma->gtt_view.type, PTR_ERR(pages)); in __i915_vma_get_pages()
1342 vma->pages = pages; in __i915_vma_get_pages()
1347 I915_SELFTEST_EXPORT int i915_vma_get_pages(struct i915_vma *vma) in i915_vma_get_pages() argument
1351 if (atomic_add_unless(&vma->pages_count, 1, 0)) in i915_vma_get_pages()
1354 err = i915_gem_object_pin_pages(vma->obj); in i915_vma_get_pages()
1358 err = __i915_vma_get_pages(vma); in i915_vma_get_pages()
1362 vma->page_sizes = vma->obj->mm.page_sizes; in i915_vma_get_pages()
1363 atomic_inc(&vma->pages_count); in i915_vma_get_pages()
1368 __i915_gem_object_unpin_pages(vma->obj); in i915_vma_get_pages()
1394 static void __vma_put_pages(struct i915_vma *vma, unsigned int count) in __vma_put_pages() argument
1397 GEM_BUG_ON(atomic_read(&vma->pages_count) < count); in __vma_put_pages()
1399 if (atomic_sub_return(count, &vma->pages_count) == 0) { in __vma_put_pages()
1400 if (vma->pages != vma->obj->mm.pages) { in __vma_put_pages()
1401 sg_free_table(vma->pages); in __vma_put_pages()
1402 kfree(vma->pages); in __vma_put_pages()
1404 vma->pages = NULL; in __vma_put_pages()
1406 i915_gem_object_unpin_pages(vma->obj); in __vma_put_pages()
1410 I915_SELFTEST_EXPORT void i915_vma_put_pages(struct i915_vma *vma) in i915_vma_put_pages() argument
1412 if (atomic_add_unless(&vma->pages_count, -1, 1)) in i915_vma_put_pages()
1415 __vma_put_pages(vma, 1); in i915_vma_put_pages()
1418 static void vma_unbind_pages(struct i915_vma *vma) in vma_unbind_pages() argument
1422 lockdep_assert_held(&vma->vm->mutex); in vma_unbind_pages()
1425 count = atomic_read(&vma->pages_count); in vma_unbind_pages()
1429 __vma_put_pages(vma, count | count << I915_VMA_PAGES_BIAS); in vma_unbind_pages()
1432 int i915_vma_pin_ww(struct i915_vma *vma, struct i915_gem_ww_ctx *ww, in i915_vma_pin_ww() argument
1442 assert_vma_held(vma); in i915_vma_pin_ww()
1451 if (try_qad_pin(vma, flags)) in i915_vma_pin_ww()
1454 err = i915_vma_get_pages(vma); in i915_vma_pin_ww()
1465 wakeref = intel_runtime_pm_get(&vma->vm->i915->runtime_pm); in i915_vma_pin_ww()
1467 if (flags & vma->vm->bind_async_flags) { in i915_vma_pin_ww()
1469 err = i915_vm_lock_objects(vma->vm, ww); in i915_vma_pin_ww()
1479 work->vm = vma->vm; in i915_vma_pin_ww()
1481 err = i915_gem_object_get_moving_fence(vma->obj, &moving); in i915_vma_pin_ww()
1488 if (vma->vm->allocate_va_range) { in i915_vma_pin_ww()
1489 err = i915_vm_alloc_pt_stash(vma->vm, in i915_vma_pin_ww()
1491 vma->size); in i915_vma_pin_ww()
1495 err = i915_vm_map_pt_stash(vma->vm, &work->stash); in i915_vma_pin_ww()
1524 err = mutex_lock_interruptible_nested(&vma->vm->mutex, in i915_vma_pin_ww()
1531 if (unlikely(i915_vma_is_closed(vma))) { in i915_vma_pin_ww()
1536 bound = atomic_read(&vma->flags); in i915_vma_pin_ww()
1549 __i915_vma_pin(vma); in i915_vma_pin_ww()
1553 err = i915_active_acquire(&vma->active); in i915_vma_pin_ww()
1558 err = i915_vma_insert(vma, ww, size, alignment, flags); in i915_vma_pin_ww()
1562 if (i915_is_ggtt(vma->vm)) in i915_vma_pin_ww()
1563 __i915_vma_set_map_and_fenceable(vma); in i915_vma_pin_ww()
1566 GEM_BUG_ON(!vma->pages); in i915_vma_pin_ww()
1567 err = i915_vma_bind(vma, in i915_vma_pin_ww()
1568 vma->obj->pat_index, in i915_vma_pin_ww()
1576 atomic_add(I915_VMA_PAGES_ACTIVE, &vma->pages_count); in i915_vma_pin_ww()
1577 list_move_tail(&vma->vm_link, &vma->vm->bound_list); in i915_vma_pin_ww()
1580 __i915_vma_pin(vma); in i915_vma_pin_ww()
1581 GEM_BUG_ON(!i915_vma_is_pinned(vma)); in i915_vma_pin_ww()
1583 GEM_BUG_ON(!i915_vma_is_bound(vma, flags)); in i915_vma_pin_ww()
1584 GEM_BUG_ON(i915_vma_misplaced(vma, size, alignment, flags)); in i915_vma_pin_ww()
1587 if (!i915_vma_is_bound(vma, I915_VMA_BIND_MASK)) { in i915_vma_pin_ww()
1588 i915_vma_detach(vma); in i915_vma_pin_ww()
1589 drm_mm_remove_node(&vma->node); in i915_vma_pin_ww()
1592 i915_active_release(&vma->active); in i915_vma_pin_ww()
1594 mutex_unlock(&vma->vm->mutex); in i915_vma_pin_ww()
1601 intel_runtime_pm_put(&vma->vm->i915->runtime_pm, wakeref); in i915_vma_pin_ww()
1606 i915_vma_put_pages(vma); in i915_vma_pin_ww()
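
i915_vma_pin_ww() expects the object's reservation lock to be held through a ww acquire context (assert_vma_held() above) so that it can back off and retry on -EDEADLK. A sketch of the usual locking loop, assuming obj and vma are valid and in scope; the PIN_USER flag is an example only:

	struct i915_gem_ww_ctx ww;
	int err;

	/* for_i915_gem_ww() re-runs the body after backing off on -EDEADLK. */
	for_i915_gem_ww(&ww, err, true) {
		err = i915_gem_object_lock(obj, &ww);
		if (err)
			continue;

		err = i915_vma_pin_ww(vma, &ww, 0, 0, PIN_USER);
	}
	if (err)
		return err;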
1621 static int __i915_ggtt_pin(struct i915_vma *vma, struct i915_gem_ww_ctx *ww, in __i915_ggtt_pin() argument
1624 struct i915_address_space *vm = vma->vm; in __i915_ggtt_pin()
1630 err = i915_vma_pin_ww(vma, ww, 0, align, flags | PIN_GLOBAL); in __i915_ggtt_pin()
1634 err = i915_vma_wait_for_bind(vma); in __i915_ggtt_pin()
1636 i915_vma_unpin(vma); in __i915_ggtt_pin()
1656 int i915_ggtt_pin(struct i915_vma *vma, struct i915_gem_ww_ctx *ww, in i915_ggtt_pin() argument
1662 GEM_BUG_ON(!i915_vma_is_ggtt(vma)); in i915_ggtt_pin()
1665 return __i915_ggtt_pin(vma, ww, align, flags); in i915_ggtt_pin()
1667 lockdep_assert_not_held(&vma->obj->base.resv->lock.base); in i915_ggtt_pin()
1670 err = i915_gem_object_lock(vma->obj, &_ww); in i915_ggtt_pin()
1672 err = __i915_ggtt_pin(vma, &_ww, align, flags); in i915_ggtt_pin()
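
As the lines above show, i915_ggtt_pin() accepts a NULL ww context and, in that case, runs its own object-lock/backoff loop around __i915_ggtt_pin(), which also waits for the bind to complete. An illustrative call, with the alignment and flags chosen as an example only:

	err = i915_ggtt_pin(vma, NULL, 0, PIN_HIGH);
	if (err)
		return err;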
1688 struct i915_vma *vma; in i915_ggtt_clear_scanout() local
1690 spin_lock(&obj->vma.lock); in i915_ggtt_clear_scanout()
1691 for_each_ggtt_vma(vma, obj) { in i915_ggtt_clear_scanout()
1692 i915_vma_clear_scanout(vma); in i915_ggtt_clear_scanout()
1693 vma->display_alignment = I915_GTT_MIN_ALIGNMENT; in i915_ggtt_clear_scanout()
1695 spin_unlock(&obj->vma.lock); in i915_ggtt_clear_scanout()
1698 static void __vma_close(struct i915_vma *vma, struct intel_gt *gt) in __vma_close() argument
1712 GEM_BUG_ON(i915_vma_is_closed(vma)); in __vma_close()
1713 list_add(&vma->closed_link, >->closed_vma); in __vma_close()
1716 void i915_vma_close(struct i915_vma *vma) in i915_vma_close() argument
1718 struct intel_gt *gt = vma->vm->gt; in i915_vma_close()
1721 if (i915_vma_is_ggtt(vma)) in i915_vma_close()
1724 GEM_BUG_ON(!atomic_read(&vma->open_count)); in i915_vma_close()
1725 if (atomic_dec_and_lock_irqsave(&vma->open_count, in i915_vma_close()
1728 __vma_close(vma, gt); in i915_vma_close()
1733 static void __i915_vma_remove_closed(struct i915_vma *vma) in __i915_vma_remove_closed() argument
1735 list_del_init(&vma->closed_link); in __i915_vma_remove_closed()
1738 void i915_vma_reopen(struct i915_vma *vma) in i915_vma_reopen() argument
1740 struct intel_gt *gt = vma->vm->gt; in i915_vma_reopen()
1743 if (i915_vma_is_closed(vma)) in i915_vma_reopen()
1744 __i915_vma_remove_closed(vma); in i915_vma_reopen()
1748 static void force_unbind(struct i915_vma *vma) in force_unbind() argument
1750 if (!drm_mm_node_allocated(&vma->node)) in force_unbind()
1753 atomic_and(~I915_VMA_PIN_MASK, &vma->flags); in force_unbind()
1754 WARN_ON(__i915_vma_unbind(vma)); in force_unbind()
1755 GEM_BUG_ON(drm_mm_node_allocated(&vma->node)); in force_unbind()
1758 static void release_references(struct i915_vma *vma, struct intel_gt *gt, in release_references() argument
1761 struct drm_i915_gem_object *obj = vma->obj; in release_references()
1763 GEM_BUG_ON(i915_vma_is_active(vma)); in release_references()
1765 spin_lock(&obj->vma.lock); in release_references()
1766 list_del(&vma->obj_link); in release_references()
1767 if (!RB_EMPTY_NODE(&vma->obj_node)) in release_references()
1768 rb_erase(&vma->obj_node, &obj->vma.tree); in release_references()
1770 spin_unlock(&obj->vma.lock); in release_references()
1773 __i915_vma_remove_closed(vma); in release_references()
1777 i915_vm_resv_put(vma->vm); in release_references()
1779 i915_active_fini(&vma->active); in release_references()
1780 GEM_WARN_ON(vma->resource); in release_references()
1781 i915_vma_free(vma); in release_references()
1810 void i915_vma_destroy_locked(struct i915_vma *vma) in i915_vma_destroy_locked() argument
1812 lockdep_assert_held(&vma->vm->mutex); in i915_vma_destroy_locked()
1814 force_unbind(vma); in i915_vma_destroy_locked()
1815 list_del_init(&vma->vm_link); in i915_vma_destroy_locked()
1816 release_references(vma, vma->vm->gt, false); in i915_vma_destroy_locked()
1819 void i915_vma_destroy(struct i915_vma *vma) in i915_vma_destroy() argument
1824 mutex_lock(&vma->vm->mutex); in i915_vma_destroy()
1825 force_unbind(vma); in i915_vma_destroy()
1826 list_del_init(&vma->vm_link); in i915_vma_destroy()
1827 vm_ddestroy = vma->vm_ddestroy; in i915_vma_destroy()
1828 vma->vm_ddestroy = false; in i915_vma_destroy()
1831 gt = vma->vm->gt; in i915_vma_destroy()
1832 mutex_unlock(&vma->vm->mutex); in i915_vma_destroy()
1833 release_references(vma, gt, vm_ddestroy); in i915_vma_destroy()
1838 struct i915_vma *vma, *next; in i915_vma_parked() local
1842 list_for_each_entry_safe(vma, next, >->closed_vma, closed_link) { in i915_vma_parked()
1843 struct drm_i915_gem_object *obj = vma->obj; in i915_vma_parked()
1844 struct i915_address_space *vm = vma->vm; in i915_vma_parked()
1856 list_move(&vma->closed_link, &closed); in i915_vma_parked()
1861 list_for_each_entry_safe(vma, next, &closed, closed_link) { in i915_vma_parked()
1862 struct drm_i915_gem_object *obj = vma->obj; in i915_vma_parked()
1863 struct i915_address_space *vm = vma->vm; in i915_vma_parked()
1866 INIT_LIST_HEAD(&vma->closed_link); in i915_vma_parked()
1867 i915_vma_destroy(vma); in i915_vma_parked()
1872 list_add(&vma->closed_link, >->closed_vma); in i915_vma_parked()
1881 static void __i915_vma_iounmap(struct i915_vma *vma) in __i915_vma_iounmap() argument
1883 GEM_BUG_ON(i915_vma_is_pinned(vma)); in __i915_vma_iounmap()
1885 if (vma->iomap == NULL) in __i915_vma_iounmap()
1888 if (page_unmask_bits(vma->iomap)) in __i915_vma_iounmap()
1889 __i915_gem_object_release_map(vma->obj); in __i915_vma_iounmap()
1891 io_mapping_unmap(vma->iomap); in __i915_vma_iounmap()
1892 vma->iomap = NULL; in __i915_vma_iounmap()
1895 void i915_vma_revoke_mmap(struct i915_vma *vma) in i915_vma_revoke_mmap() argument
1900 if (!i915_vma_has_userfault(vma)) in i915_vma_revoke_mmap()
1903 GEM_BUG_ON(!i915_vma_is_map_and_fenceable(vma)); in i915_vma_revoke_mmap()
1904 GEM_BUG_ON(!vma->obj->userfault_count); in i915_vma_revoke_mmap()
1906 node = &vma->mmo->vma_node; in i915_vma_revoke_mmap()
1907 vma_offset = vma->gtt_view.partial.offset << PAGE_SHIFT; in i915_vma_revoke_mmap()
1908 unmap_mapping_range(vma->vm->i915->drm.anon_inode->i_mapping, in i915_vma_revoke_mmap()
1910 vma->size, in i915_vma_revoke_mmap()
1913 i915_vma_unset_userfault(vma); in i915_vma_revoke_mmap()
1914 if (!--vma->obj->userfault_count) in i915_vma_revoke_mmap()
1915 list_del(&vma->obj->userfault_link); in i915_vma_revoke_mmap()
1919 __i915_request_await_bind(struct i915_request *rq, struct i915_vma *vma) in __i915_request_await_bind() argument
1921 return __i915_request_await_exclusive(rq, &vma->active); in __i915_request_await_bind()
1924 static int __i915_vma_move_to_active(struct i915_vma *vma, struct i915_request *rq) in __i915_vma_move_to_active() argument
1929 err = __i915_request_await_bind(rq, vma); in __i915_vma_move_to_active()
1933 return i915_active_add_request(&vma->active, rq); in __i915_vma_move_to_active()
1936 int _i915_vma_move_to_active(struct i915_vma *vma, in _i915_vma_move_to_active() argument
1941 struct drm_i915_gem_object *obj = vma->obj; in _i915_vma_move_to_active()
1946 GEM_BUG_ON(!vma->pages); in _i915_vma_move_to_active()
1949 err = i915_request_await_object(rq, vma->obj, flags & EXEC_OBJECT_WRITE); in _i915_vma_move_to_active()
1953 err = __i915_vma_move_to_active(vma, rq); in _i915_vma_move_to_active()
1967 err = dma_resv_reserve_fences(vma->obj->base.resv, idx); in _i915_vma_move_to_active()
1998 dma_resv_add_fence(vma->obj->base.resv, curr, usage); in _i915_vma_move_to_active()
2001 if (flags & EXEC_OBJECT_NEEDS_FENCE && vma->fence) in _i915_vma_move_to_active()
2002 i915_active_add_request(&vma->fence->active, rq); in _i915_vma_move_to_active()
2007 GEM_BUG_ON(!i915_vma_is_active(vma)); in _i915_vma_move_to_active()
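
Callers normally reach _i915_vma_move_to_active() through the i915_vma_move_to_active() wrapper while building a request, with the object locked and the vma pinned, so that the request waits for the bind and keeps the vma active. A hedged sketch of that ordering; rq is an i915_request under construction and error handling is trimmed:

	/* Between i915_request_create() and i915_request_add(), with the
	 * object lock held and the vma pinned: */
	err = i915_vma_move_to_active(vma, rq, EXEC_OBJECT_WRITE);
	if (err)
		return err;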
2011 struct dma_fence *__i915_vma_evict(struct i915_vma *vma, bool async) in __i915_vma_evict() argument
2013 struct i915_vma_resource *vma_res = vma->resource; in __i915_vma_evict()
2016 GEM_BUG_ON(i915_vma_is_pinned(vma)); in __i915_vma_evict()
2017 assert_vma_held_evict(vma); in __i915_vma_evict()
2019 if (i915_vma_is_map_and_fenceable(vma)) { in __i915_vma_evict()
2021 i915_vma_revoke_mmap(vma); in __i915_vma_evict()
2036 i915_vma_flush_writes(vma); in __i915_vma_evict()
2039 i915_vma_revoke_fence(vma); in __i915_vma_evict()
2041 clear_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(vma)); in __i915_vma_evict()
2044 __i915_vma_iounmap(vma); in __i915_vma_evict()
2046 GEM_BUG_ON(vma->fence); in __i915_vma_evict()
2047 GEM_BUG_ON(i915_vma_has_userfault(vma)); in __i915_vma_evict()
2050 GEM_WARN_ON(async && !vma->resource->bi.pages_rsgt); in __i915_vma_evict()
2053 vma_res->needs_wakeref = i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND) && in __i915_vma_evict()
2054 kref_read(&vma->vm->ref); in __i915_vma_evict()
2055 vma_res->skip_pte_rewrite = !kref_read(&vma->vm->ref) || in __i915_vma_evict()
2056 vma->vm->skip_pte_rewrite; in __i915_vma_evict()
2057 trace_i915_vma_unbind(vma); in __i915_vma_evict()
2061 vma->obj->mm.tlb); in __i915_vma_evict()
2065 vma->resource = NULL; in __i915_vma_evict()
2068 &vma->flags); in __i915_vma_evict()
2070 i915_vma_detach(vma); in __i915_vma_evict()
2078 vma_invalidate_tlb(vma->vm, vma->obj->mm.tlb); in __i915_vma_evict()
2087 vma_unbind_pages(vma); in __i915_vma_evict()
2091 int __i915_vma_unbind(struct i915_vma *vma) in __i915_vma_unbind() argument
2095 lockdep_assert_held(&vma->vm->mutex); in __i915_vma_unbind()
2096 assert_vma_held_evict(vma); in __i915_vma_unbind()
2098 if (!drm_mm_node_allocated(&vma->node)) in __i915_vma_unbind()
2101 if (i915_vma_is_pinned(vma)) { in __i915_vma_unbind()
2102 vma_print_allocator(vma, "is pinned"); in __i915_vma_unbind()
2111 ret = i915_vma_sync(vma); in __i915_vma_unbind()
2115 GEM_BUG_ON(i915_vma_is_active(vma)); in __i915_vma_unbind()
2116 __i915_vma_evict(vma, false); in __i915_vma_unbind()
2118 drm_mm_remove_node(&vma->node); /* pairs with i915_vma_release() */ in __i915_vma_unbind()
2122 static struct dma_fence *__i915_vma_unbind_async(struct i915_vma *vma) in __i915_vma_unbind_async() argument
2126 lockdep_assert_held(&vma->vm->mutex); in __i915_vma_unbind_async()
2128 if (!drm_mm_node_allocated(&vma->node)) in __i915_vma_unbind_async()
2131 if (i915_vma_is_pinned(vma) || in __i915_vma_unbind_async()
2132 &vma->obj->mm.rsgt->table != vma->resource->bi.pages) in __i915_vma_unbind_async()
2144 if (i915_sw_fence_await_active(&vma->resource->chain, &vma->active, in __i915_vma_unbind_async()
2150 fence = __i915_vma_evict(vma, true); in __i915_vma_unbind_async()
2152 drm_mm_remove_node(&vma->node); /* pairs with i915_vma_release() */ in __i915_vma_unbind_async()
2157 int i915_vma_unbind(struct i915_vma *vma) in i915_vma_unbind() argument
2159 struct i915_address_space *vm = vma->vm; in i915_vma_unbind()
2163 assert_object_held_shared(vma->obj); in i915_vma_unbind()
2166 err = i915_vma_sync(vma); in i915_vma_unbind()
2170 if (!drm_mm_node_allocated(&vma->node)) in i915_vma_unbind()
2173 if (i915_vma_is_pinned(vma)) { in i915_vma_unbind()
2174 vma_print_allocator(vma, "is pinned"); in i915_vma_unbind()
2178 if (i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND)) in i915_vma_unbind()
2182 err = mutex_lock_interruptible_nested(&vma->vm->mutex, !wakeref); in i915_vma_unbind()
2186 err = __i915_vma_unbind(vma); in i915_vma_unbind()
2195 int i915_vma_unbind_async(struct i915_vma *vma, bool trylock_vm) in i915_vma_unbind_async() argument
2197 struct drm_i915_gem_object *obj = vma->obj; in i915_vma_unbind_async()
2198 struct i915_address_space *vm = vma->vm; in i915_vma_unbind_async()
2209 if (!drm_mm_node_allocated(&vma->node)) in i915_vma_unbind_async()
2212 if (i915_vma_is_pinned(vma)) { in i915_vma_unbind_async()
2213 vma_print_allocator(vma, "is pinned"); in i915_vma_unbind_async()
2229 if (i915_vma_is_bound(vma, I915_VMA_GLOBAL_BIND)) in i915_vma_unbind_async()
2241 fence = __i915_vma_unbind_async(vma); in i915_vma_unbind_async()
2257 int i915_vma_unbind_unlocked(struct i915_vma *vma) in i915_vma_unbind_unlocked() argument
2261 i915_gem_object_lock(vma->obj, NULL); in i915_vma_unbind_unlocked()
2262 err = i915_vma_unbind(vma); in i915_vma_unbind_unlocked()
2263 i915_gem_object_unlock(vma->obj); in i915_vma_unbind_unlocked()
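
The unbind entry points differ mainly in locking, as the asserts above show: __i915_vma_unbind() runs with vm->mutex already held, i915_vma_unbind() takes vm->mutex itself but expects the object lock to be held, and i915_vma_unbind_unlocked() additionally takes the object lock. A minimal sketch for a caller that holds neither lock:

	err = i915_vma_unbind_unlocked(vma);
	if (err)
		return err;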
2268 struct i915_vma *i915_vma_make_unshrinkable(struct i915_vma *vma) in i915_vma_make_unshrinkable() argument
2270 i915_gem_object_make_unshrinkable(vma->obj); in i915_vma_make_unshrinkable()
2271 return vma; in i915_vma_make_unshrinkable()
2274 void i915_vma_make_shrinkable(struct i915_vma *vma) in i915_vma_make_shrinkable() argument
2276 i915_gem_object_make_shrinkable(vma->obj); in i915_vma_make_shrinkable()
2279 void i915_vma_make_purgeable(struct i915_vma *vma) in i915_vma_make_purgeable() argument
2281 i915_gem_object_make_purgeable(vma->obj); in i915_vma_make_purgeable()