Lines matching references to the identifier bo (numbers are line numbers in the original source file; lines that do not use the identifier bo are elided)

32 static int __qxl_bo_pin(struct qxl_bo *bo);
33 static void __qxl_bo_unpin(struct qxl_bo *bo);
37 struct qxl_bo *bo;
40 bo = to_qxl_bo(tbo);
41 qdev = to_qxl(bo->tbo.base.dev);
43 qxl_surface_evict(qdev, bo, false);
44 WARN_ON_ONCE(bo->map_count > 0);
46 list_del_init(&bo->list);
48 drm_gem_object_release(&bo->tbo.base);
49 kfree(bo);
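
The fragments above are the body of the TTM destroy callback, qxl_ttm_bo_destroy(): evict any hardware surface, drop the bo from the device's gem object list, release the embedded GEM object, and free the wrapper. A reconstruction sketch; since only lines using the identifier bo are matched, the locking around the list removal is an assumption:

    static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo)
    {
        struct qxl_bo *bo;
        struct qxl_device *qdev;

        bo = to_qxl_bo(tbo);
        qdev = to_qxl(bo->tbo.base.dev);

        qxl_surface_evict(qdev, bo, false);
        WARN_ON_ONCE(bo->map_count > 0);  /* destroying a still-vmapped bo is a bug */
        mutex_lock(&qdev->gem.mutex);     /* assumed: protects qdev->gem.objects */
        list_del_init(&bo->list);
        mutex_unlock(&qdev->gem.mutex);
        drm_gem_object_release(&bo->tbo.base);
        kfree(bo);
    }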
52 bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo)
54 if (bo->destroy == &qxl_ttm_bo_destroy)
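
The matched line is the whole test in qxl_ttm_bo_is_qxl_bo(); the elided lines are just the two returns, filled in here:

    bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo)
    {
        /* A bo belongs to this driver iff it uses the qxl destroy callback. */
        if (bo->destroy == &qxl_ttm_bo_destroy)
            return true;
        return false;
    }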
113 struct qxl_bo *bo;
122 bo = kzalloc(sizeof(struct qxl_bo), GFP_KERNEL);
123 if (bo == NULL)
126 r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size);
128 kfree(bo);
131 bo->tbo.base.funcs = &qxl_object_funcs;
132 bo->type = domain;
133 bo->surface_id = 0;
134 INIT_LIST_HEAD(&bo->list);
137 bo->surf = *surf;
139 qxl_ttm_placement_from_domain(bo, domain);
141 bo->tbo.priority = priority;
142 r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type,
143 &bo->placement, 0, &ctx, NULL, NULL,
153 ttm_bo_pin(&bo->tbo);
154 ttm_bo_unreserve(&bo->tbo);
155 *bo_ptr = bo;
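
Lines 113-155 are evidently the bo creation path (qxl_bo_create() in this driver): allocate the wrapper, init the GEM object, record type and surface data, build a TTM placement from the requested domain, and hand the object to TTM via ttm_bo_init_reserved(); on success the bo comes back reserved, so it is optionally pinned and then unreserved before being returned through *bo_ptr. A hedged caller sketch; the full prototype is not among the matched lines, so the kernel/pinned flags and their order are assumptions:

    struct qxl_bo *bo = NULL;
    int r;

    /* Hypothetical call: one page in VRAM, pinned, default priority. */
    r = qxl_bo_create(qdev, PAGE_SIZE, true /* kernel */, true /* pinned */,
                      QXL_GEM_DOMAIN_VRAM, 0 /* priority */, NULL /* surf */, &bo);
    if (r)
        return r;

    /* ... use bo ... */

    qxl_bo_unref(&bo);    /* drops the reference taken at creation */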
159 int qxl_bo_vmap_locked(struct qxl_bo *bo, struct iosys_map *map)
163 dma_resv_assert_held(bo->tbo.base.resv);
165 if (bo->kptr) {
166 bo->map_count++;
170 r = __qxl_bo_pin(bo);
174 r = ttm_bo_vmap(&bo->tbo, &bo->map);
176 __qxl_bo_unpin(bo);
179 bo->map_count = 1;
182 if (bo->map.is_iomem)
183 bo->kptr = (void *)bo->map.vaddr_iomem;
185 bo->kptr = bo->map.vaddr;
188 *map = bo->map;
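
qxl_bo_vmap_locked() keeps one shared kernel mapping per bo, refcounted through map_count: a repeat caller just bumps the count and reuses bo->map, while the first caller pins the bo (so TTM cannot move it while the CPU mapping is live) and creates the mapping with ttm_bo_vmap(). A reconstruction, with the elided control flow filled in as assumptions:

    int qxl_bo_vmap_locked(struct qxl_bo *bo, struct iosys_map *map)
    {
        int r;

        dma_resv_assert_held(bo->tbo.base.resv);

        if (bo->kptr) {
            /* Already mapped: reuse the existing mapping. */
            bo->map_count++;
            goto out;
        }

        r = __qxl_bo_pin(bo);    /* keep the bo resident while mapped */
        if (r)
            return r;

        r = ttm_bo_vmap(&bo->tbo, &bo->map);
        if (r) {
            __qxl_bo_unpin(bo);
            return r;
        }
        bo->map_count = 1;

        /* Cache a plain pointer alongside the iosys_map. */
        if (bo->map.is_iomem)
            bo->kptr = (void *)bo->map.vaddr_iomem;
        else
            bo->kptr = bo->map.vaddr;

    out:
        *map = bo->map;
        return 0;
    }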
192 int qxl_bo_vmap(struct qxl_bo *bo, struct iosys_map *map)
196 r = qxl_bo_reserve(bo);
200 r = qxl_bo_vmap_locked(bo, map);
201 qxl_bo_unreserve(bo);
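
qxl_bo_vmap() is the unlocked wrapper: reserve (lock) the bo, call the _locked variant, unreserve. The same reserve/act/unreserve pattern repeats for vunmap, pin, and unpin below. Sketch with the elided error returns assumed:

    int qxl_bo_vmap(struct qxl_bo *bo, struct iosys_map *map)
    {
        int r;

        r = qxl_bo_reserve(bo);
        if (r)
            return r;

        r = qxl_bo_vmap_locked(bo, map);
        qxl_bo_unreserve(bo);
        return r;
    }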
206 struct qxl_bo *bo, int page_offset)
214 if (bo->tbo.resource->mem_type == TTM_PL_VRAM)
216 else if (bo->tbo.resource->mem_type == TTM_PL_PRIV)
221 offset = bo->tbo.resource->start << PAGE_SHIFT;
224 if (bo->kptr) {
225 rptr = bo->kptr + (page_offset * PAGE_SIZE);
229 ret = qxl_bo_vmap_locked(bo, &bo_map);
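
Lines 206-229 are evidently qxl_bo_kmap_atomic_page(), which maps a single page of a bo for short-term CPU access: VRAM and PRIV placements go through an io-mapping of the aperture at the bo's linear offset (resource->start is in pages, hence the << PAGE_SHIFT), an existing kptr mapping is reused by indexing page_offset pages into it, and anything else falls back to a full qxl_bo_vmap_locked(). A sketch of that dispatch; the qdev mapping field names, the exact io-mapping call, and the unmatched control flow are all assumptions:

    void *qxl_bo_kmap_atomic_page(struct qxl_device *qdev,
                                  struct qxl_bo *bo, int page_offset)
    {
        unsigned long offset;
        struct iosys_map bo_map;
        struct io_mapping *map;    /* assumed type of the aperture mapping */
        void *rptr;
        int ret;

        if (bo->tbo.resource->mem_type == TTM_PL_VRAM)
            map = qdev->vram_mapping;       /* assumed field name */
        else if (bo->tbo.resource->mem_type == TTM_PL_PRIV)
            map = qdev->surface_mapping;    /* assumed field name */
        else
            goto fallback;

        offset = bo->tbo.resource->start << PAGE_SHIFT;
        /* assumed: map one page of the aperture at the bo's offset */
        return io_mapping_map_local_wc(map, offset + page_offset * PAGE_SIZE);

    fallback:
        if (bo->kptr) {
            /* A persistent kernel mapping exists: index into it directly. */
            rptr = bo->kptr + (page_offset * PAGE_SIZE);
            return rptr;
        }

        ret = qxl_bo_vmap_locked(bo, &bo_map);
        if (ret)
            return NULL;
        rptr = bo_map.vaddr;                /* assumed fallback path */
        rptr += page_offset * PAGE_SIZE;
        return rptr;
    }

The kunmap counterpart at lines 266-275 mirrors this: placements other than TTM_PL_VRAM/TTM_PL_PRIV take the qxl_bo_vunmap_locked() fallback.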
238 void qxl_bo_vunmap_locked(struct qxl_bo *bo)
240 dma_resv_assert_held(bo->tbo.base.resv);
242 if (bo->kptr == NULL)
244 bo->map_count--;
245 if (bo->map_count > 0)
247 bo->kptr = NULL;
248 ttm_bo_vunmap(&bo->tbo, &bo->map);
249 __qxl_bo_unpin(bo);
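
qxl_bo_vunmap_locked() is the inverse: drop one map_count reference and, only when it hits zero, tear down the mapping and release the pin taken by the first vmap. Reconstruction with the elided early returns filled in:

    void qxl_bo_vunmap_locked(struct qxl_bo *bo)
    {
        dma_resv_assert_held(bo->tbo.base.resv);

        if (bo->kptr == NULL)
            return;             /* never mapped: nothing to do */
        bo->map_count--;
        if (bo->map_count > 0)
            return;             /* other users still hold the mapping */
        bo->kptr = NULL;
        ttm_bo_vunmap(&bo->tbo, &bo->map);
        __qxl_bo_unpin(bo);     /* balance the pin from the first vmap */
    }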
252 int qxl_bo_vunmap(struct qxl_bo *bo)
256 r = qxl_bo_reserve(bo);
260 qxl_bo_vunmap_locked(bo);
261 qxl_bo_unreserve(bo);
266 struct qxl_bo *bo, void *pmap)
268 if ((bo->tbo.resource->mem_type != TTM_PL_VRAM) &&
269 (bo->tbo.resource->mem_type != TTM_PL_PRIV))
275 qxl_bo_vunmap_locked(bo);
278 void qxl_bo_unref(struct qxl_bo **bo)
280 if ((*bo) == NULL)
283 drm_gem_object_put(&(*bo)->tbo.base);
284 *bo = NULL;
287 struct qxl_bo *qxl_bo_ref(struct qxl_bo *bo)
289 drm_gem_object_get(&bo->tbo.base);
290 return bo;
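
Reference counting is delegated to the embedded GEM object: qxl_bo_ref()/qxl_bo_unref() are thin wrappers over drm_gem_object_get()/put(), and the final put runs qxl_ttm_bo_destroy() above. The unref helper also NULLs the caller's pointer, which is why it takes a struct qxl_bo **:

    void qxl_bo_unref(struct qxl_bo **bo)
    {
        if ((*bo) == NULL)
            return;

        drm_gem_object_put(&(*bo)->tbo.base);
        *bo = NULL;    /* poison the caller's pointer */
    }

    struct qxl_bo *qxl_bo_ref(struct qxl_bo *bo)
    {
        drm_gem_object_get(&bo->tbo.base);
        return bo;
    }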
293 static int __qxl_bo_pin(struct qxl_bo *bo)
296 struct drm_device *ddev = bo->tbo.base.dev;
299 if (bo->tbo.pin_count) {
300 ttm_bo_pin(&bo->tbo);
303 qxl_ttm_placement_from_domain(bo, bo->type);
304 r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
306 ttm_bo_pin(&bo->tbo);
308 dev_err(ddev->dev, "%p pin failed\n", bo);
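
__qxl_bo_pin() expects the bo to be reserved. If it is already pinned, the TTM pin count is simply incremented; otherwise the bo is first validated into its preferred domain so the pin fixes it in the right placement. A reconstruction; the branch conditions on lines 305/307 do not use the identifier bo and are therefore assumptions:

    static int __qxl_bo_pin(struct qxl_bo *bo)
    {
        struct ttm_operation_ctx ctx = { false, false };
        struct drm_device *ddev = bo->tbo.base.dev;
        int r;

        if (bo->tbo.pin_count) {
            ttm_bo_pin(&bo->tbo);    /* already resident: just bump the count */
            return 0;
        }
        qxl_ttm_placement_from_domain(bo, bo->type);
        r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
        if (likely(r == 0))
            ttm_bo_pin(&bo->tbo);
        if (unlikely(r != 0))
            dev_err(ddev->dev, "%p pin failed\n", bo);
        return r;
    }

__qxl_bo_unpin() (line 314) is just ttm_bo_unpin(&bo->tbo).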
312 static void __qxl_bo_unpin(struct qxl_bo *bo)
314 ttm_bo_unpin(&bo->tbo);
322 int qxl_bo_pin(struct qxl_bo *bo)
326 r = qxl_bo_reserve(bo);
330 r = __qxl_bo_pin(bo);
331 qxl_bo_unreserve(bo);
340 int qxl_bo_unpin(struct qxl_bo *bo)
344 r = qxl_bo_reserve(bo);
348 __qxl_bo_unpin(bo);
349 qxl_bo_unreserve(bo);
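
qxl_bo_pin()/qxl_bo_unpin() are the public entry points, again wrapping the reserved-only helpers in reserve/unreserve. Sketch with the elided returns assumed:

    int qxl_bo_pin(struct qxl_bo *bo)
    {
        int r;

        r = qxl_bo_reserve(bo);
        if (r)
            return r;

        r = __qxl_bo_pin(bo);
        qxl_bo_unreserve(bo);
        return r;
    }

    int qxl_bo_unpin(struct qxl_bo *bo)
    {
        int r;

        r = qxl_bo_reserve(bo);
        if (r)
            return r;

        __qxl_bo_unpin(bo);    /* cannot fail once reserved */
        qxl_bo_unreserve(bo);
        return 0;
    }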
355 struct qxl_bo *bo, *n;
360 list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {
362 &bo->tbo.base, bo, (unsigned long)bo->tbo.base.size,
363 *((unsigned long *)&bo->tbo.base.refcount));
365 list_del_init(&bo->list);
367 /* this should unref the ttm bo */
368 drm_gem_object_put(&bo->tbo.base);
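
Lines 355-368 are a teardown-time sweep (qxl_bo_force_delete() in this driver) that complains about and drops any gem objects userspace leaked; the refcount is printed by type-punning the embedded kref, and the final drm_gem_object_put() is expected to trigger qxl_ttm_bo_destroy(). A reconstruction; the locking and the unmatched dev_err lines are assumptions:

    void qxl_bo_force_delete(struct qxl_device *qdev)
    {
        struct qxl_bo *bo, *n;

        if (list_empty(&qdev->gem.objects))
            return;
        dev_err(qdev->ddev.dev, "Userspace still has active objects!\n");  /* assumed message */
        list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {
            dev_err(qdev->ddev.dev, "%p %p %lu %lu force free\n",
                    &bo->tbo.base, bo, (unsigned long)bo->tbo.base.size,
                    *((unsigned long *)&bo->tbo.base.refcount));
            mutex_lock(&qdev->gem.mutex);    /* assumed: protects the list */
            list_del_init(&bo->list);
            mutex_unlock(&qdev->gem.mutex);
            /* this should unref the ttm bo */
            drm_gem_object_put(&bo->tbo.base);
        }
    }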
382 int qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo)
386 if (bo->type == QXL_GEM_DOMAIN_SURFACE && bo->surface_id == 0) {
388 ret = qxl_surface_id_alloc(qdev, bo);
392 ret = qxl_hw_surface_alloc(qdev, bo);
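
qxl_bo_check_id() lazily binds a hardware surface to a bo: surface-domain objects start with surface_id == 0 and only get an id (and a hardware surface allocation) on first use. Reconstruction with the elided error returns assumed:

    int qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo)
    {
        int ret;

        if (bo->type == QXL_GEM_DOMAIN_SURFACE && bo->surface_id == 0) {
            /* first use: allocate a surface id, then the hw surface */
            ret = qxl_surface_id_alloc(qdev, bo);
            if (ret)
                return ret;

            ret = qxl_hw_surface_alloc(qdev, bo);
            if (ret)
                return ret;
        }
        return 0;
    }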