Lines matching references to err

106 int err;
108 err = i915_gem_object_set_tiling(obj, tile->tiling, tile->stride);
109 if (err) {
110 pr_err("Failed to set tiling mode=%u, stride=%u, err=%d\n",
111 tile->tiling, tile->stride, err);
112 return err;
119 err = i915_gem_object_set_to_gtt_domain(obj, true);
121 if (err) {
122 pr_err("Failed to flush to GTT write domain; err=%d\n", err);
123 return err;
131 pr_err("Failed to pin partial view: offset=%lu; err=%d\n",
142 pr_err("Failed to iomap partial view: offset=%lu; err=%d\n",
144 err = PTR_ERR(io);
172 err = -EINVAL;
182 return err;
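
The matches above (file lines 106-182) show the selftest's standard failure path: log a pr_err() with enough parameters to reproduce the failure, then propagate the errno. Calls that return pointers are converted with PTR_ERR() before being returned as an int, which is what file line 144 does for the iomap. A minimal self-contained sketch of that idiom; pin_view() and try_pin() are hypothetical names, not helpers from the file:

    #include <linux/err.h>
    #include <linux/printk.h>

    struct drm_i915_gem_object;
    struct i915_vma;

    /* Hypothetical helper: returns a valid vma or ERR_PTR(-errno). */
    struct i915_vma *pin_view(struct drm_i915_gem_object *obj);

    static int try_pin(struct drm_i915_gem_object *obj, unsigned long page)
    {
            struct i915_vma *vma = pin_view(obj);

            if (IS_ERR(vma)) {
                    /* Log the context needed to reproduce, then return
                     * the errno encoded in the pointer as a plain int.
                     */
                    pr_err("Failed to pin partial view: offset=%lu; err=%d\n",
                           page, (int)PTR_ERR(vma));
                    return PTR_ERR(vma);
            }

            return 0;
    }
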
194 int err;
196 err = i915_gem_object_set_tiling(obj, tile->tiling, tile->stride);
197 if (err) {
198 pr_err("Failed to set tiling mode=%u, stride=%u, err=%d\n",
199 tile->tiling, tile->stride, err);
200 return err;
207 err = i915_gem_object_set_to_gtt_domain(obj, true);
209 if (err) {
210 pr_err("Failed to flush to GTT write domain; err=%d\n", err);
211 return err;
228 pr_err("Failed to pin partial view: offset=%lu; err=%d\n",
239 pr_err("Failed to iomap partial view: offset=%lu; err=%d\n",
268 err = -EINVAL;
273 if (err)
274 return err;
322 int err;
341 err = i915_gem_object_pin_pages_unlocked(obj);
342 if (err) {
343 pr_err("Failed to allocate %u pages (%lu total), err=%d\n",
344 nreal, obj->base.size / PAGE_SIZE, err);
361 err = check_partial_mappings(obj, &tile, end);
362 if (err && err != -EINTR)
399 err = check_partial_mappings(obj, &tile, end);
400 if (err == -EINTR)
402 if (err)
407 err = check_partial_mappings(obj, &tile, end);
408 if (err == -EINTR)
410 if (err)
416 err = check_partial_mappings(obj, &tile, end);
417 if (err == -EINTR)
419 if (err)
427 err = check_partial_mappings(obj, &tile, end);
428 if (err == -EINTR)
430 if (err)
443 return err;
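
The five checks at file lines 361-430 all apply the same filter: check_partial_mappings() returns -EINTR when a pending signal interrupts it, and the selftest treats that as "rerun later" rather than a failure, so -EINTR never reaches the final return err. A sketch of the idiom; next_tiling and out_unlock are stand-ins for whatever labels the loop actually uses:

    err = check_partial_mappings(obj, &tile, end);
    if (err == -EINTR)
            goto next_tiling;       /* interrupted by a signal: skip, don't fail */
    if (err)
            goto out_unlock;        /* any other errno is a real failure */
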
455 int err;
478 err = i915_gem_object_pin_pages_unlocked(obj);
479 if (err) {
480 pr_err("Failed to allocate %u pages (%lu total), err=%d\n",
481 nreal, obj->base.size / PAGE_SIZE, err);
524 err = check_partial_mapping(obj, &tile, &prng);
525 if (err)
537 return err;
549 int err;
557 err = i915_gem_object_lock(obj, &ww);
558 if (!err)
559 err = i915_vma_pin_ww(vma, &ww, 0, 0, PIN_USER);
560 if (err)
561 goto err;
565 err = PTR_ERR(rq);
569 err = i915_vma_move_to_active(vma, rq,
575 err:
576 if (err == -EDEADLK) {
577 err = i915_gem_ww_ctx_backoff(&ww);
578 if (!err)
582 if (err)
583 return err;
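
File lines 557-583 are the i915 ww-mutex retry idiom: take the object lock under an acquire context, and if any later step fails with -EDEADLK (a lock-ordering conflict with another thread), back off and retry the whole locked sequence. A condensed sketch assembled from those lines; the request-building step and the error unwind are elided, and obj/vma are assumed to exist:

    struct i915_gem_ww_ctx ww;
    int err;

    i915_gem_ww_ctx_init(&ww, false);
    retry:
    err = i915_gem_object_lock(obj, &ww);
    if (!err)
            err = i915_vma_pin_ww(vma, &ww, 0, 0, PIN_USER);
    if (err)
            goto err;

    /* ... allocate the request and move the vma to active ... */

    err:
    if (err == -EDEADLK) {
            /* Another thread won the lock order: drop our locks,
             * wait our turn, and run the sequence again.
             */
            err = i915_gem_ww_ctx_backoff(&ww);
            if (!err)
                    goto retry;
    }
    i915_gem_ww_ctx_fini(&ww);
    return err;
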
662 int loop, err = 0;
680 err = -ENOMEM;
697 err = drm_mm_reserve_node(mm, resv);
698 if (err) {
699 pr_err("Failed to trim VMA manager, err=%d\n", err);
710 err = -EINVAL;
717 err = -EINVAL;
724 err = PTR_ERR(obj);
729 err = __assign_mmap_offset(obj, default_mapping(i915), &offset, NULL);
730 if (err) {
737 err = -EINVAL;
750 err = PTR_ERR(obj);
754 err = make_obj_busy(obj);
755 if (err) {
773 return err;
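
File lines 697-699 belong to the setup of the exhaustion test: a drm_mm_node is reserved over the mmap-offset manager's free space so that later offset allocations have almost nowhere to go. drm_mm_reserve_node() places a node at a caller-chosen start/size and fails if that range is already occupied. A sketch of the call; mm, resv, hole_start and hole_size are assumptions standing in for values the test derives by walking the manager's free holes:

    resv->start = hole_start;
    resv->size  = hole_size;

    err = drm_mm_reserve_node(mm, resv);
    if (err) {
            pr_err("Failed to trim VMA manager, err=%d\n", err);
            goto out;
    }
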
784 int err = 0;
794 err = PTR_ERR(map);
803 return err;
811 int err = 0;
821 err = PTR_ERR(map);
828 err = -EINVAL;
834 return err;
855 int err = 0;
864 err = -EINVAL;
868 return err;
901 int err, i;
907 err = wc_set(obj);
908 if (err == -ENXIO)
909 err = gtt_set(obj);
910 if (err)
911 return err;
913 err = __assign_mmap_offset(obj, type, &offset, NULL);
914 if (err)
915 return err;
929 err = -EINVAL;
940 err = -EFAULT;
948 err = -EINVAL;
956 err = -EFAULT;
964 err = wc_check(obj);
965 if (err == -ENXIO)
966 err = gtt_check(obj);
969 return err;
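
File lines 907-911 (and again at 964-966) show the CPU-mapping fallback: wc_set()/wc_check() return -ENXIO when the object cannot be mapped write-combined, and only that errno triggers the fall back to a GTT mapping; anything else aborts the test. The shape in isolation:

    err = wc_set(obj);              /* preferred: write-combined CPU map */
    if (err == -ENXIO)
            err = gtt_set(obj);     /* no WC mapping possible: use the GTT */
    if (err)
            return err;             /* any other errno is a real failure */
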
991 int err;
1000 err = __igt_mmap(i915, obj, I915_MMAP_TYPE_GTT);
1001 if (err == 0)
1002 err = __igt_mmap(i915, obj, I915_MMAP_TYPE_WC);
1003 if (err == 0)
1004 err = __igt_mmap(i915, obj, I915_MMAP_TYPE_FIXED);
1007 if (err)
1008 return err;
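
File lines 1000-1004 (and the longer chains at 1524-1532, 1670-1674 and 1824-1828) run the same check once per mmap type, chained with if (err == 0) so the first failure short-circuits the rest and its errno is what gets reported. An equivalent loop over a hypothetical types[] table makes the invariant explicit: err stays 0 only while every stage succeeds.

    static const enum i915_mmap_type types[] = {
            I915_MMAP_TYPE_GTT,
            I915_MMAP_TYPE_WC,
            I915_MMAP_TYPE_FIXED,
    };
    int i, err = 0;

    for (i = 0; i < ARRAY_SIZE(types) && err == 0; i++)
            err = __igt_mmap(i915, obj, types[i]);
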
1054 int err;
1063 err = PTR_ERR(obj);
1069 err = i915_gem_object_pin_pages_unlocked(obj);
1070 if (err) {
1071 if (err != -ENXIO && err != -ENOMEM)
1075 err = 0;
1091 return err;
1100 int err = 0, i;
1111 err = -EINVAL;
1120 err = -EFAULT;
1133 err = -EINVAL;
1141 err = -EINVAL;
1149 err = -EFAULT;
1155 if (err == -EFAULT)
1156 err = 0;
1159 err = wc_check(obj);
1163 return err;
1182 int err;
1193 err = __assign_mmap_offset(obj, I915_MMAP_TYPE_FIXED, &offset, NULL);
1194 if (err)
1206 err = addr;
1211 err = igt_fill_mappable(placements[0], &objects);
1212 if (err)
1216 err = i915_gem_object_lock(obj, NULL);
1217 if (err)
1220 err = i915_gem_object_pin_pages(obj);
1221 if (err) {
1226 err = intel_context_migrate_clear(to_gt(i915)->migrate.context, NULL,
1232 err = dma_resv_reserve_fences(obj->base.resv, 1);
1233 if (!err)
1239 if (err)
1246 err = i915_gem_object_lock(obj, NULL);
1247 if (err)
1254 err = i915_gem_object_wait_moving_fence(obj, true);
1256 if (err)
1261 err = ___igt_mmap_migrate(i915, obj, addr,
1264 if (!err && obj->mm.region != expected_mr) {
1266 err = -EINVAL;
1284 err = -EINVAL;
1297 err = -EINVAL;
1304 return err;
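
File lines 1232-1239 rely on the two-step dma-resv rule: a fence slot must be reserved with dma_resv_reserve_fences() before a fence may be added, so the add happens only when the reservation succeeded and err otherwise carries the reservation failure. A minimal sketch of that ordering; fence and out_unpin are assumptions, and DMA_RESV_USAGE_KERNEL is a plausible usage class for a kernel-internal clear:

    err = dma_resv_reserve_fences(obj->base.resv, 1);
    if (!err)
            /* The slot is guaranteed, so publishing cannot fail now. */
            dma_resv_add_fence(obj->base.resv, fence,
                               DMA_RESV_USAGE_KERNEL);
    dma_fence_put(fence);
    if (err)
            goto out_unpin;
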
1319 int err;
1347 err = __igt_mmap_migrate(mixed, ARRAY_SIZE(mixed), mr, 0);
1348 if (err)
1355 err = __igt_mmap_migrate(single, ARRAY_SIZE(single), mr,
1359 if (err)
1366 err = __igt_mmap_migrate(mixed, ARRAY_SIZE(mixed), system,
1369 if (err)
1377 err = __igt_mmap_migrate(single, ARRAY_SIZE(single), mr,
1381 if (err)
1392 err = __igt_mmap_migrate(single, ARRAY_SIZE(single), mr,
1402 if (err)
1403 return err;
1441 int err;
1450 err = __assign_mmap_offset(obj, type, &offset, NULL);
1451 if (err)
1452 return err;
1459 err = __put_user(A, ptr);
1460 if (err) {
1468 err = access_process_vm(current, addr, &x, sizeof(x), 0);
1469 if (err != sizeof(x)) {
1475 err = access_process_vm(current, addr, &B, sizeof(B), FOLL_WRITE);
1476 if (err != sizeof(B)) {
1484 err = __get_user(y, ptr);
1485 if (err) {
1495 err = -EINVAL;
1501 return err;
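
File lines 1459-1485 mix two return conventions, which is why the checks differ: __put_user()/__get_user() return 0 on success or -EFAULT on a fault, while access_process_vm() returns the number of bytes transferred, so success is tested as err == sizeof(x). A sketch of both checks side by side; converting the short transfer to -EFAULT is illustrative, the test itself logs and jumps to its cleanup label:

    err = __put_user(A, ptr);       /* 0 on success, -EFAULT on fault */
    if (err)
            return err;

    err = access_process_vm(current, addr, &x, sizeof(x), 0);
    if (err != sizeof(x))           /* returns bytes copied, not an errno */
            return -EFAULT;
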
1512 int err;
1524 err = __igt_mmap_access(i915, obj, I915_MMAP_TYPE_GTT);
1525 if (err == 0)
1526 err = __igt_mmap_access(i915, obj, I915_MMAP_TYPE_WB);
1527 if (err == 0)
1528 err = __igt_mmap_access(i915, obj, I915_MMAP_TYPE_WC);
1529 if (err == 0)
1530 err = __igt_mmap_access(i915, obj, I915_MMAP_TYPE_UC);
1531 if (err == 0)
1532 err = __igt_mmap_access(i915, obj, I915_MMAP_TYPE_FIXED);
1535 if (err)
1536 return err;
1550 int err;
1562 err = wc_set(obj);
1563 if (err == -ENXIO)
1564 err = gtt_set(obj);
1565 if (err)
1566 return err;
1568 err = __assign_mmap_offset(obj, type, &offset, NULL);
1569 if (err)
1570 return err;
1580 err = -EFAULT;
1594 err = PTR_ERR(vma);
1600 err = i915_gem_object_lock(obj, &ww);
1601 if (!err)
1602 err = i915_vma_pin_ww(vma, &ww, 0, 0, PIN_USER);
1603 if (err)
1608 err = PTR_ERR(rq);
1612 err = i915_vma_move_to_active(vma, rq, 0);
1614 err = engine->emit_bb_start(rq, i915_vma_offset(vma), 0, 0);
1628 err = -EIO;
1635 if (err == -EDEADLK) {
1636 err = i915_gem_ww_ctx_backoff(&ww);
1637 if (!err)
1641 if (err)
1647 return err;
1658 int err;
1670 err = __igt_mmap_gpu(i915, obj, I915_MMAP_TYPE_GTT);
1671 if (err == 0)
1672 err = __igt_mmap_gpu(i915, obj, I915_MMAP_TYPE_WC);
1673 if (err == 0)
1674 err = __igt_mmap_gpu(i915, obj, I915_MMAP_TYPE_FIXED);
1677 if (err)
1678 return err;
1726 int err;
1732 err = __get_user(c, addr);
1733 if (err)
1734 return err;
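
File lines 1732-1734 look like the body of prefault_range(), which file line 1759 calls: __get_user() on a user address forces a read fault, returning 0 once the page is populated and -EFAULT if it cannot be. Touching one byte per page therefore prefaults the whole mapping. A self-contained sketch consistent with the fragments; the real helper may differ in detail:

    #include <linux/kernel.h>
    #include <linux/uaccess.h>

    static int prefault_range(u64 start, u64 len)
    {
            const char __user *addr = u64_to_user_ptr(start);
            const char __user *end = addr + len;
            char c;
            int err;

            /* One read fault per page populates every PTE in range. */
            for (; addr < end; addr += PAGE_SIZE) {
                    err = __get_user(c, addr);
                    if (err)
                            return err;
            }

            return 0;
    }
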
1745 int err;
1751 err = __assign_mmap_offset(obj, type, &offset, NULL);
1752 if (err)
1753 return err;
1759 err = prefault_range(addr, obj->base.size);
1760 if (err)
1763 err = check_present(addr, obj->base.size);
1764 if (err) {
1775 err = i915_gem_object_unbind(obj, I915_GEM_OBJECT_UNBIND_ACTIVE);
1777 if (err) {
1788 err = -EINVAL;
1793 err = check_absent(addr, obj->base.size);
1794 if (err) {
1801 return err;
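
File lines 1759-1794 spell out the revoke contract being tested: after prefaulting, the whole range must be present; after i915_gem_object_unbind() revokes the mmap, the same range must be absent, i.e. every access must fault. The overall shape, with out_unmap as an assumed cleanup label:

    err = prefault_range(addr, obj->base.size);
    if (err)
            goto out_unmap;

    err = check_present(addr, obj->base.size);      /* must not fault */
    if (err)
            goto out_unmap;

    err = i915_gem_object_unbind(obj, I915_GEM_OBJECT_UNBIND_ACTIVE);
    if (err)
            goto out_unmap;

    err = check_absent(addr, obj->base.size);       /* every page must fault */
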
1812 int err;
1824 err = __igt_mmap_revoke(i915, obj, I915_MMAP_TYPE_GTT);
1825 if (err == 0)
1826 err = __igt_mmap_revoke(i915, obj, I915_MMAP_TYPE_WC);
1827 if (err == 0)
1828 err = __igt_mmap_revoke(i915, obj, I915_MMAP_TYPE_FIXED);
1831 if (err)
1832 return err;