/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */

#include <linux/prime_numbers.h>

#include "gem/i915_gem_context.h"
#include "gem/i915_gem_internal.h"
#include "gem/selftests/mock_context.h"

#include "i915_scatterlist.h"
#include "i915_selftest.h"

#include "mock_gem_device.h"
#include "mock_gtt.h"

static bool assert_vma(struct i915_vma *vma,
		       struct drm_i915_gem_object *obj,
		       struct i915_gem_context *ctx)
{
	bool ok = true;

	if (vma->vm != ctx->vm) {
		pr_err("VMA created with wrong VM\n");
		ok = false;
	}

	if (vma->size != obj->base.size) {
		pr_err("VMA created with wrong size, found %llu, expected %zu\n",
		       vma->size, obj->base.size);
		ok = false;
	}

	if (vma->gtt_view.type != I915_GTT_VIEW_NORMAL) {
		pr_err("VMA created with wrong type [%d]\n",
		       vma->gtt_view.type);
		ok = false;
	}

	return ok;
}

static struct i915_vma *
checked_vma_instance(struct drm_i915_gem_object *obj,
		     struct i915_address_space *vm,
		     const struct i915_gtt_view *view)
{
	struct i915_vma *vma;
	bool ok = true;

	vma = i915_vma_instance(obj, vm, view);
	if (IS_ERR(vma))
		return vma;

	/* Manual checks, will be reinforced by i915_vma_compare! */
	if (vma->vm != vm) {
		pr_err("VMA's vm [%p] does not match request [%p]\n",
		       vma->vm, vm);
		ok = false;
	}

	if (i915_is_ggtt(vm) != i915_vma_is_ggtt(vma)) {
		pr_err("VMA ggtt status [%d] does not match parent [%d]\n",
		       i915_vma_is_ggtt(vma), i915_is_ggtt(vm));
		ok = false;
	}

	if (i915_vma_compare(vma, vm, view)) {
		pr_err("i915_vma_compare failed with create parameters!\n");
		return ERR_PTR(-EINVAL);
	}

	if (i915_vma_compare(vma, vma->vm,
			     i915_vma_is_ggtt(vma) ? &vma->gtt_view : NULL)) {
		pr_err("i915_vma_compare failed with itself\n");
		return ERR_PTR(-EINVAL);
	}

	if (!ok) {
		pr_err("i915_vma_compare failed to detect the difference!\n");
		return ERR_PTR(-EINVAL);
	}

	return vma;
}

static int create_vmas(struct drm_i915_private *i915,
		       struct list_head *objects,
		       struct list_head *contexts)
{
	struct drm_i915_gem_object *obj;
	struct i915_gem_context *ctx;
	int pinned;

	list_for_each_entry(obj, objects, st_link) {
		for (pinned = 0; pinned <= 1; pinned++) {
			list_for_each_entry(ctx, contexts, link) {
				struct i915_address_space *vm;
				struct i915_vma *vma;
				int err;

				vm = i915_gem_context_get_eb_vm(ctx);
				vma = checked_vma_instance(obj, vm, NULL);
				i915_vm_put(vm);
				if (IS_ERR(vma))
					return PTR_ERR(vma);

				if (!assert_vma(vma, obj, ctx)) {
					pr_err("VMA lookup/create failed\n");
					return -EINVAL;
				}

				if (!pinned) {
					err = i915_vma_pin(vma, 0, 0, PIN_USER);
					if (err) {
						pr_err("Failed to pin VMA\n");
						return err;
					}
				} else {
					i915_vma_unpin(vma);
				}
			}
		}
	}

	return 0;
}

static int igt_vma_create(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct drm_i915_private *i915 = ggtt->vm.i915;
	struct drm_i915_gem_object *obj, *on;
	struct i915_gem_context *ctx, *cn;
	unsigned long num_obj, num_ctx;
	unsigned long no, nc;
	IGT_TIMEOUT(end_time);
	LIST_HEAD(contexts);
	LIST_HEAD(objects);
	int err = -ENOMEM;

	/* Exercise creating many VMAs amongst many objects, checking the
	 * VMA creation and lookup routines.
	 */

	no = 0;
	for_each_prime_number(num_obj, ULONG_MAX - 1) {
		for (; no < num_obj; no++) {
			obj = i915_gem_object_create_internal(i915, PAGE_SIZE);
			if (IS_ERR(obj))
				goto out;

			list_add(&obj->st_link, &objects);
		}

		nc = 0;
		for_each_prime_number(num_ctx, 2 * BITS_PER_LONG) {
			for (; nc < num_ctx; nc++) {
				ctx = mock_context(i915, "mock");
				if (!ctx)
					goto out;

				list_move(&ctx->link, &contexts);
			}

			err = create_vmas(i915, &objects, &contexts);
			if (err)
				goto out;

			if (igt_timeout(end_time,
					"%s timed out: after %lu objects in %lu contexts\n",
					__func__, no, nc))
				goto end;
		}

		list_for_each_entry_safe(ctx, cn, &contexts, link) {
			list_del_init(&ctx->link);
			mock_context_close(ctx);
		}

		cond_resched();
	}

end:
	/* Final pass to lookup all created contexts */
	err = create_vmas(i915, &objects, &contexts);
out:
	list_for_each_entry_safe(ctx, cn, &contexts, link) {
		list_del_init(&ctx->link);
		mock_context_close(ctx);
	}

	list_for_each_entry_safe(obj, on, &objects, st_link)
		i915_gem_object_put(obj);
	return err;
}

struct pin_mode {
	u64 size;
	u64 flags;
	bool (*assert)(const struct i915_vma *,
		       const struct pin_mode *mode,
		       int result);
	const char *string;
};

static bool assert_pin_valid(const struct i915_vma *vma,
			     const struct pin_mode *mode,
			     int result)
{
	if (result)
		return false;

	if (i915_vma_misplaced(vma, mode->size, 0, mode->flags))
		return false;

	return true;
}

__maybe_unused
static bool assert_pin_enospc(const struct i915_vma *vma,
			      const struct pin_mode *mode,
			      int result)
{
	return result == -ENOSPC;
}

__maybe_unused
static bool assert_pin_einval(const struct i915_vma *vma,
			      const struct pin_mode *mode,
			      int result)
{
	return result == -EINVAL;
}

static int igt_vma_pin1(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	const struct pin_mode modes[] = {
#define VALID(sz, fl) { .size = (sz), .flags = (fl), .assert = assert_pin_valid, .string = #sz ", " #fl ", (valid) " }
#define __INVALID(sz, fl, check, eval) { .size = (sz), .flags = (fl), .assert = (check), .string = #sz ", " #fl ", (invalid " #eval ")" }
#define INVALID(sz, fl) __INVALID(sz, fl, assert_pin_einval, EINVAL)
#define NOSPACE(sz, fl) __INVALID(sz, fl, assert_pin_enospc, ENOSPC)
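		/*
		 * For PIN_OFFSET_BIAS/PIN_OFFSET_FIXED the requested offset
		 * is OR'ed into the upper, page-aligned bits of the flags
		 * word (hence the "| 4096" style below) and recovered by
		 * masking off the low control bits (see PIN_OFFSET_MASK),
		 * so offsets must be multiples of PAGE_SIZE.
		 */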
		VALID(0, PIN_GLOBAL),
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE),

		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 4096),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | 8192),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),

		VALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
		INVALID(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | ggtt->mappable_end),
		VALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | ggtt->vm.total),
		INVALID(0, PIN_GLOBAL | PIN_OFFSET_FIXED | round_down(U64_MAX, PAGE_SIZE)),

		VALID(4096, PIN_GLOBAL),
		VALID(8192, PIN_GLOBAL),
		VALID(ggtt->mappable_end - 4096, PIN_GLOBAL | PIN_MAPPABLE),
		VALID(ggtt->mappable_end, PIN_GLOBAL | PIN_MAPPABLE),
		NOSPACE(ggtt->mappable_end + 4096, PIN_GLOBAL | PIN_MAPPABLE),
		VALID(ggtt->vm.total - 4096, PIN_GLOBAL),
		VALID(ggtt->vm.total, PIN_GLOBAL),
		NOSPACE(ggtt->vm.total + 4096, PIN_GLOBAL),
		NOSPACE(round_down(U64_MAX, PAGE_SIZE), PIN_GLOBAL),
		INVALID(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_FIXED | (ggtt->mappable_end - 4096)),
		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (ggtt->vm.total - 4096)),
		INVALID(8192, PIN_GLOBAL | PIN_OFFSET_FIXED | (round_down(U64_MAX, PAGE_SIZE) - 4096)),

		VALID(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),

#if !IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM)
		/* Misusing BIAS is a programming error (it is not controllable
		 * from userspace) so when debugging is enabled, it explodes.
		 * However, the tests are still quite interesting for checking
		 * variable start, end and size.
		 */
		NOSPACE(0, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | ggtt->mappable_end),
		NOSPACE(0, PIN_GLOBAL | PIN_OFFSET_BIAS | ggtt->vm.total),
		NOSPACE(8192, PIN_GLOBAL | PIN_MAPPABLE | PIN_OFFSET_BIAS | (ggtt->mappable_end - 4096)),
		NOSPACE(8192, PIN_GLOBAL | PIN_OFFSET_BIAS | (ggtt->vm.total - 4096)),
#endif
		{ },
#undef NOSPACE
#undef INVALID
#undef __INVALID
#undef VALID
	}, *m;
	struct drm_i915_gem_object *obj;
	struct i915_vma *vma;
	int err = -EINVAL;

	/* Exercise all the weird and wonderful i915_vma_pin requests,
	 * focusing on error handling of boundary conditions.
	 */

	GEM_BUG_ON(!drm_mm_clean(&ggtt->vm.mm));

	obj = i915_gem_object_create_internal(ggtt->vm.i915, PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	vma = checked_vma_instance(obj, &ggtt->vm, NULL);
	if (IS_ERR(vma))
		goto out;

	for (m = modes; m->assert; m++) {
		err = i915_vma_pin(vma, m->size, 0, m->flags);
		if (!m->assert(vma, m, err)) {
			pr_err("%s to pin single page into GGTT with mode[%d:%s]: size=%llx flags=%llx, err=%d\n",
			       m->assert == assert_pin_valid ? "Failed" : "Unexpectedly succeeded",
			       (int)(m - modes), m->string, m->size, m->flags,
			       err);
			if (!err)
				i915_vma_unpin(vma);
			err = -EINVAL;
			goto out;
		}

		if (!err) {
			i915_vma_unpin(vma);
			err = i915_vma_unbind_unlocked(vma);
			if (err) {
				pr_err("Failed to unbind single page from GGTT, err=%d\n", err);
				goto out;
			}
		}

		cond_resched();
	}

	err = 0;
out:
	i915_gem_object_put(obj);
	return err;
}

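/*
 * Map view coordinates back to an object page index for a rotated view:
 * each column of the view walks a source column bottom-up, so row 0 of
 * the view reads the last row of the source plane.
 *
 * e.g. for a 2x2 plane with src_stride = 2 and offset = 0:
 *   (x = 0, y = 0) -> src index 2 (bottom-left page)
 *   (x = 0, y = 1) -> src index 0 (top-left page)
 */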
static unsigned long rotated_index(const struct intel_rotation_info *r,
				   unsigned int n,
				   unsigned int x,
				   unsigned int y)
{
	return (r->plane[n].src_stride * (r->plane[n].height - y - 1) +
		r->plane[n].offset + x);
}

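/*
 * Walk one plane of a rotated view and check every sg entry: pages are
 * expected column by column, one page per entry, with any dst_stride
 * padding at the end of a column carried by a single entry whose DMA
 * address is 0 (backed by scratch).
 */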
static struct scatterlist *
assert_rotated(struct drm_i915_gem_object *obj,
	       const struct intel_rotation_info *r, unsigned int n,
	       struct scatterlist *sg)
{
	unsigned int x, y;

	for (x = 0; x < r->plane[n].width; x++) {
		unsigned int left;

		for (y = 0; y < r->plane[n].height; y++) {
			unsigned long src_idx;
			dma_addr_t src;

			if (!sg) {
				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
				       n, x, y);
				return ERR_PTR(-EINVAL);
			}

			src_idx = rotated_index(r, n, x, y);
			src = i915_gem_object_get_dma_address(obj, src_idx);

			if (sg_dma_len(sg) != PAGE_SIZE) {
				pr_err("Invalid sg.length, found %d, expected %lu for rotated page (%d, %d) [src index %lu]\n",
				       sg_dma_len(sg), PAGE_SIZE,
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			if (sg_dma_address(sg) != src) {
				pr_err("Invalid address for rotated page (%d, %d) [src index %lu]\n",
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			sg = sg_next(sg);
		}

		left = (r->plane[n].dst_stride - y) * PAGE_SIZE;

		if (!left)
			continue;

		if (!sg) {
			pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
			       n, x, y);
			return ERR_PTR(-EINVAL);
		}

		if (sg_dma_len(sg) != left) {
			pr_err("Invalid sg.length, found %d, expected %u for rotated page (%d, %d)\n",
			       sg_dma_len(sg), left, x, y);
			return ERR_PTR(-EINVAL);
		}

		if (sg_dma_address(sg) != 0) {
			pr_err("Invalid address, found %pad, expected 0 for rotated page (%d, %d)\n",
			       &sg_dma_address(sg), x, y);
			return ERR_PTR(-EINVAL);
		}

		sg = sg_next(sg);
	}

	return sg;
}

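/*
 * Map view coordinates back to an object page index for a remapped
 * view: rows keep their order, so page (x, y) simply comes from
 * src_stride * y + offset + x in the source plane.
 */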
static unsigned long remapped_index(const struct intel_remapped_info *r,
				    unsigned int n,
				    unsigned int x,
				    unsigned int y)
{
	return (r->plane[n].src_stride * y +
		r->plane[n].offset + x);
}

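/*
 * Walk one plane of a remapped view: pages are emitted row by row and
 * contiguous runs may be coalesced into a single sg entry, so we track
 * the offset consumed within the current entry. Row padding out to
 * dst_stride must be a separate zero-address entry, as for the rotated
 * case.
 */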
static struct scatterlist *
assert_remapped(struct drm_i915_gem_object *obj,
		const struct intel_remapped_info *r, unsigned int n,
		struct scatterlist *sg)
{
	unsigned int x, y;
	unsigned int left = 0;
	unsigned int offset;

	for (y = 0; y < r->plane[n].height; y++) {
		for (x = 0; x < r->plane[n].width; x++) {
			unsigned long src_idx;
			dma_addr_t src;

			if (!sg) {
				pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
				       n, x, y);
				return ERR_PTR(-EINVAL);
			}
			if (!left) {
				offset = 0;
				left = sg_dma_len(sg);
			}

			src_idx = remapped_index(r, n, x, y);
			src = i915_gem_object_get_dma_address(obj, src_idx);

			if (left < PAGE_SIZE || left & (PAGE_SIZE - 1)) {
				pr_err("Invalid sg.length, found %d, expected %lu for remapped page (%d, %d) [src index %lu]\n",
				       sg_dma_len(sg), PAGE_SIZE,
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			if (sg_dma_address(sg) + offset != src) {
				pr_err("Invalid address for remapped page (%d, %d) [src index %lu]\n",
				       x, y, src_idx);
				return ERR_PTR(-EINVAL);
			}

			left -= PAGE_SIZE;
			offset += PAGE_SIZE;

			if (!left)
				sg = sg_next(sg);
		}

		if (left) {
			pr_err("Unexpected sg tail with %d size for remapped page (%d, %d)\n",
			       left,
			       x, y);
			return ERR_PTR(-EINVAL);
		}

		left = (r->plane[n].dst_stride - r->plane[n].width) * PAGE_SIZE;

		if (!left)
			continue;

		if (!sg) {
			pr_err("Invalid sg table: too short at plane %d, (%d, %d)!\n",
			       n, x, y);
			return ERR_PTR(-EINVAL);
		}

		if (sg_dma_len(sg) != left) {
			pr_err("Invalid sg.length, found %u, expected %u for remapped page (%d, %d)\n",
			       sg_dma_len(sg), left,
			       x, y);
			return ERR_PTR(-EINVAL);
		}

		if (sg_dma_address(sg) != 0) {
			pr_err("Invalid address, found %pad, expected 0 for remapped page (%d, %d)\n",
			       &sg_dma_address(sg),
			       x, y);
			return ERR_PTR(-EINVAL);
		}

		sg = sg_next(sg);
		left = 0;
	}

	return sg;
}

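/*
 * Total number of pages in the view: a rotated view emits dst_stride
 * pages per column (width columns), while a remapped view emits
 * dst_stride pages per row (height rows); the two planes are simply
 * concatenated.
 */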
static unsigned int remapped_size(enum i915_gtt_view_type view_type,
				  const struct intel_remapped_plane_info *a,
				  const struct intel_remapped_plane_info *b)
{
	if (view_type == I915_GTT_VIEW_ROTATED)
		return a->dst_stride * a->width + b->dst_stride * b->width;
	else
		return a->dst_stride * a->height + b->dst_stride * b->height;
}

static int igt_vma_rotate_remap(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct i915_address_space *vm = &ggtt->vm;
	struct drm_i915_gem_object *obj;
	const struct intel_remapped_plane_info planes[] = {
		{ .width = 1, .height = 1, .src_stride = 1 },
		{ .width = 2, .height = 2, .src_stride = 2 },
		{ .width = 4, .height = 4, .src_stride = 4 },
		{ .width = 8, .height = 8, .src_stride = 8 },

		{ .width = 3, .height = 5, .src_stride = 3 },
		{ .width = 3, .height = 5, .src_stride = 4 },
		{ .width = 3, .height = 5, .src_stride = 5 },

		{ .width = 5, .height = 3, .src_stride = 5 },
		{ .width = 5, .height = 3, .src_stride = 7 },
		{ .width = 5, .height = 3, .src_stride = 9 },

		{ .width = 4, .height = 6, .src_stride = 6 },
		{ .width = 6, .height = 4, .src_stride = 6 },

		{ .width = 2, .height = 2, .src_stride = 2, .dst_stride = 2 },
		{ .width = 3, .height = 3, .src_stride = 3, .dst_stride = 4 },
		{ .width = 5, .height = 6, .src_stride = 7, .dst_stride = 8 },

		{ }
	}, *a, *b;
	enum i915_gtt_view_type types[] = {
		I915_GTT_VIEW_ROTATED,
		I915_GTT_VIEW_REMAPPED,
		0,
	}, *t;
	const unsigned int max_pages = 64;
	int err = -ENOMEM;

	/* Create VMAs for many different combinations of planes and check
	 * that the page layout within the rotated VMA matches our expectations.
	 */

	obj = i915_gem_object_create_internal(vm->i915, max_pages * PAGE_SIZE);
	if (IS_ERR(obj))
		goto out;

	for (t = types; *t; t++) {
	for (a = planes; a->width; a++) {
		for (b = planes + ARRAY_SIZE(planes); b-- != planes; ) {
			struct i915_gtt_view view = {
				.type = *t,
				.remapped.plane[0] = *a,
				.remapped.plane[1] = *b,
			};
			struct intel_remapped_plane_info *plane_info = view.remapped.plane;
			unsigned int n, max_offset;

			max_offset = max(plane_info[0].src_stride * plane_info[0].height,
					 plane_info[1].src_stride * plane_info[1].height);
			GEM_BUG_ON(max_offset > max_pages);
			max_offset = max_pages - max_offset;

			if (!plane_info[0].dst_stride)
				plane_info[0].dst_stride = view.type == I915_GTT_VIEW_ROTATED ?
									plane_info[0].height :
									plane_info[0].width;
			if (!plane_info[1].dst_stride)
				plane_info[1].dst_stride = view.type == I915_GTT_VIEW_ROTATED ?
									plane_info[1].height :
									plane_info[1].width;

			for_each_prime_number_from(plane_info[0].offset, 0, max_offset) {
				for_each_prime_number_from(plane_info[1].offset, 0, max_offset) {
					struct scatterlist *sg;
					struct i915_vma *vma;
					unsigned int expected_pages;

					vma = checked_vma_instance(obj, vm, &view);
					if (IS_ERR(vma)) {
						err = PTR_ERR(vma);
						goto out_object;
					}

					err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
					if (err) {
						pr_err("Failed to pin VMA, err=%d\n", err);
						goto out_object;
					}

					expected_pages = remapped_size(view.type, &plane_info[0], &plane_info[1]);

					if (view.type == I915_GTT_VIEW_ROTATED &&
					    vma->size != expected_pages * PAGE_SIZE) {
						pr_err("VMA is wrong size, expected %lu, found %llu\n",
						       PAGE_SIZE * expected_pages, vma->size);
						err = -EINVAL;
						goto out_object;
					}

					if (view.type == I915_GTT_VIEW_REMAPPED &&
					    vma->size > expected_pages * PAGE_SIZE) {
						pr_err("VMA is wrong size, expected %lu, found %llu\n",
						       PAGE_SIZE * expected_pages, vma->size);
						err = -EINVAL;
						goto out_object;
					}

					if (vma->pages->nents > expected_pages) {
						pr_err("sg table is wrong size, expected %u, found %u nents\n",
						       expected_pages, vma->pages->nents);
						err = -EINVAL;
						goto out_object;
					}

					if (vma->node.size < vma->size) {
						pr_err("VMA binding too small, expected %llu, found %llu\n",
						       vma->size, vma->node.size);
						err = -EINVAL;
						goto out_object;
					}

					if (vma->pages == obj->mm.pages) {
						pr_err("VMA using unrotated object pages!\n");
						err = -EINVAL;
						goto out_object;
					}

					sg = vma->pages->sgl;
					for (n = 0; n < ARRAY_SIZE(view.rotated.plane); n++) {
						if (view.type == I915_GTT_VIEW_ROTATED)
							sg = assert_rotated(obj, &view.rotated, n, sg);
						else
							sg = assert_remapped(obj, &view.remapped, n, sg);
						if (IS_ERR(sg)) {
							pr_err("Inconsistent %s VMA pages for plane %d: [(%d, %d, %d, %d, %d), (%d, %d, %d, %d, %d)]\n",
							       view.type == I915_GTT_VIEW_ROTATED ?
							       "rotated" : "remapped", n,
							       plane_info[0].width,
							       plane_info[0].height,
							       plane_info[0].src_stride,
							       plane_info[0].dst_stride,
							       plane_info[0].offset,
							       plane_info[1].width,
							       plane_info[1].height,
							       plane_info[1].src_stride,
							       plane_info[1].dst_stride,
							       plane_info[1].offset);
							err = -EINVAL;
							goto out_object;
						}
					}

					i915_vma_unpin(vma);
					err = i915_vma_unbind_unlocked(vma);
					if (err) {
						pr_err("Unbinding returned %i\n", err);
						goto out_object;
					}
					cond_resched();
				}
			}
		}
	}
	}

out_object:
	i915_gem_object_put(obj);
out:
	return err;
}

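/*
 * Check that a partial view maps exactly 'size' pages of the object
 * starting at 'offset', walking the DMA addresses of the sg list one
 * page at a time.
 */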
static bool assert_partial(struct drm_i915_gem_object *obj,
			   struct i915_vma *vma,
			   unsigned long offset,
			   unsigned long size)
{
	struct sgt_iter sgt;
	dma_addr_t dma;

	for_each_sgt_daddr(dma, sgt, vma->pages) {
		dma_addr_t src;

		if (!size) {
			pr_err("Partial scattergather list too long\n");
			return false;
		}

		src = i915_gem_object_get_dma_address(obj, offset);
		if (src != dma) {
			pr_err("DMA mismatch for partial page offset %lu\n",
			       offset);
			return false;
		}

		offset++;
		size--;
	}

	return true;
}

static bool assert_pin(struct i915_vma *vma,
		       struct i915_gtt_view *view,
		       u64 size,
		       const char *name)
{
	bool ok = true;

	if (vma->size != size) {
		pr_err("(%s) VMA is wrong size, expected %llu, found %llu\n",
		       name, size, vma->size);
		ok = false;
	}

	if (vma->node.size < vma->size) {
		pr_err("(%s) VMA binding too small, expected %llu, found %llu\n",
		       name, vma->size, vma->node.size);
		ok = false;
	}

	if (view && view->type != I915_GTT_VIEW_NORMAL) {
		if (memcmp(&vma->gtt_view, view, sizeof(*view))) {
			pr_err("(%s) VMA mismatch upon creation!\n",
			       name);
			ok = false;
		}

		if (vma->pages == vma->obj->mm.pages) {
			pr_err("(%s) VMA using original object pages!\n",
			       name);
			ok = false;
		}
	} else {
		if (vma->gtt_view.type != I915_GTT_VIEW_NORMAL) {
			pr_err("Not the normal ggtt view! Found %d\n",
			       vma->gtt_view.type);
			ok = false;
		}

		if (vma->pages != vma->obj->mm.pages) {
			pr_err("VMA not using object pages!\n");
			ok = false;
		}
	}

	return ok;
}

static int igt_vma_partial(void *arg)
{
	struct i915_ggtt *ggtt = arg;
	struct i915_address_space *vm = &ggtt->vm;
	const unsigned int npages = 1021; /* prime! */
	struct drm_i915_gem_object *obj;
	const struct phase {
		const char *name;
	} phases[] = {
		{ "create" },
		{ "lookup" },
		{ },
	}, *p;
	unsigned int sz, offset;
	struct i915_vma *vma;
	int err = -ENOMEM;

	/* Create lots of different VMAs for the object and check that
	 * we are returned the same VMA when we later request the same range.
	 */

	obj = i915_gem_object_create_internal(vm->i915, npages * PAGE_SIZE);
	if (IS_ERR(obj))
		goto out;

	for (p = phases; p->name; p++) { /* exercise both create/lookup */
		unsigned int count, nvma;

		nvma = 0;
		for_each_prime_number_from(sz, 1, npages) {
			for_each_prime_number_from(offset, 0, npages - sz) {
				struct i915_gtt_view view;

				view.type = I915_GTT_VIEW_PARTIAL;
				view.partial.offset = offset;
				view.partial.size = sz;

				if (sz == npages)
					view.type = I915_GTT_VIEW_NORMAL;

				vma = checked_vma_instance(obj, vm, &view);
				if (IS_ERR(vma)) {
					err = PTR_ERR(vma);
					goto out_object;
				}

				err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
				if (err)
					goto out_object;

				if (!assert_pin(vma, &view, sz * PAGE_SIZE, p->name)) {
					pr_err("(%s) Inconsistent partial pinning for (offset=%d, size=%d)\n",
					       p->name, offset, sz);
					err = -EINVAL;
					goto out_object;
				}

				if (!assert_partial(obj, vma, offset, sz)) {
					pr_err("(%s) Inconsistent partial pages for (offset=%d, size=%d)\n",
					       p->name, offset, sz);
					err = -EINVAL;
					goto out_object;
				}

				i915_vma_unpin(vma);
				nvma++;
				err = i915_vma_unbind_unlocked(vma);
				if (err) {
					pr_err("Unbinding returned %i\n", err);
					goto out_object;
				}

				cond_resched();
			}
		}

		count = 0;
		list_for_each_entry(vma, &obj->vma.list, obj_link)
			count++;
		if (count != nvma) {
			pr_err("(%s) Not all partial VMAs were recorded on the obj->vma.list: found %u, expected %u\n",
			       p->name, count, nvma);
			err = -EINVAL;
			goto out_object;
		}

		/* Check that we did create the whole object mapping */
		vma = checked_vma_instance(obj, vm, NULL);
		if (IS_ERR(vma)) {
			err = PTR_ERR(vma);
			goto out_object;
		}

		err = i915_vma_pin(vma, 0, 0, PIN_GLOBAL);
		if (err)
			goto out_object;

		if (!assert_pin(vma, NULL, obj->base.size, p->name)) {
			pr_err("(%s) inconsistent full pin\n", p->name);
			err = -EINVAL;
			goto out_object;
		}

		i915_vma_unpin(vma);

		err = i915_vma_unbind_unlocked(vma);
		if (err) {
			pr_err("Unbinding returned %i\n", err);
			goto out_object;
		}

		count = 0;
		list_for_each_entry(vma, &obj->vma.list, obj_link)
			count++;
		if (count != nvma) {
			pr_err("(%s) allocated an extra full vma!\n", p->name);
			err = -EINVAL;
			goto out_object;
		}
	}

out_object:
	i915_gem_object_put(obj);
out:
	return err;
}

int i915_vma_mock_selftests(void)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_vma_create),
		SUBTEST(igt_vma_pin1),
		SUBTEST(igt_vma_rotate_remap),
		SUBTEST(igt_vma_partial),
	};
	struct drm_i915_private *i915;
	struct intel_gt *gt;
	int err;

	i915 = mock_gem_device();
	if (!i915)
		return -ENOMEM;

	/* allocate the ggtt */
	err = intel_gt_assign_ggtt(to_gt(i915));
	if (err)
		goto out_put;

	gt = to_gt(i915);

	mock_init_ggtt(gt);

	err = i915_subtests(tests, gt->ggtt);

	mock_device_flush(i915);
	i915_gem_drain_freed_objects(i915);
	mock_fini_ggtt(gt->ggtt);

out_put:
	mock_destroy_device(i915);
	return err;
}

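/*
 * Sanity check remapping through the live GGTT aperture: write a
 * coordinate tag to each page through the rotated/remapped mapping,
 * then read it back through the normal (linear) mapping at the source
 * index the view is expected to reference.
 */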
static int igt_vma_remapped_gtt(void *arg)
{
	struct drm_i915_private *i915 = arg;
	const struct intel_remapped_plane_info planes[] = {
		{ .width = 1, .height = 1, .src_stride = 1 },
		{ .width = 2, .height = 2, .src_stride = 2 },
		{ .width = 4, .height = 4, .src_stride = 4 },
		{ .width = 8, .height = 8, .src_stride = 8 },

		{ .width = 3, .height = 5, .src_stride = 3 },
		{ .width = 3, .height = 5, .src_stride = 4 },
		{ .width = 3, .height = 5, .src_stride = 5 },

		{ .width = 5, .height = 3, .src_stride = 5 },
		{ .width = 5, .height = 3, .src_stride = 7 },
		{ .width = 5, .height = 3, .src_stride = 9 },

		{ .width = 4, .height = 6, .src_stride = 6 },
		{ .width = 6, .height = 4, .src_stride = 6 },

		{ .width = 2, .height = 2, .src_stride = 2, .dst_stride = 2 },
		{ .width = 3, .height = 3, .src_stride = 3, .dst_stride = 4 },
		{ .width = 5, .height = 6, .src_stride = 7, .dst_stride = 8 },

		{ }
	}, *p;
	enum i915_gtt_view_type types[] = {
		I915_GTT_VIEW_ROTATED,
		I915_GTT_VIEW_REMAPPED,
		0,
	}, *t;
	struct drm_i915_gem_object *obj;
	intel_wakeref_t wakeref;
	int err = 0;

	if (!i915_ggtt_has_aperture(to_gt(i915)->ggtt))
		return 0;

	obj = i915_gem_object_create_internal(i915, 10 * 10 * PAGE_SIZE);
	if (IS_ERR(obj))
		return PTR_ERR(obj);

	wakeref = intel_runtime_pm_get(&i915->runtime_pm);

	for (t = types; *t; t++) {
		for (p = planes; p->width; p++) {
			struct i915_gtt_view view = {
				.type = *t,
				.rotated.plane[0] = *p,
			};
			struct intel_remapped_plane_info *plane_info = view.rotated.plane;
			struct i915_vma *vma;
			u32 __iomem *map;
			unsigned int x, y;

			i915_gem_object_lock(obj, NULL);
			err = i915_gem_object_set_to_gtt_domain(obj, true);
			i915_gem_object_unlock(obj);
			if (err)
				goto out;

			if (!plane_info[0].dst_stride)
				plane_info[0].dst_stride = *t == I915_GTT_VIEW_ROTATED ?
								 p->height : p->width;

			vma = i915_gem_object_ggtt_pin(obj, &view, 0, 0, PIN_MAPPABLE);
			if (IS_ERR(vma)) {
				err = PTR_ERR(vma);
				goto out;
			}

			GEM_BUG_ON(vma->gtt_view.type != *t);

			map = i915_vma_pin_iomap(vma);
			i915_vma_unpin(vma);
			if (IS_ERR(map)) {
				err = PTR_ERR(map);
				goto out;
			}

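			/*
			 * Write a per-page coordinate tag through the view
			 * mapping: within the view, pages advance down
			 * columns (rotated) or along rows (remapped), with
			 * dst_stride as the pitch.
			 */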
			for (y = 0; y < plane_info[0].height; y++) {
				for (x = 0; x < plane_info[0].width; x++) {
					unsigned int offset;
					u32 val = y << 16 | x;

					if (*t == I915_GTT_VIEW_ROTATED)
						offset = (x * plane_info[0].dst_stride + y) * PAGE_SIZE;
					else
						offset = (y * plane_info[0].dst_stride + x) * PAGE_SIZE;

					iowrite32(val, &map[offset / sizeof(*map)]);
				}
			}

			i915_vma_unpin_iomap(vma);

			vma = i915_gem_object_ggtt_pin(obj, NULL, 0, 0, PIN_MAPPABLE);
			if (IS_ERR(vma)) {
				err = PTR_ERR(vma);
				goto out;
			}

			GEM_BUG_ON(vma->gtt_view.type != I915_GTT_VIEW_NORMAL);

			map = i915_vma_pin_iomap(vma);
			i915_vma_unpin(vma);
			if (IS_ERR(map)) {
				err = PTR_ERR(map);
				goto out;
			}

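			/*
			 * Read back through the normal view: each (x, y) tag
			 * must land on the source page computed by
			 * rotated_index()/remapped_index().
			 */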
			for (y = 0; y < plane_info[0].height; y++) {
				for (x = 0; x < plane_info[0].width; x++) {
					unsigned int offset, src_idx;
					u32 exp = y << 16 | x;
					u32 val;

					if (*t == I915_GTT_VIEW_ROTATED)
						src_idx = rotated_index(&view.rotated, 0, x, y);
					else
						src_idx = remapped_index(&view.remapped, 0, x, y);
					offset = src_idx * PAGE_SIZE;

					val = ioread32(&map[offset / sizeof(*map)]);
					if (val != exp) {
						pr_err("%s VMA write test failed, expected 0x%x, found 0x%x\n",
						       *t == I915_GTT_VIEW_ROTATED ? "Rotated" : "Remapped",
						       exp, val);
						i915_vma_unpin_iomap(vma);
						err = -EINVAL;
						goto out;
					}
				}
			}
			i915_vma_unpin_iomap(vma);

			cond_resched();
		}
	}

out:
	intel_runtime_pm_put(&i915->runtime_pm, wakeref);
	i915_gem_object_put(obj);

	return err;
}

int i915_vma_live_selftests(struct drm_i915_private *i915)
{
	static const struct i915_subtest tests[] = {
		SUBTEST(igt_vma_remapped_gtt),
	};

	return i915_live_subtests(tests, i915);
}