Searched refs:migrate (Results 1 - 25 of 87) sorted by path


/linux-master/tools/perf/scripts/python/
sched-migration.py 191 def migrate(self, ts_list, new, old_cpu, new_cpu): member in class:TimeSlice
351 def migrate(self, headers, pid, prio, orig_cpu, dest_cpu): member in class:SchedEventProxy
353 ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
418 parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
/linux-master/arch/arm64/kvm/
arm.c 2137 init_psci_0_1_impl_state(kvm_host_psci_config, migrate);
/linux-master/arch/arm64/kvm/hyp/nvhe/
psci-relay.c 53 is_psci_0_1(migrate, func_id));
226 if (is_psci_0_1(cpu_off, func_id) || is_psci_0_1(migrate, func_id))
/linux-master/arch/powerpc/kvm/
book3s_hv_uvmem.c 90 #include <linux/migrate.h>
840 /* migrate any unmoved normal pfn to device pfns*/
/linux-master/arch/powerpc/mm/book3s64/
iommu_api.c 13 #include <linux/migrate.h>
/linux-master/arch/powerpc/platforms/pseries/
vas.c 632 bool migrate)
660 if ((vcaps->nr_close_wins > creds) && !migrate)
674 if (migrate)
742 bool migrate)
749 if (migrate)
807 if (rc && !migrate)
819 if (!migrate && !--excess_creds)
631 reconfig_open_windows(struct vas_caps *vcaps, int creds, bool migrate) argument
741 reconfig_close_windows(struct vas_caps *vcap, int excess_creds, bool migrate) argument
/linux-master/drivers/firmware/psci/
psci.c 46 * calls to its resident CPU, so we must avoid issuing those. We never migrate
244 return __psci_migrate(psci_0_1_function_ids.migrate, cpuid);
621 .migrate = psci_0_2_migrate,
718 if (!of_property_read_u32(np, "migrate", &id)) {
719 psci_0_1_function_ids.migrate = id;
720 psci_ops.migrate = psci_0_1_migrate;
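
The psci.c hits above show the PSCI 0.1 pattern: the MIGRATE function ID is an optional "migrate" property in the firmware's DT node, and the migrate op is only installed when that property is present. A minimal sketch of that pattern, assuming the surrounding psci.c context (psci_0_1_function_ids, psci_ops and psci_0_1_migrate are taken from the excerpt; the helper name is hypothetical):

    #include <linux/of.h>

    /* Hypothetical helper mirroring the of_property_read_u32() pattern above. */
    static void psci_0_1_set_migrate(const struct device_node *np)
    {
            u32 id;

            /* "migrate" carries the firmware's MIGRATE function ID; it is
             * optional, so the op stays unset when the property is absent. */
            if (of_property_read_u32(np, "migrate", &id))
                    return;

            psci_0_1_function_ids.migrate = id;
            psci_ops.migrate = psci_0_1_migrate;
    }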
/linux-master/drivers/gpu/drm/amd/amdkfd/
kfd_migrate.c 27 #include <linux/migrate.h>
181 * @mfence: migrate fence
263 static unsigned long svm_migrate_unsuccessful_pages(struct migrate_vma *migrate) argument
268 for (i = 0; i < migrate->npages; i++) {
269 if (migrate->src[i] & MIGRATE_PFN_VALID &&
270 !(migrate->src[i] & MIGRATE_PFN_MIGRATE))
278 struct migrate_vma *migrate, struct dma_fence **mfence,
281 uint64_t npages = migrate->cpages;
302 migrate->dst[i] = svm_migrate_addr_to_pfn(adev, dst[i]);
303 svm_migrate_get_vram_page(prange, migrate
277 svm_migrate_copy_to_vram(struct kfd_node *node, struct svm_range *prange, struct migrate_vma *migrate, struct dma_fence **mfence, dma_addr_t *scratch, uint64_t ttm_res_offset) argument
390 struct migrate_vma migrate = { 0 }; local
576 svm_migrate_copy_to_ram(struct amdgpu_device *adev, struct svm_range *prange, struct migrate_vma *migrate, struct dma_fence **mfence, dma_addr_t *scratch, uint64_t npages) argument
692 struct migrate_vma migrate = { 0 }; local
[all...]
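
The kfd_migrate.c hits are built on the generic migrate_vma API from <linux/migrate.h>: fill a struct migrate_vma, then run setup, the device copy, pages and finalize, using the MIGRATE_PFN_* bits to spot pages that refused to move (as svm_migrate_unsuccessful_pages() does at line 263). A condensed sketch of that flow with the device-specific copy elided; the helper name is hypothetical:

    #include <linux/migrate.h>
    #include <linux/mm.h>

    static int demo_migrate_range(struct vm_area_struct *vma,
                                  unsigned long start, unsigned long end,
                                  void *pgmap_owner,
                                  unsigned long *src, unsigned long *dst)
    {
            struct migrate_vma migrate = {
                    .vma         = vma,
                    .start       = start,
                    .end         = end,
                    .src         = src,
                    .dst         = dst,
                    .pgmap_owner = pgmap_owner,
                    .flags       = MIGRATE_VMA_SELECT_SYSTEM,
            };
            unsigned long i, failed = 0;
            int ret;

            ret = migrate_vma_setup(&migrate);   /* collect and isolate source pages */
            if (ret || !migrate.cpages)
                    return ret;

            /* ...allocate destination pages, fill migrate.dst[] via migrate_pfn()
             * and perform the device copy here (driver specific)... */

            migrate_vma_pages(&migrate);         /* install the destination pages */

            /* count pages that could not be migrated, as the amdkfd helper does */
            for (i = 0; i < migrate.npages; i++)
                    if ((migrate.src[i] & MIGRATE_PFN_VALID) &&
                        !(migrate.src[i] & MIGRATE_PFN_MIGRATE))
                            failed++;

            migrate_vma_finalize(&migrate);      /* drop migration entries */
            return failed ? -EBUSY : 0;
    }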
/linux-master/drivers/gpu/drm/i915/gem/
i915_gem_object.c 669 * @obj: The object to migrate
670 * @id: The region intended to migrate to
673 * given region. Note that pinning may affect the ability to migrate as
677 * possibility to migrate objects and might be slightly less permissive
707 if (!obj->ops->migrate)
726 * @obj: The object to migrate.
729 * @id: The region id to migrate to.
731 * Attempt to migrate the object to the desired memory region. The
759 * @obj: The object to migrate.
762 * @id: The region id to migrate t
[all...]
i915_gem_object_types.h 108 * migrate - Migrate object to a different region either for
111 int (*migrate)(struct drm_i915_gem_object *obj, member in struct:drm_i915_gem_object_ops
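
The i915_gem_object.c and i915_gem_object_types.h hits describe an optional per-backend ->migrate hook in drm_i915_gem_object_ops (the TTM backend points it at i915_ttm_migrate below). The excerpt truncates the hook's parameter list, so the destination-region argument in this dispatch sketch is an assumption; the other names come from the hits:

    /* Sketch only: migration is possible only when the backend provides the
     * hook. The (obj, mr) signature is assumed; the excerpt shows just the
     * first parameter. */
    static int object_migrate_if_possible(struct drm_i915_gem_object *obj,
                                          struct intel_memory_region *mr)
    {
            if (!obj->ops->migrate)
                    return -EOPNOTSUPP;   /* backend cannot move this object */

            return obj->ops->migrate(obj, mr);
    }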
i915_gem_ttm.c 1233 .migrate = i915_ttm_migrate,
i915_gem_ttm_move.c 202 if (!to_gt(i915)->migrate.context || intel_gt_is_wedged(to_gt(i915)))
215 intel_engine_pm_get(to_gt(i915)->migrate.context->engine);
216 ret = intel_context_migrate_clear(to_gt(i915)->migrate.context, deps,
229 intel_engine_pm_get(to_gt(i915)->migrate.context->engine);
230 ret = intel_context_migrate_copy(to_gt(i915)->migrate.context,
242 intel_engine_pm_put(to_gt(i915)->migrate.context->engine);
661 ret = -EIO; /* never feed non-migrate fences into ttm */
/linux-master/drivers/gpu/drm/i915/gem/selftests/
i915_gem_migrate.c 221 err = intel_migrate_clear(&gt->migrate, &ww, deps,
i915_gem_mman.c 1226 err = intel_context_migrate_clear(to_gt(i915)->migrate.context, NULL,
/linux-master/drivers/gpu/drm/i915/gt/
intel_gt.c 762 intel_migrate_init(&gt->migrate, gt);
787 intel_migrate_fini(&gt->migrate);
intel_gt_types.h 229 struct intel_migrate migrate; member in struct:intel_gt
intel_migrate.c 262 &key, "migrate");
505 * its preference list, on memory pressure, i915 needs to migrate the lmem
699 GEM_BUG_ON(ce->vm != ce->engine->gt->migrate.context->vm);
998 GEM_BUG_ON(ce->vm != ce->engine->gt->migrate.context->vm);
selftest_migrate.c 36 static int copy(struct intel_migrate *migrate, argument
37 int (*fn)(struct intel_migrate *migrate,
44 struct drm_i915_private *i915 = migrate->context->engine->i915;
90 err = fn(migrate, &ww, src, dst, &rq);
151 GEM_BUG_ON(ce->vm != ce->engine->gt->migrate.context->vm);
255 static int clear(struct intel_migrate *migrate, argument
256 int (*fn)(struct intel_migrate *migrate,
263 struct drm_i915_private *i915 = migrate->context->engine->i915;
301 err = intel_migrate_ccs_copy(migrate, &ww, NULL,
318 err = fn(migrate,
410 __migrate_copy(struct intel_migrate *migrate, struct i915_gem_ww_ctx *ww, struct drm_i915_gem_object *src, struct drm_i915_gem_object *dst, struct i915_request **out) argument
424 __global_copy(struct intel_migrate *migrate, struct i915_gem_ww_ctx *ww, struct drm_i915_gem_object *src, struct drm_i915_gem_object *dst, struct i915_request **out) argument
439 migrate_copy(struct intel_migrate *migrate, u32 sz, struct rnd_state *prng) argument
445 global_copy(struct intel_migrate *migrate, u32 sz, struct rnd_state *prng) argument
450 __migrate_clear(struct intel_migrate *migrate, struct i915_gem_ww_ctx *ww, struct drm_i915_gem_object *obj, u32 value, struct i915_request **out) argument
463 __global_clear(struct intel_migrate *migrate, struct i915_gem_ww_ctx *ww, struct drm_i915_gem_object *obj, u32 value, struct i915_request **out) argument
477 migrate_clear(struct intel_migrate *migrate, u32 sz, struct rnd_state *prng) argument
483 global_clear(struct intel_migrate *migrate, u32 sz, struct rnd_state *prng) argument
491 struct intel_migrate *migrate = &gt->migrate; local
513 struct intel_migrate *migrate = &gt->migrate; local
549 struct intel_migrate *migrate = &gt->migrate; local
677 struct intel_migrate *migrate; member in struct:threaded_migrate
682 threaded_migrate(struct intel_migrate *migrate, int (*fn)(void *arg), unsigned int flags) argument
741 struct intel_migrate *migrate = &gt->migrate; local
756 struct intel_migrate *migrate = &gt->migrate; local
778 struct intel_migrate *migrate = &gt->migrate; local
786 struct intel_migrate *migrate = &gt->migrate; local
[all...]
/linux-master/drivers/gpu/drm/i915/selftests/
i915_live_selftests.h 30 selftest(migrate, intel_migrate_live_selftests)
i915_perf_selftests.h 20 selftest(migrate, intel_migrate_perf_selftests)
intel_memory_region.c 1072 err = intel_context_migrate_clear(engine->gt->migrate.context, NULL,
/linux-master/drivers/gpu/drm/nouveau/
nouveau_dmem.c 42 #include <linux/migrate.h>
82 struct nouveau_dmem_migrate migrate; member in struct:nouveau_dmem
152 if (drm->dmem->migrate.copy_func(drm, 1, NOUVEAU_APER_HOST, *dma_addr,
183 * FIXME what we really want is to find some heuristic to migrate more
212 nouveau_fence_new(&fence, dmem->migrate.chan);
405 nouveau_fence_new(&fence, chunk->drm->dmem->migrate.chan);
446 struct nvif_push *push = drm->dmem->migrate.chan->chan.push;
519 struct nvif_push *push = drm->dmem->migrate.chan->chan.push;
579 drm->dmem->migrate.copy_func = nvc0b5_migrate_copy;
580 drm->dmem->migrate
[all...]
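
The nouveau_dmem.c hits revolve around a small per-device migrate state: a copy_func pointer selected per chipset at init (nvc0b5_migrate_copy at line 579) plus a dedicated channel whose fences are waited on after each copy. The struct below is a simplified stand-in, not the real nouveau_dmem_migrate definition; the copy_func shape is inferred from the call at line 152 and is an assumption:

    /* Simplified stand-in for the migrate state shown above. */
    struct demo_dmem_migrate {
            /* assumed shape: (drm, npages, dst aperture, dst addr,
             *                 src aperture, src addr) */
            int (*copy_func)(struct nouveau_drm *drm, u64 npages,
                             enum nouveau_aper dst_aper, u64 dst_addr,
                             enum nouveau_aper src_aper, u64 src_addr);
            struct nouveau_channel *chan;   /* channel the copies are queued on */
    };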
/linux-master/drivers/gpu/drm/xe/tests/
xe_bo.c 39 fence = xe_migrate_clear(tile->migrate, bo, bo->ttm.resource);
190 struct xe_vm *vm = xe_migrate_get_vm(xe_device_get_root_tile(xe)->migrate);
xe_migrate.c 427 struct xe_migrate *m = tile->migrate;
/linux-master/drivers/gpu/drm/xe/
xe_bo.c 124 return tile->migrate;
622 struct xe_migrate *migrate = NULL; local
708 migrate = bo->tile->migrate;
710 migrate = mem_type_to_migrate(xe, new_mem->mem_type);
712 migrate = mem_type_to_migrate(xe, old_mem_type);
714 migrate = xe->tiles[0].migrate;
716 xe_assert(xe, migrate);
754 fence = xe_migrate_clear(migrate, b
[all...]
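
The xe_bo.c hits at lines 622-716 pick which migration engine services a move: the bo's own tile when it has one, otherwise one derived from the new or old placement via mem_type_to_migrate(), with tile 0 as the final fallback before handing the result to xe_migrate_clear(). A condensed sketch of that selection; the placement_uses_vram() predicate is a placeholder, since the excerpt truncates the real conditions:

    /* Condensed selection sketch; placement_uses_vram() is hypothetical. */
    static struct xe_migrate *pick_migrate(struct xe_device *xe, struct xe_bo *bo,
                                           u32 new_mem_type, u32 old_mem_type)
    {
            if (bo->tile)
                    return bo->tile->migrate;
            if (placement_uses_vram(new_mem_type))
                    return mem_type_to_migrate(xe, new_mem_type);
            if (placement_uses_vram(old_mem_type))
                    return mem_type_to_migrate(xe, old_mem_type);
            return xe->tiles[0].migrate;
    }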

Completed in 426 milliseconds
