Lines Matching refs:dev_priv

126 * @dev_priv: Pointer to the device private for this resource. Immutable.
127 * @id: Device id. Protected by @dev_priv::resource_lock.
145 * @lru_head: List head for the LRU list. Protected by @dev_priv::resource_lock.
147 * the @dev_priv::binding_mutex
157 struct vmw_private *dev_priv;
348 struct vmw_private *dev_priv;
669 static inline void vmw_write(struct vmw_private *dev_priv,
672 if (vmw_is_svga_v3(dev_priv)) {
673 iowrite32(value, dev_priv->rmmio + offset);
675 spin_lock(&dev_priv->hw_lock);
676 outl(offset, dev_priv->io_start + SVGA_INDEX_PORT);
677 outl(value, dev_priv->io_start + SVGA_VALUE_PORT);
678 spin_unlock(&dev_priv->hw_lock);
682 static inline uint32_t vmw_read(struct vmw_private *dev_priv,
687 if (vmw_is_svga_v3(dev_priv)) {
688 val = ioread32(dev_priv->rmmio + offset);
690 spin_lock(&dev_priv->hw_lock);
691 outl(offset, dev_priv->io_start + SVGA_INDEX_PORT);
692 val = inl(dev_priv->io_start + SVGA_VALUE_PORT);
693 spin_unlock(&dev_priv->hw_lock);
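
The two helpers above select between the SVGA v3 register backend (MMIO through rmmio) and the legacy index/value port pair serialized by hw_lock. A minimal sketch built only on these helpers; vmw_reg_set_bits() is a hypothetical name, not driver API, and note that the read and the write each take hw_lock independently, so the pair is not atomic:

/* Illustrative only: set bits in an SVGA register via the helpers
 * above. Works on both register backends because the MMIO-vs-port
 * decision is made inside vmw_read()/vmw_write(). Callers needing
 * an atomic read-modify-write must add their own locking. */
static inline void vmw_reg_set_bits(struct vmw_private *dev_priv,
                                    unsigned int offset, u32 bits)
{
        u32 val = vmw_read(dev_priv, offset);

        vmw_write(dev_priv, offset, val | bits);
}
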
701 * @dev_priv: Device private.
705 static inline bool has_sm4_context(const struct vmw_private *dev_priv)
707 return (dev_priv->sm_type >= VMW_SM_4);
712 * @dev_priv: Device private.
716 static inline bool has_sm4_1_context(const struct vmw_private *dev_priv)
718 return (dev_priv->sm_type >= VMW_SM_4_1);
723 * @dev_priv: Device private.
727 static inline bool has_sm5_context(const struct vmw_private *dev_priv)
729 return (dev_priv->sm_type >= VMW_SM_5);
734 * @dev_priv: Device private.
738 static inline bool has_gl43_context(const struct vmw_private *dev_priv)
740 return (dev_priv->sm_type >= VMW_SM_5_1X);
744 static inline u32 vmw_max_num_uavs(struct vmw_private *dev_priv)
746 return (has_gl43_context(dev_priv) ?
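
The has_*_context() predicates compare sm_type against an ordered VMW_SM_* enum, so each one means "at least this shader-model level" and a newer level implies all the older ones. A hedged dispatch sketch; vmw_pick_shader_model() and its return codes are illustrative, not driver API:

/* Sketch: pick a feature path from the ordered sm_type checks.
 * has_gl43_context() implies has_sm5_context(), which implies
 * has_sm4_1_context(), and so on, so test newest first. */
static int vmw_pick_shader_model(struct vmw_private *dev_priv)
{
        if (has_gl43_context(dev_priv))
                return 51;      /* VMW_SM_5_1X: GL4.3-level contexts */
        if (has_sm5_context(dev_priv))
                return 50;
        if (has_sm4_1_context(dev_priv))
                return 41;
        if (has_sm4_context(dev_priv))
                return 40;
        return 0;               /* legacy, no DX shader model */
}
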
750 extern void vmw_svga_enable(struct vmw_private *dev_priv);
751 extern void vmw_svga_disable(struct vmw_private *dev_priv);
759 extern int vmw_gmr_bind(struct vmw_private *dev_priv,
763 extern void vmw_gmr_unbind(struct vmw_private *dev_priv, int gmr_id);
779 extern int vmw_user_lookup_handle(struct vmw_private *dev_priv,
785 struct vmw_private *dev_priv,
795 extern int vmw_user_stream_lookup(struct vmw_private *dev_priv,
809 void vmw_resource_evict_all(struct vmw_private *dev_priv);
837 extern int vmw_gem_object_create_with_handle(struct vmw_private *dev_priv,
863 extern struct vmw_fifo_state *vmw_fifo_create(struct vmw_private *dev_priv);
864 extern void vmw_fifo_destroy(struct vmw_private *dev_priv);
867 vmw_cmd_ctx_reserve(struct vmw_private *dev_priv, uint32_t bytes, int ctx_id);
868 extern void vmw_cmd_commit(struct vmw_private *dev_priv, uint32_t bytes);
869 extern void vmw_cmd_commit_flush(struct vmw_private *dev_priv, uint32_t bytes);
870 extern int vmw_cmd_send_fence(struct vmw_private *dev_priv, uint32_t *seqno);
871 extern bool vmw_supports_3d(struct vmw_private *dev_priv);
872 extern void vmw_fifo_ping_host(struct vmw_private *dev_priv, uint32_t reason);
873 extern bool vmw_fifo_have_pitchlock(struct vmw_private *dev_priv);
874 extern int vmw_cmd_emit_dummy_query(struct vmw_private *dev_priv,
876 extern int vmw_cmd_flush(struct vmw_private *dev_priv,
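
The vmw_cmd_* declarations above form a reserve/fill/commit protocol: reserve `bytes` of command space, write the command in place, then commit exactly that many bytes (vmw_cmd_commit_flush() additionally kicks the device). A hedged sketch of that pattern; the command layout and id are placeholders, and SVGA3D_INVALID_ID is used assuming it marks a command not bound to a DX context:

/* Sketch of the reserve/fill/commit pattern. Real commands are
 * SVGA3D/FIFO structures; this anonymous struct is a stand-in. */
static int vmw_send_example_cmd(struct vmw_private *dev_priv, u32 arg)
{
        struct {
                u32 id;
                u32 body;
        } *cmd;

        cmd = vmw_cmd_ctx_reserve(dev_priv, sizeof(*cmd), SVGA3D_INVALID_ID);
        if (unlikely(!cmd))
                return -ENOMEM; /* reservation failed; nothing to commit */

        cmd->id = 0x1234;       /* placeholder command id */
        cmd->body = arg;

        vmw_cmd_commit(dev_priv, sizeof(*cmd));
        return 0;
}
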
895 * @dev_priv: The device private context
897 static inline uint32_t vmw_fifo_caps(const struct vmw_private *dev_priv)
899 if (!dev_priv->fifo_mem || !dev_priv->fifo)
901 return dev_priv->fifo->capabilities;
908 * @dev_priv: The device private context
911 vmw_is_cursor_bypass3_enabled(const struct vmw_private *dev_priv)
913 return (vmw_fifo_caps(dev_priv) & SVGA_FIFO_CAP_CURSOR_BYPASS_3) != 0;
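
vmw_fifo_caps() returns 0 when the FIFO is absent, so capability tests like the one above are NULL-safe. A sketch of the same pattern against another capability bit; vmw_example_has_pitchlock() is hypothetical (the driver's real check is vmw_fifo_have_pitchlock(), declared above at line 873):

/* Sketch, mirroring the cursor-bypass-3 helper: test a FIFO
 * capability bit through the NULL-safe vmw_fifo_caps(). */
static inline bool
vmw_example_has_pitchlock(const struct vmw_private *dev_priv)
{
        return (vmw_fifo_caps(dev_priv) & SVGA_FIFO_CAP_PITCHLOCK) != 0;
}
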
926 int vmw_bo_create_and_populate(struct vmw_private *dev_priv,
978 struct vmw_private *dev_priv,
988 extern void __vmw_execbuf_release_pinned_bo(struct vmw_private *dev_priv,
990 extern void vmw_execbuf_release_pinned_bo(struct vmw_private *dev_priv);
993 struct vmw_private *dev_priv,
996 extern int vmw_execbuf_copy_fence_user(struct vmw_private *dev_priv,
1010 extern int vmw_irq_install(struct vmw_private *dev_priv);
1012 extern bool vmw_seqno_passed(struct vmw_private *dev_priv,
1014 extern int vmw_fallback_wait(struct vmw_private *dev_priv,
1020 extern void vmw_update_seqno(struct vmw_private *dev_priv);
1021 extern void vmw_seqno_waiter_add(struct vmw_private *dev_priv);
1022 extern void vmw_seqno_waiter_remove(struct vmw_private *dev_priv);
1023 extern void vmw_goal_waiter_add(struct vmw_private *dev_priv);
1024 extern void vmw_goal_waiter_remove(struct vmw_private *dev_priv);
1025 extern void vmw_generic_waiter_add(struct vmw_private *dev_priv, u32 flag,
1027 extern void vmw_generic_waiter_remove(struct vmw_private *dev_priv,
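
The paired *_waiter_add()/*_waiter_remove() calls reference-count interest in a device interrupt so the IRQ is only unmasked while someone is actually waiting. A hedged sketch of the usual bracketing around a seqno wait; the busy-wait stands in for the real sleeping logic in vmw_fallback_wait():

/* Sketch: bracket a wait on a fence seqno with waiter registration
 * so the seqno IRQ stays enabled for the duration of the wait. */
static void vmw_wait_seqno_sketch(struct vmw_private *dev_priv, u32 seqno)
{
        vmw_seqno_waiter_add(dev_priv);

        while (!vmw_seqno_passed(dev_priv, seqno))
                cpu_relax();    /* illustrative; the driver sleeps instead */

        vmw_seqno_waiter_remove(dev_priv);
}
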
1034 int vmw_kms_init(struct vmw_private *dev_priv);
1035 int vmw_kms_close(struct vmw_private *dev_priv);
1038 void vmw_kms_cursor_post_execbuf(struct vmw_private *dev_priv);
1046 bool vmw_kms_validate_mode_vram(struct vmw_private *dev_priv,
1049 int vmw_kms_present(struct vmw_private *dev_priv,
1058 void vmw_kms_legacy_hotspot_clear(struct vmw_private *dev_priv);
1074 int vmw_overlay_init(struct vmw_private *dev_priv);
1075 int vmw_overlay_close(struct vmw_private *dev_priv);
1078 int vmw_overlay_resume_all(struct vmw_private *dev_priv);
1079 int vmw_overlay_pause_all(struct vmw_private *dev_priv);
1080 int vmw_overlay_claim(struct vmw_private *dev_priv, uint32_t *out);
1081 int vmw_overlay_unref(struct vmw_private *dev_priv, uint32_t stream_id);
1082 int vmw_overlay_num_overlays(struct vmw_private *dev_priv);
1083 int vmw_overlay_num_free_overlays(struct vmw_private *dev_priv);
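
The overlay entry points hand out stream ids with a claim/unref discipline, with pause_all/resume_all bracketing suspend and resume. A hedged sketch; vmw_overlay_example() and the elided programming step are illustrative:

/* Sketch: claim an overlay stream id, use it, then release it. */
static int vmw_overlay_example(struct vmw_private *dev_priv)
{
        u32 stream_id;
        int ret;

        ret = vmw_overlay_claim(dev_priv, &stream_id);
        if (ret)
                return ret;     /* no free overlay streams */

        /* ... program the overlay for stream_id ... */

        return vmw_overlay_unref(dev_priv, stream_id);
}
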
1089 int vmw_gmrid_man_init(struct vmw_private *dev_priv, int type);
1090 void vmw_gmrid_man_fini(struct vmw_private *dev_priv, int type);
1095 int vmw_sys_man_init(struct vmw_private *dev_priv);
1096 void vmw_sys_man_fini(struct vmw_private *dev_priv);
1118 extern int vmw_mob_bind(struct vmw_private *dev_priv, struct vmw_mob *mob,
1121 extern void vmw_mob_unbind(struct vmw_private *dev_priv,
1125 extern int vmw_otables_setup(struct vmw_private *dev_priv);
1126 extern void vmw_otables_takedown(struct vmw_private *dev_priv);
1179 int vmw_gb_surface_define(struct vmw_private *dev_priv,
1193 extern int vmw_compat_shader_add(struct vmw_private *dev_priv,
1207 extern void vmw_dx_shader_cotable_list_scrub(struct vmw_private *dev_priv,
1229 void vmw_dx_streamoutput_cotable_list_scrub(struct vmw_private *dev_priv,
1238 vmw_cmdbuf_res_man_create(struct vmw_private *dev_priv);
1261 extern struct vmw_resource *vmw_cotable_alloc(struct vmw_private *dev_priv,
1276 vmw_cmdbuf_man_create(struct vmw_private *dev_priv);
1353 int vmw_mksstat_get_kern_slot(pid_t pid, struct vmw_private *dev_priv);
1361 int vmw_mksstat_remove_all(struct vmw_private *dev_priv);
1416 static inline void vmw_fifo_resource_inc(struct vmw_private *dev_priv)
1418 atomic_inc(&dev_priv->num_fifo_resources);
1421 static inline void vmw_fifo_resource_dec(struct vmw_private *dev_priv)
1423 atomic_dec(&dev_priv->num_fifo_resources);
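
num_fifo_resources counts resources that need the SVGA FIFO kept alive, presumably so paths like vmw_svga_disable() (declared above at line 751) can tell whether disabling is safe. A hedged sketch of the bracketing; both function names here are hypothetical:

/* Sketch: a FIFO-backed resource pins the counter for its
 * whole lifetime, incrementing on create and decrementing
 * on destroy. */
static void vmw_example_resource_create(struct vmw_private *dev_priv)
{
        vmw_fifo_resource_inc(dev_priv);
        /* ... create the FIFO-backed resource ... */
}

static void vmw_example_resource_destroy(struct vmw_private *dev_priv)
{
        /* ... destroy the resource ... */
        vmw_fifo_resource_dec(dev_priv);
}
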
1455 static inline u32 vmw_fence_read(struct vmw_private *dev_priv)
1458 if (vmw_is_svga_v3(dev_priv))
1459 fence = vmw_read(dev_priv, SVGA_REG_FENCE);
1461 fence = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_FENCE);
1465 static inline void vmw_fence_write(struct vmw_private *dev_priv,
1468 BUG_ON(vmw_is_svga_v3(dev_priv));
1469 vmw_fifo_mem_write(dev_priv, SVGA_FIFO_FENCE, fence);
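
vmw_fence_read() hides where the last-completed fence value lives (the SVGA_REG_FENCE register on v3 devices, FIFO memory otherwise), while vmw_fence_write() is legal only on pre-v3 devices, hence the BUG_ON. A hedged sketch of a wraparound-safe check against the value read back; vmw_example_seqno_done() is hypothetical, using the standard signed-difference idiom for comparing wrapping 32-bit counters:

/* Sketch: wrap-safe "has this seqno passed?" check against the
 * device fence counter, tolerant of 32-bit wraparound. */
static bool vmw_example_seqno_done(struct vmw_private *dev_priv, u32 seqno)
{
        u32 current_fence = vmw_fence_read(dev_priv);

        return (s32)(current_fence - seqno) >= 0;
}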