Searched refs:qdev (Results 1 - 25 of 28) sorted by relevance


/linux-master/drivers/gpu/drm/qxl/
qxl_kms.c
36 static bool qxl_check_device(struct qxl_device *qdev) argument
38 struct qxl_rom *rom = qdev->rom;
53 qdev->vram_size = rom->surface0_area_size;
58 static void setup_hw_slot(struct qxl_device *qdev, struct qxl_memslot *slot) argument
60 qdev->ram_header->mem_slot.mem_start = slot->start_phys_addr;
61 qdev->ram_header->mem_slot.mem_end = slot->start_phys_addr + slot->size;
62 qxl_io_memslot_add(qdev, qdev->rom->slots_start + slot->index);
65 static void setup_slot(struct qxl_device *qdev, argument
79 setup_hw_slot(qdev, slo
94 qxl_reinit_memslots(struct qxl_device *qdev) argument
102 struct qxl_device *qdev = container_of(work, struct qxl_device, gc_work); local
107 qxl_device_init(struct qxl_device *qdev, struct pci_dev *pdev) argument
283 qxl_device_fini(struct qxl_device *qdev) argument
[all...]
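The qxl_kms.c hits above sketch the memslot bring-up: setup_hw_slot publishes the slot's physical range through the shared ram header and then issues the memslot-add I/O. A minimal reconstruction of that helper from the fragments listed; the surrounding struct definitions are assumed rather than copied from the headers.

	static void setup_hw_slot(struct qxl_device *qdev, struct qxl_memslot *slot)
	{
		/* Publish the slot's guest-physical range via the shared ram header. */
		qdev->ram_header->mem_slot.mem_start = slot->start_phys_addr;
		qdev->ram_header->mem_slot.mem_end = slot->start_phys_addr + slot->size;
		/* Tell the device which slot id was just (re)programmed. */
		qxl_io_memslot_add(qdev, qdev->rom->slots_start + slot->index);
	}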
qxl_irq.c
35 struct qxl_device *qdev = to_qxl(dev); local
38 pending = xchg(&qdev->ram_header->int_pending, 0);
43 atomic_inc(&qdev->irq_received);
46 atomic_inc(&qdev->irq_received_display);
47 wake_up_all(&qdev->display_event);
48 qxl_queue_garbage_collect(qdev, false);
51 atomic_inc(&qdev->irq_received_cursor);
52 wake_up_all(&qdev->cursor_event);
55 atomic_inc(&qdev->irq_received_io_cmd);
56 wake_up_all(&qdev
76 struct qxl_device *qdev = container_of(work, struct qxl_device, local
82 qxl_irq_init(struct qxl_device *qdev) argument
[all...]
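Taken together, the qxl_irq.c fragments outline the interrupt handler: atomically fetch-and-clear the pending mask from the ram header, bump a per-source counter, and wake any waiters. A sketch of that shape; the QXL_INTERRUPT_* bit names and the return-value handling are assumptions, and the io-cmd and error branches are elided.

	static irqreturn_t qxl_irq_handler(int irq, void *arg)
	{
		struct drm_device *dev = arg;
		struct qxl_device *qdev = to_qxl(dev);
		uint32_t pending;

		/* Fetch and clear all pending bits in one atomic exchange. */
		pending = xchg(&qdev->ram_header->int_pending, 0);
		if (!pending)
			return IRQ_NONE;

		atomic_inc(&qdev->irq_received);

		if (pending & QXL_INTERRUPT_DISPLAY) {
			atomic_inc(&qdev->irq_received_display);
			wake_up_all(&qdev->display_event);
			qxl_queue_garbage_collect(qdev, false);
		}
		if (pending & QXL_INTERRUPT_CURSOR) {
			atomic_inc(&qdev->irq_received_cursor);
			wake_up_all(&qdev->cursor_event);
		}

		return IRQ_HANDLED;
	}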
qxl_cmd.c
35 static int qxl_reap_surface_id(struct qxl_device *qdev, int max_to_reap);
178 qxl_push_command_ring_release(struct qxl_device *qdev, struct qxl_release *release, argument
184 cmd.data = qxl_bo_physical_address(qdev, release->release_bo, release->release_offset);
186 return qxl_ring_push(qdev->command_ring, &cmd, interruptible);
190 qxl_push_cursor_ring_release(struct qxl_device *qdev, struct qxl_release *release, argument
196 cmd.data = qxl_bo_physical_address(qdev, release->release_bo, release->release_offset);
198 return qxl_ring_push(qdev->cursor_ring, &cmd, interruptible);
201 bool qxl_queue_garbage_collect(struct qxl_device *qdev, bool flush) argument
203 if (!qxl_check_idle(qdev->release_ring)) {
204 schedule_work(&qdev
212 qxl_garbage_collect(struct qxl_device *qdev) argument
255 qxl_alloc_bo_reserved(struct qxl_device *qdev, struct qxl_release *release, unsigned long size, struct qxl_bo **_bo) argument
280 wait_for_io_cmd_user(struct qxl_device *qdev, uint8_t val, long port, bool intr) argument
315 wait_for_io_cmd(struct qxl_device *qdev, uint8_t val, long port) argument
325 qxl_io_update_area(struct qxl_device *qdev, struct qxl_bo *surf, const struct qxl_rect *area) argument
354 qxl_io_notify_oom(struct qxl_device *qdev) argument
359 qxl_io_flush_release(struct qxl_device *qdev) argument
364 qxl_io_flush_surfaces(struct qxl_device *qdev) argument
369 qxl_io_destroy_primary(struct qxl_device *qdev) argument
377 qxl_io_create_primary(struct qxl_device *qdev, struct qxl_bo *bo) argument
403 qxl_io_memslot_add(struct qxl_device *qdev, uint8_t id) argument
409 qxl_io_reset(struct qxl_device *qdev) argument
414 qxl_io_monitors_config(struct qxl_device *qdev) argument
419 qxl_surface_id_alloc(struct qxl_device *qdev, struct qxl_bo *surf) argument
449 qxl_surface_id_dealloc(struct qxl_device *qdev, uint32_t surface_id) argument
457 qxl_hw_surface_alloc(struct qxl_device *qdev, struct qxl_bo *surf) argument
504 qxl_hw_surface_dealloc(struct qxl_device *qdev, struct qxl_bo *surf) argument
543 qxl_update_surface(struct qxl_device *qdev, struct qxl_bo *surf) argument
561 qxl_surface_evict_locked(struct qxl_device *qdev, struct qxl_bo *surf, bool do_update_area) argument
571 qxl_surface_evict(struct qxl_device *qdev, struct qxl_bo *surf, bool do_update_area) argument
578 qxl_reap_surf(struct qxl_device *qdev, struct qxl_bo *surf, bool stall) argument
615 qxl_reap_surface_id(struct qxl_device *qdev, int max_to_reap) argument
[all...]
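The two ring-push helpers at lines 178 and 190 share one pattern: translate the release BO into a device-visible physical address, then push a small command descriptor onto the ring. A sketch of the command-ring variant; the qxl_command field names beyond cmd.data are assumed.

	static int
	qxl_push_command_ring_release(struct qxl_device *qdev, struct qxl_release *release,
				      uint32_t type, bool interruptible)
	{
		struct qxl_command cmd;

		cmd.type = type;
		/* The device addresses guest memory through memslots, so convert
		 * the release BO + offset into a device physical address first. */
		cmd.data = qxl_bo_physical_address(qdev, release->release_bo,
						   release->release_offset);

		return qxl_ring_push(qdev->command_ring, &cmd, interruptible);
	}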
qxl_gem.c
34 struct qxl_device *qdev; local
37 qdev = to_qxl(gobj->dev);
39 qxl_surface_evict(qdev, qobj, false);
45 int qxl_gem_object_create(struct qxl_device *qdev, int size, argument
58 r = qxl_bo_create(qdev, size, kernel, false, initial_domain, 0, surf, &qbo);
68 mutex_lock(&qdev->gem.mutex);
69 list_add_tail(&qbo->list, &qdev->gem.objects);
70 mutex_unlock(&qdev->gem.mutex);
81 int qxl_gem_object_create_with_handle(struct qxl_device *qdev, argument
94 r = qxl_gem_object_create(qdev, siz
123 qxl_gem_init(struct qxl_device *qdev) argument
128 qxl_gem_fini(struct qxl_device *qdev) argument
[all...]
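The qxl_gem.c hits show object creation delegating to qxl_bo_create and then threading the new BO onto a device-wide list under gem.mutex. A condensed sketch; the full parameter list and error paths are assumed from context.

	int qxl_gem_object_create(struct qxl_device *qdev, int size,
				  int alignment, int initial_domain,
				  bool discardable, bool kernel,
				  struct qxl_surface *surf,
				  struct drm_gem_object **obj)
	{
		struct qxl_bo *qbo;
		int r;

		*obj = NULL;
		r = qxl_bo_create(qdev, size, kernel, false, initial_domain, 0, surf, &qbo);
		if (r)
			return r;

		*obj = &qbo->tbo.base;

		/* Track every GEM-backed BO so debugfs and teardown can find it. */
		mutex_lock(&qdev->gem.mutex);
		list_add_tail(&qbo->list, &qdev->gem.objects);
		mutex_unlock(&qdev->gem.mutex);

		return 0;
	}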
qxl_debugfs.c
42 struct qxl_device *qdev = to_qxl(node->minor->dev); local
44 seq_printf(m, "%d\n", atomic_read(&qdev->irq_received));
45 seq_printf(m, "%d\n", atomic_read(&qdev->irq_received_display));
46 seq_printf(m, "%d\n", atomic_read(&qdev->irq_received_cursor));
47 seq_printf(m, "%d\n", atomic_read(&qdev->irq_received_io_cmd));
48 seq_printf(m, "%d\n", qdev->irq_received_error);
56 struct qxl_device *qdev = to_qxl(node->minor->dev); local
59 list_for_each_entry(bo, &qdev->gem.objects, list) {
99 void qxl_debugfs_add_files(struct qxl_device *qdev, argument
105 for (i = 0; i < qdev
[all...]
qxl_display.c
49 static int qxl_alloc_client_monitors_config(struct qxl_device *qdev, argument
52 if (qdev->client_monitors_config &&
53 count > qdev->client_monitors_config->count) {
54 kfree(qdev->client_monitors_config);
55 qdev->client_monitors_config = NULL;
57 if (!qdev->client_monitors_config) {
58 qdev->client_monitors_config = kzalloc(
59 struct_size(qdev->client_monitors_config,
61 if (!qdev->client_monitors_config)
64 qdev
75 qxl_display_copy_rom_client_monitors_config(struct qxl_device *qdev) argument
148 qxl_update_offset_props(struct qxl_device *qdev) argument
167 qxl_display_read_client_monitors_config(struct qxl_device *qdev) argument
202 qxl_check_mode(struct qxl_device *qdev, unsigned int width, unsigned int height) argument
218 qxl_check_framebuffer(struct qxl_device *qdev, struct qxl_bo *bo) argument
230 struct qxl_device *qdev = to_qxl(dev); local
251 struct qxl_device *qdev = to_qxl(dev); local
292 qxl_send_monitors_config(struct qxl_device *qdev) argument
319 struct qxl_device *qdev = to_qxl(dev); local
410 struct qxl_device *qdev = to_qxl(fb->dev); local
474 struct qxl_device *qdev = to_qxl(plane->dev); local
485 qxl_primary_apply_cursor(struct qxl_device *qdev, struct drm_plane_state *plane_state) argument
530 qxl_primary_move_cursor(struct qxl_device *qdev, struct drm_plane_state *plane_state) argument
564 qxl_create_cursor(struct qxl_device *qdev, struct qxl_bo *user_bo, int hot_x, int hot_y) argument
641 struct qxl_device *qdev = to_qxl(plane->dev); local
674 struct qxl_device *qdev = to_qxl(plane->dev); local
693 struct qxl_device *qdev = to_qxl(plane->dev); local
708 struct qxl_device *qdev = to_qxl(plane->dev); local
738 qxl_update_dumb_head(struct qxl_device *qdev, int index, struct qxl_bo *bo) argument
766 qxl_calc_dumb_shadow(struct qxl_device *qdev, struct qxl_surface *surf) argument
793 qxl_prepare_shadow(struct qxl_device *qdev, struct qxl_bo *user_bo, int crtc_index) argument
830 struct qxl_device *qdev = to_qxl(plane->dev); local
931 qxl_create_plane(struct qxl_device *qdev, unsigned int possible_crtcs, enum drm_plane_type type) argument
979 struct qxl_device *qdev = to_qxl(dev); local
1021 struct qxl_device *qdev = to_qxl(dev); local
1047 struct qxl_device *qdev = to_qxl(ddev); local
1077 struct qxl_device *qdev = to_qxl(ddev); local
1113 qxl_mode_create_hotplug_mode_update_property(struct qxl_device *qdev) argument
1127 struct qxl_device *qdev = to_qxl(dev); local
1188 qxl_create_monitors_object(struct qxl_device *qdev) argument
1223 qxl_destroy_monitors_object(struct qxl_device *qdev) argument
1244 qxl_modeset_init(struct qxl_device *qdev) argument
1279 qxl_modeset_fini(struct qxl_device *qdev) argument
[all...]
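Line 49's qxl_alloc_client_monitors_config illustrates a grow-only cache: free the buffer only when the requested head count outgrows it, then kzalloc a struct_size()-sized replacement. A sketch assuming a flexible 'heads' array member and GFP_KERNEL allocation.

	static int qxl_alloc_client_monitors_config(struct qxl_device *qdev,
						    unsigned int count)
	{
		/* Reallocate only when the cached buffer is too small for 'count'. */
		if (qdev->client_monitors_config &&
		    count > qdev->client_monitors_config->count) {
			kfree(qdev->client_monitors_config);
			qdev->client_monitors_config = NULL;
		}
		if (!qdev->client_monitors_config) {
			qdev->client_monitors_config = kzalloc(
				struct_size(qdev->client_monitors_config, heads, count),
				GFP_KERNEL);
			if (!qdev->client_monitors_config)
				return -ENOMEM;
		}
		qdev->client_monitors_config->count = count;
		return 0;
	}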
qxl_drv.h
258 int qxl_device_init(struct qxl_device *qdev, struct pci_dev *pdev);
259 void qxl_device_fini(struct qxl_device *qdev);
261 int qxl_modeset_init(struct qxl_device *qdev);
262 void qxl_modeset_fini(struct qxl_device *qdev);
264 int qxl_bo_init(struct qxl_device *qdev);
265 void qxl_bo_fini(struct qxl_device *qdev);
267 void qxl_reinit_memslots(struct qxl_device *qdev);
268 int qxl_surf_evict(struct qxl_device *qdev);
269 int qxl_vram_evict(struct qxl_device *qdev);
280 qxl_bo_physical_address(struct qxl_device *qdev, struc argument
[all...]
qxl_ttm.c
43 struct qxl_device *qdev; local
46 qdev = container_of(mman, struct qxl_device, mman);
47 return qdev;
74 struct qxl_device *qdev = qxl_get_qdev(bdev); local
82 mem->bus.offset = (mem->start << PAGE_SHIFT) + qdev->vram_base;
88 qdev->surfaceram_base;
125 struct qxl_device *qdev; local
130 qdev = to_qxl(qbo->tbo.base.dev);
133 qxl_surface_evict(qdev, qbo, new_mem ? true : false);
183 static int qxl_ttm_init_mem_type(struct qxl_device *qdev, argument
190 qxl_ttm_init(struct qxl_device *qdev) argument
226 qxl_ttm_fini(struct qxl_device *qdev) argument
234 qxl_ttm_debugfs_init(struct qxl_device *qdev) argument
[all...]
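qxl_ttm.c recovers the driver-private device from the TTM layer with two container_of() steps (lines 43-47): the TTM device is embedded in qxl_mman, which in turn is embedded in qxl_device. A sketch of that lookup; the ttm_device parameter type is assumed from current TTM.

	static struct qxl_device *qxl_get_qdev(struct ttm_device *bdev)
	{
		struct qxl_mman *mman;
		struct qxl_device *qdev;

		/* No back-pointer is stored; walk back up the embedding chain. */
		mman = container_of(bdev, struct qxl_mman, bdev);
		qdev = container_of(mman, struct qxl_device, mman);
		return qdev;
	}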
qxl_release.c
60 struct qxl_device *qdev; local
66 qdev = container_of(fence->lock, struct qxl_device, release_lock);
76 qxl_io_notify_oom(qdev);
79 if (!qxl_queue_garbage_collect(qdev, true))
124 qxl_release_alloc(struct qxl_device *qdev, int type, argument
143 spin_lock(&qdev->release_idr_lock);
144 handle = idr_alloc(&qdev->release_idr, release, 1, 0, GFP_NOWAIT);
145 release->base.seqno = ++qdev->release_seqno;
146 spin_unlock(&qdev->release_idr_lock);
177 qxl_release_free(struct qxl_device *qdev, argument
202 qxl_release_bo_alloc(struct qxl_device *qdev, struct qxl_bo **bo, u32 priority) argument
291 qxl_alloc_surface_release_reserved(struct qxl_device *qdev, enum qxl_surface_cmd_type surface_cmd_type, struct qxl_release *create_rel, struct qxl_release **release) argument
322 qxl_alloc_release_reserved(struct qxl_device *qdev, unsigned long size, int type, struct qxl_release **release, struct qxl_bo **rbo) argument
404 qxl_release_from_id_locked(struct qxl_device *qdev, uint64_t id) argument
420 qxl_release_map(struct qxl_device *qdev, struct qxl_release *release) argument
434 qxl_release_unmap(struct qxl_device *qdev, struct qxl_release *release, union qxl_release_info *info) argument
450 struct qxl_device *qdev; local
[all...]
qxl_drv.c
74 struct qxl_device *qdev; local
83 qdev = devm_drm_dev_alloc(&pdev->dev, &qxl_driver,
85 if (IS_ERR(qdev)) {
106 ret = qxl_device_init(qdev, pdev);
110 ret = qxl_modeset_init(qdev);
114 drm_kms_helper_poll_init(&qdev->ddev);
117 ret = drm_dev_register(&qdev->ddev, ent->driver_data);
121 drm_fbdev_generic_setup(&qdev->ddev, 32);
125 qxl_modeset_fini(qdev);
127 qxl_device_fini(qdev);
139 struct qxl_device *qdev = to_qxl(dev); local
172 struct qxl_device *qdev = to_qxl(dev); local
194 struct qxl_device *qdev = to_qxl(dev); local
224 struct qxl_device *qdev = to_qxl(drm_dev); local
254 struct qxl_device *qdev = to_qxl(drm_dev); local
[all...]
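The qxl_drv.c fragments give the probe order: devres-allocate the drm_device embedded in qxl_device, initialize the device and modeset state, register with DRM, then set up generic fbdev. A sketch of that sequence; the PCI enable/BAR handling between allocation and qxl_device_init is elided and the cleanup labels are assumptions.

	static int qxl_pci_probe(struct pci_dev *pdev, const struct pci_device_id *ent)
	{
		struct qxl_device *qdev;
		int ret;

		qdev = devm_drm_dev_alloc(&pdev->dev, &qxl_driver,
					  struct qxl_device, ddev);
		if (IS_ERR(qdev))
			return PTR_ERR(qdev);

		ret = qxl_device_init(qdev, pdev);
		if (ret)
			return ret;

		ret = qxl_modeset_init(qdev);
		if (ret)
			goto device_fini;

		drm_kms_helper_poll_init(&qdev->ddev);

		ret = drm_dev_register(&qdev->ddev, ent->driver_data);
		if (ret)
			goto modeset_fini;

		drm_fbdev_generic_setup(&qdev->ddev, 32);
		return 0;

	modeset_fini:
		qxl_modeset_fini(qdev);
	device_fini:
		qxl_device_fini(qdev);
		return ret;
	}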
qxl_draw.c
31 static int alloc_clips(struct qxl_device *qdev, argument
38 return qxl_alloc_bo_reserved(qdev, release, size, clips_bo);
44 static struct qxl_rect *drawable_set_clipping(struct qxl_device *qdev, argument
65 alloc_drawable(struct qxl_device *qdev, struct qxl_release **release) argument
67 return qxl_alloc_release_reserved(qdev, sizeof(struct qxl_drawable),
72 free_drawable(struct qxl_device *qdev, struct qxl_release *release) argument
74 qxl_release_free(qdev, release);
79 make_drawable(struct qxl_device *qdev, int surface, uint8_t type, argument
86 drawable = (struct qxl_drawable *)qxl_release_map(qdev, release);
114 drawable->mm_time = qdev
126 qxl_draw_dirty_fb(struct qxl_device *qdev, struct drm_framebuffer *fb, struct qxl_bo *bo, unsigned int flags, unsigned int color, struct drm_clip_rect *clips, unsigned int num_clips, int inc, uint32_t dumb_shadow_offset) argument
[all...]
qxl_image.c
33 qxl_allocate_chunk(struct qxl_device *qdev, argument
45 ret = qxl_alloc_bo_reserved(qdev, release, chunk_size, &chunk->bo);
56 qxl_image_alloc_objects(struct qxl_device *qdev, argument
70 ret = qxl_alloc_bo_reserved(qdev, release, sizeof(struct qxl_image), &image->bo);
76 ret = qxl_allocate_chunk(qdev, release, image, sizeof(struct qxl_data_chunk) + stride * height);
86 void qxl_image_free_objects(struct qxl_device *qdev, struct qxl_drm_image *dimage) argument
100 qxl_image_init_helper(struct qxl_device *qdev, argument
127 ptr = qxl_bo_kmap_atomic_page(qdev, chunk_bo, 0);
132 qxl_bo_kunmap_atomic_page(qdev, chunk_bo, ptr);
146 ptr = qxl_bo_kmap_atomic_page(qdev, chunk_b
230 qxl_image_init(struct qxl_device *qdev, struct qxl_release *release, struct qxl_drm_image *dimage, const uint8_t *data, int x, int y, int width, int height, int depth, int stride) argument
[all...]
qxl_object.c
38 struct qxl_device *qdev; local
41 qdev = to_qxl(bo->tbo.base.dev);
43 qxl_surface_evict(qdev, bo, false);
45 mutex_lock(&qdev->gem.mutex);
47 mutex_unlock(&qdev->gem.mutex);
107 int qxl_bo_create(struct qxl_device *qdev, unsigned long size, argument
126 r = drm_gem_object_init(&qdev->ddev, &bo->tbo.base, size);
142 r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type,
147 dev_err(qdev->ddev.dev,
205 void *qxl_bo_kmap_atomic_page(struct qxl_device *qdev, argument
265 qxl_bo_kunmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, void *pmap) argument
353 qxl_bo_force_delete(struct qxl_device *qdev) argument
372 qxl_bo_init(struct qxl_device *qdev) argument
377 qxl_bo_fini(struct qxl_device *qdev) argument
382 qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo) argument
399 qxl_surf_evict(struct qxl_device *qdev) argument
407 qxl_vram_evict(struct qxl_device *qdev) argument
[all...]
qxl_ioctl.c
38 struct qxl_device *qdev = to_qxl(dev); local
48 ret = qxl_gem_object_create_with_handle(qdev, file_priv,
64 struct qxl_device *qdev = to_qxl(dev); local
67 return drm_gem_ttm_dumb_map_offset(file_priv, &qdev->ddev, qxl_map->handle,
85 apply_reloc(struct qxl_device *qdev, struct qxl_reloc_info *info) argument
89 reloc_page = qxl_bo_kmap_atomic_page(qdev, info->dst_bo, info->dst_offset & PAGE_MASK);
90 *(uint64_t *)(reloc_page + (info->dst_offset & ~PAGE_MASK)) = qxl_bo_physical_address(qdev,
93 qxl_bo_kunmap_atomic_page(qdev, info->dst_bo, reloc_page);
97 apply_surf_reloc(struct qxl_device *qdev, struct qxl_reloc_info *info) argument
105 reloc_page = qxl_bo_kmap_atomic_page(qdev, inf
139 qxl_process_single_command(struct qxl_device *qdev, struct drm_qxl_command *cmd, struct drm_file *file_priv) argument
272 struct qxl_device *qdev = to_qxl(dev); local
296 struct qxl_device *qdev = to_qxl(dev); local
345 struct qxl_device *qdev = to_qxl(dev); local
363 struct qxl_device *qdev = to_qxl(dev); local
384 struct qxl_device *qdev = to_qxl(dev); local
[all...]
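apply_reloc at line 85 shows how command relocation works: map only the page that holds the destination slot, write the source BO's device physical address into it, then unmap. A sketch; the src_bo/src_offset field names are assumed since the listing truncates that line.

	static void apply_reloc(struct qxl_device *qdev, struct qxl_reloc_info *info)
	{
		void *reloc_page;

		/* Map just the page that contains the destination offset. */
		reloc_page = qxl_bo_kmap_atomic_page(qdev, info->dst_bo,
						     info->dst_offset & PAGE_MASK);
		/* Patch in the device physical address of the referenced BO. */
		*(uint64_t *)(reloc_page + (info->dst_offset & ~PAGE_MASK)) =
			qxl_bo_physical_address(qdev, info->src_bo, info->src_offset);
		qxl_bo_kunmap_atomic_page(qdev, info->dst_bo, reloc_page);
	}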
qxl_object.h
56 extern int qxl_bo_create(struct qxl_device *qdev,
66 void *qxl_bo_kmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, int page_offset);
67 void qxl_bo_kunmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, void *map);
qxl_dumb.c
35 struct qxl_device *qdev = to_qxl(dev); local
64 r = qxl_gem_object_create_with_handle(qdev, file_priv,
/linux-master/drivers/net/ethernet/qlogic/
qla3xxx.c
103 static int ql_sem_spinlock(struct ql3_adapter *qdev, argument
107 qdev->mem_map_registers;
122 static void ql_sem_unlock(struct ql3_adapter *qdev, u32 sem_mask) argument
125 qdev->mem_map_registers;
130 static int ql_sem_lock(struct ql3_adapter *qdev, u32 sem_mask, u32 sem_bits) argument
133 qdev->mem_map_registers;
144 static int ql_wait_for_drvr_lock(struct ql3_adapter *qdev) argument
149 if (ql_sem_lock(qdev,
151 (QL_RESOURCE_BITS_BASE_CODE | (qdev->mac_index)
153 netdev_printk(KERN_DEBUG, qdev
164 ql_set_register_page(struct ql3_adapter *qdev, u32 page) argument
175 ql_read_common_reg_l(struct ql3_adapter *qdev, u32 __iomem *reg) argument
187 ql_read_common_reg(struct ql3_adapter *qdev, u32 __iomem *reg) argument
192 ql_read_page0_reg_l(struct ql3_adapter *qdev, u32 __iomem *reg) argument
207 ql_read_page0_reg(struct ql3_adapter *qdev, u32 __iomem *reg) argument
214 ql_write_common_reg_l(struct ql3_adapter *qdev, u32 __iomem *reg, u32 value) argument
225 ql_write_common_reg(struct ql3_adapter *qdev, u32 __iomem *reg, u32 value) argument
232 ql_write_nvram_reg(struct ql3_adapter *qdev, u32 __iomem *reg, u32 value) argument
240 ql_write_page0_reg(struct ql3_adapter *qdev, u32 __iomem *reg, u32 value) argument
252 ql_write_page1_reg(struct ql3_adapter *qdev, u32 __iomem *reg, u32 value) argument
264 ql_write_page2_reg(struct ql3_adapter *qdev, u32 __iomem *reg, u32 value) argument
273 ql_disable_interrupts(struct ql3_adapter *qdev) argument
283 ql_enable_interrupts(struct ql3_adapter *qdev) argument
293 ql_release_to_lrg_buf_free_list(struct ql3_adapter *qdev, struct ql_rcv_buf_cb *lrg_buf_cb) argument
348 ql_get_from_lrg_buf_free_list(struct ql3_adapter *qdev) argument
373 fm93c56a_select(struct ql3_adapter *qdev) argument
386 fm93c56a_cmd(struct ql3_adapter *qdev, u32 cmd, u32 eepromAddr) argument
459 fm93c56a_deselect(struct ql3_adapter *qdev) argument
472 fm93c56a_datain(struct ql3_adapter *qdev, unsigned short *value) argument
500 eeprom_readword(struct ql3_adapter *qdev, u32 eepromAddr, unsigned short *value) argument
519 ql_get_nvram_params(struct ql3_adapter *qdev) argument
560 ql_wait_for_mii_ready(struct ql3_adapter *qdev) argument
577 ql_mii_enable_scan_mode(struct ql3_adapter *qdev) argument
604 ql_mii_disable_scan_mode(struct ql3_adapter *qdev) argument
634 ql_mii_write_reg_ex(struct ql3_adapter *qdev, u16 regAddr, u16 value, u32 phyAddr) argument
665 ql_mii_read_reg_ex(struct ql3_adapter *qdev, u16 regAddr, u16 *value, u32 phyAddr) argument
704 ql_mii_write_reg(struct ql3_adapter *qdev, u16 regAddr, u16 value) argument
732 ql_mii_read_reg(struct ql3_adapter *qdev, u16 regAddr, u16 *value) argument
768 ql_petbi_reset(struct ql3_adapter *qdev) argument
773 ql_petbi_start_neg(struct ql3_adapter *qdev) argument
791 ql_petbi_reset_ex(struct ql3_adapter *qdev) argument
797 ql_petbi_start_neg_ex(struct ql3_adapter *qdev) argument
818 ql_petbi_init(struct ql3_adapter *qdev) argument
824 ql_petbi_init_ex(struct ql3_adapter *qdev) argument
830 ql_is_petbi_neg_pause(struct ql3_adapter *qdev) argument
840 phyAgereSpecificInit(struct ql3_adapter *qdev, u32 miiAddr) argument
874 getPhyType(struct ql3_adapter *qdev, u16 phyIdReg0, u16 phyIdReg1) argument
907 ql_phy_get_speed(struct ql3_adapter *qdev) argument
938 ql_is_full_dup(struct ql3_adapter *qdev) argument
958 ql_is_phy_neg_pause(struct ql3_adapter *qdev) argument
968 PHY_Setup(struct ql3_adapter *qdev) argument
1034 ql_mac_enable(struct ql3_adapter *qdev, u32 enable) argument
1054 ql_mac_cfg_soft_reset(struct ql3_adapter *qdev, u32 enable) argument
1074 ql_mac_cfg_gig(struct ql3_adapter *qdev, u32 enable) argument
1094 ql_mac_cfg_full_dup(struct ql3_adapter *qdev, u32 enable) argument
1114 ql_mac_cfg_pause(struct ql3_adapter *qdev, u32 enable) argument
1136 ql_is_fiber(struct ql3_adapter *qdev) argument
1156 ql_is_auto_cfg(struct ql3_adapter *qdev) argument
1166 ql_is_auto_neg_complete(struct ql3_adapter *qdev) argument
1194 ql_is_neg_pause(struct ql3_adapter *qdev) argument
1202 ql_auto_neg_error(struct ql3_adapter *qdev) argument
1221 ql_get_link_speed(struct ql3_adapter *qdev) argument
1229 ql_is_link_full_dup(struct ql3_adapter *qdev) argument
1240 ql_link_down_detect(struct ql3_adapter *qdev) argument
1264 ql_link_down_detect_clear(struct ql3_adapter *qdev) argument
1294 ql_this_adapter_controls_port(struct ql3_adapter *qdev) argument
1323 ql_phy_reset_ex(struct ql3_adapter *qdev) argument
1329 ql_phy_start_neg_ex(struct ql3_adapter *qdev) argument
1402 ql_phy_init_ex(struct ql3_adapter *qdev) argument
1412 ql_get_link_state(struct ql3_adapter *qdev) argument
1437 ql_port_start(struct ql3_adapter *qdev) argument
1457 ql_finish_auto_neg(struct ql3_adapter *qdev) argument
1518 struct ql3_adapter *qdev = local
1582 ql_get_phy_owner(struct ql3_adapter *qdev) argument
1593 ql_init_scan_mode(struct ql3_adapter *qdev) argument
1612 ql_mii_setup(struct ql3_adapter *qdev) argument
1649 ql_supported_modes(struct ql3_adapter *qdev) argument
1657 ql_get_auto_cfg_status(struct ql3_adapter *qdev) argument
1674 ql_get_speed(struct ql3_adapter *qdev) argument
1691 ql_get_full_dup(struct ql3_adapter *qdev) argument
1711 struct ql3_adapter *qdev = netdev_priv(ndev); local
1738 struct ql3_adapter *qdev = netdev_priv(ndev); local
1748 struct ql3_adapter *qdev = netdev_priv(ndev); local
1754 struct ql3_adapter *qdev = netdev_priv(ndev); local
1761 struct ql3_adapter *qdev = netdev_priv(ndev); local
1785 ql_populate_free_queue(struct ql3_adapter *qdev) argument
1843 ql_update_small_bufq_prod_index(struct ql3_adapter *qdev) argument
1866 ql_update_lrg_bufq_prod_index(struct ql3_adapter *qdev) argument
1916 ql_process_mac_tx_intr(struct ql3_adapter *qdev, struct ob_mac_iocb_rsp *mac_rsp) argument
1969 ql_get_sbuf(struct ql3_adapter *qdev) argument
1976 ql_get_lbuf(struct ql3_adapter *qdev) argument
1998 ql_process_mac_rx_intr(struct ql3_adapter *qdev, struct ib_mac_iocb_rsp *ib_mac_rsp_ptr) argument
2037 ql_process_macip_rx_intr(struct ql3_adapter *qdev, struct ib_ip_iocb_rsp *ib_ip_rsp_ptr) argument
2108 ql_tx_rx_clean(struct ql3_adapter *qdev, int budget) argument
2177 struct ql3_adapter *qdev = container_of(napi, struct ql3_adapter, napi); local
2203 struct ql3_adapter *qdev = netdev_priv(ndev); local
2260 ql_get_seg_count(struct ql3_adapter *qdev, unsigned short frags) argument
2300 ql_send_map(struct ql3_adapter *qdev, struct ob_mac_iocb_req *mac_iocb_ptr, struct ql_tx_buf_cb *tx_cb, struct sk_buff *skb) argument
2459 struct ql3_adapter *qdev = netdev_priv(ndev); local
2512 ql_alloc_net_req_rsp_queues(struct ql3_adapter *qdev) argument
2551 ql_free_net_req_rsp_queues(struct ql3_adapter *qdev) argument
2571 ql_alloc_buffer_queues(struct ql3_adapter *qdev) argument
2629 ql_free_buffer_queues(struct ql3_adapter *qdev) argument
2651 ql_alloc_small_buffers(struct ql3_adapter *qdev) argument
2690 ql_free_small_buffers(struct ql3_adapter *qdev) argument
2706 ql_free_large_buffers(struct ql3_adapter *qdev) argument
2726 ql_init_large_buffers(struct ql3_adapter *qdev) argument
2742 ql_alloc_large_buffers(struct ql3_adapter *qdev) argument
2798 ql_free_send_free_list(struct ql3_adapter *qdev) argument
2811 ql_create_send_free_list(struct ql3_adapter *qdev) argument
2831 ql_alloc_mem_resources(struct ql3_adapter *qdev) argument
2922 ql_free_mem_resources(struct ql3_adapter *qdev) argument
2937 ql_init_misc_registers(struct ql3_adapter *qdev) argument
2995 ql_adapter_initialize(struct ql3_adapter *qdev) argument
3250 ql_adapter_reset(struct ql3_adapter *qdev) argument
3329 ql_set_mac_info(struct ql3_adapter *qdev) argument
3377 struct ql3_adapter *qdev = netdev_priv(ndev); local
3402 ql_adapter_down(struct ql3_adapter *qdev, int do_reset) argument
3451 ql_adapter_up(struct ql3_adapter *qdev) argument
3523 ql_cycle_adapter(struct ql3_adapter *qdev, int reset) argument
3538 struct ql3_adapter *qdev = netdev_priv(ndev); local
3553 struct ql3_adapter *qdev = netdev_priv(ndev); local
3559 struct ql3_adapter *qdev = netdev_priv(ndev); local
3594 struct ql3_adapter *qdev = netdev_priv(ndev); local
3610 struct ql3_adapter *qdev = local
3710 struct ql3_adapter *qdev = local
3716 ql_get_board_info(struct ql3_adapter *qdev) argument
3738 struct ql3_adapter *qdev = from_timer(qdev, t, adapter_timer); local
3755 struct ql3_adapter *qdev = NULL; local
3904 struct ql3_adapter *qdev = netdev_priv(ndev); local
[all...]
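The qla3xxx.c results are almost all helpers taking the adapter as qdev; the net_device callbacks (the "local" hits around line 1711) recover it with netdev_priv() and then reach the mapped registers through qdev->mem_map_registers. A minimal illustration of that entry pattern; the callback name is only an example.

	static int ql3xxx_example_callback(struct net_device *ndev)
	{
		/* Every ndo/ethtool hook starts by recovering the adapter state. */
		struct ql3_adapter *qdev = netdev_priv(ndev);

		/* Register helpers such as ql_read_common_reg() then take qdev
		 * and locate the BAR mapping through qdev->mem_map_registers. */
		netdev_dbg(ndev, "adapter %p, mac index %u\n", qdev, qdev->mac_index);
		return 0;
	}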
/linux-master/net/qrtr/
smd.c
23 struct qrtr_smd_dev *qdev = dev_get_drvdata(&rpdev->dev); local
26 if (!qdev)
29 rc = qrtr_endpoint_post(&qdev->ep, data, len);
31 dev_err(qdev->dev, "invalid ipcrouter packet\n");
42 struct qrtr_smd_dev *qdev = container_of(ep, struct qrtr_smd_dev, ep); local
49 rc = rpmsg_send(qdev->channel, skb->data, skb->len);
61 struct qrtr_smd_dev *qdev; local
64 qdev = devm_kzalloc(&rpdev->dev, sizeof(*qdev), GFP_KERNEL);
65 if (!qdev)
85 struct qrtr_smd_dev *qdev = dev_get_drvdata(&rpdev->dev); local
[all...]
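The smd.c entries outline the receive path: the rpmsg callback hands every inbound buffer to the generic QRTR endpoint with qrtr_endpoint_post() and reports malformed packets; mhi.c below follows the same shape. A sketch of that callback; its name and full prototype are taken from the standard rpmsg callback signature, and the error handling is condensed.

	static int qcom_smd_qrtr_callback(struct rpmsg_device *rpdev, void *data,
					  int len, void *priv, u32 addr)
	{
		struct qrtr_smd_dev *qdev = dev_get_drvdata(&rpdev->dev);
		int rc;

		if (!qdev)
			return -EAGAIN;

		/* Hand the raw buffer to the IPC router core for parsing/routing. */
		rc = qrtr_endpoint_post(&qdev->ep, data, len);
		if (rc == -EINVAL) {
			dev_err(qdev->dev, "invalid ipcrouter packet\n");
			/* Drop the bad packet but keep the channel alive. */
			rc = 0;
		}

		return rc;
	}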
mhi.c
24 struct qrtr_mhi_dev *qdev = dev_get_drvdata(&mhi_dev->dev); local
27 if (!qdev || mhi_res->transaction_status)
30 rc = qrtr_endpoint_post(&qdev->ep, mhi_res->buf_addr,
33 dev_err(qdev->dev, "invalid ipcrouter packet\n");
50 struct qrtr_mhi_dev *qdev = container_of(ep, struct qrtr_mhi_dev, ep); local
60 rc = mhi_queue_skb(qdev->mhi_dev, DMA_TO_DEVICE, skb, skb->len,
78 struct qrtr_mhi_dev *qdev; local
81 qdev = devm_kzalloc(&mhi_dev->dev, sizeof(*qdev), GFP_KERNEL);
82 if (!qdev)
108 struct qrtr_mhi_dev *qdev = dev_get_drvdata(&mhi_dev->dev); local
[all...]
/linux-master/drivers/accel/qaic/
qaic_drv.c
89 struct qaic_device *qdev = to_qaic_device(dev); local
91 pci_set_drvdata(qdev->pdev, NULL);
106 struct qaic_device *qdev = qddev->qdev; local
111 rcu_id = srcu_read_lock(&qdev->dev_lock);
112 if (qdev->dev_state != QAIC_ONLINE) {
142 srcu_read_unlock(&qdev->dev_lock, rcu_id);
151 srcu_read_unlock(&qdev->dev_lock, rcu_id);
159 struct qaic_device *qdev; local
167 qdev
219 qaic_create_drm_device(struct qaic_device *qdev, s32 partition_id) argument
238 qaic_destroy_drm_device(struct qaic_device *qdev, s32 partition_id) argument
277 struct qaic_device *qdev; local
325 qaic_notify_reset(struct qaic_device *qdev) argument
338 qaic_dev_reset_clean_local_state(struct qaic_device *qdev) argument
353 struct qaic_device *qdev; local
419 init_pci(struct qaic_device *qdev, struct pci_dev *pdev) argument
458 init_msi(struct qaic_device *qdev, struct pci_dev *pdev) argument
510 struct qaic_device *qdev; local
547 struct qaic_device *qdev = pci_get_drvdata(pdev); local
571 struct qaic_device *qdev = pci_get_drvdata(pdev); local
580 struct qaic_device *qdev = pci_get_drvdata(pdev); local
[all...]
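qaic_drv.c shows the guard used on user-facing entry points: take an SRCU read lock on qdev->dev_lock and bail out unless dev_state is QAIC_ONLINE, so opens race safely with device reset. A sketch of that guard with the real body elided; the function name here is hypothetical.

	static int qaic_open_sketch(struct drm_device *dev, struct drm_file *file)
	{
		struct qaic_drm_device *qddev = to_qaic_drm_device(dev);
		struct qaic_device *qdev = qddev->qdev;
		int rcu_id, ret = 0;

		rcu_id = srcu_read_lock(&qdev->dev_lock);
		if (qdev->dev_state != QAIC_ONLINE) {
			ret = -ENODEV;
			goto out;
		}

		/* ... per-user state setup happens here in the real driver ... */

	out:
		srcu_read_unlock(&qdev->dev_lock, rcu_id);
		return ret;
	}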
qaic_timesync.c
69 * @qdev: Pointer to the root device struct driven by QAIC driver.
74 * @dev: Device pointer to qdev->pdev->dev stored for easy access.
78 struct qaic_device *qdev; member in struct:mqts_dev
94 struct qaic_device *qdev; member in struct:qts_resp
176 struct qaic_device *qdev = pci_get_drvdata(to_pci_dev(mhi_dev->mhi_cntrl->cntrl_dev)); local
189 mqtsdev->qdev = qdev;
190 mqtsdev->dev = &qdev->pdev->dev;
204 mqtsdev->qtimer_addr = qdev->bar_0 + QTIMER_REG_OFFSET;
253 struct qaic_device *qdev local
293 qaic_boot_timesync_queue_resp(struct mhi_device *mhi_dev, struct qaic_device *qdev) argument
317 struct qaic_device *qdev; local
326 struct qaic_device *qdev = pci_get_drvdata(to_pci_dev(mhi_dev->mhi_cntrl->cntrl_dev)); local
[all...]
qaic_control.c
236 struct qaic_device *qdev; member in struct:resp_work
297 static void save_dbc_buf(struct qaic_device *qdev, struct ioctl_resources *resources, argument
303 wait_event_interruptible(qdev->dbc[dbc_id].dbc_release, !qdev->dbc[dbc_id].in_use);
304 qdev->dbc[dbc_id].req_q_base = resources->buf;
305 qdev->dbc[dbc_id].rsp_q_base = resources->rsp_q_base;
306 qdev->dbc[dbc_id].dma_addr = resources->dma_addr;
307 qdev->dbc[dbc_id].total_size = resources->total_size;
308 qdev->dbc[dbc_id].nelem = resources->nelem;
309 enable_dbc(qdev, dbc_i
315 free_dbc_buf(struct qaic_device *qdev, struct ioctl_resources *resources) argument
323 free_dma_xfers(struct qaic_device *qdev, struct ioctl_resources *resources) argument
353 encode_passthrough(struct qaic_device *qdev, void *trans, struct wrapper_list *wrappers, u32 *user_len) argument
391 find_and_map_user_pages(struct qaic_device *qdev, struct qaic_manage_trans_dma_xfer *in_trans, struct ioctl_resources *resources, struct dma_xfer *xfer) argument
548 cleanup_xfer(struct qaic_device *qdev, struct dma_xfer *xfer) argument
560 encode_dma(struct qaic_device *qdev, void *trans, struct wrapper_list *wrappers, u32 *user_len, struct ioctl_resources *resources, struct qaic_user *usr) argument
629 encode_activate(struct qaic_device *qdev, void *trans, struct wrapper_list *wrappers, u32 *user_len, struct ioctl_resources *resources) argument
705 encode_deactivate(struct qaic_device *qdev, void *trans, u32 *user_len, struct qaic_user *usr) argument
718 encode_status(struct qaic_device *qdev, void *trans, struct wrapper_list *wrappers, u32 *user_len) argument
751 encode_message(struct qaic_device *qdev, struct manage_msg *user_msg, struct wrapper_list *wrappers, struct ioctl_resources *resources, struct qaic_user *usr) argument
828 decode_passthrough(struct qaic_device *qdev, void *trans, struct manage_msg *user_msg, u32 *msg_len) argument
853 decode_activate(struct qaic_device *qdev, void *trans, struct manage_msg *user_msg, u32 *msg_len, struct ioctl_resources *resources, struct qaic_user *usr) argument
899 decode_deactivate(struct qaic_device *qdev, void *trans, u32 *msg_len, struct qaic_user *usr) argument
929 decode_status(struct qaic_device *qdev, void *trans, struct manage_msg *user_msg, u32 *user_len, struct wire_msg *msg) argument
959 decode_message(struct qaic_device *qdev, struct manage_msg *user_msg, struct wire_msg *msg, struct ioctl_resources *resources, struct qaic_user *usr) argument
1015 msg_xfer(struct qaic_device *qdev, struct wrapper_list *wrappers, u32 seq_num, bool ignore_signal) argument
1126 abort_dma_cont(struct qaic_device *qdev, struct wrapper_list *wrappers, u32 dma_chunk_id) argument
1174 qaic_manage_msg_xfer(struct qaic_device *qdev, struct qaic_user *usr, struct manage_msg *user_msg, struct ioctl_resources *resources, struct wire_msg **rsp) argument
1241 qaic_manage(struct qaic_device *qdev, struct qaic_user *usr, struct manage_msg *user_msg) argument
1288 struct qaic_device *qdev; local
1357 get_cntl_version(struct qaic_device *qdev, struct qaic_user *usr, u16 *major, u16 *minor) argument
1400 struct qaic_device *qdev = resp->qdev; local
1448 struct qaic_device *qdev = dev_get_drvdata(&mhi_dev->dev); local
1469 qaic_control_open(struct qaic_device *qdev) argument
1493 qaic_control_close(struct qaic_device *qdev) argument
1498 qaic_release_usr(struct qaic_device *qdev, struct qaic_user *usr) argument
1546 wake_all_cntl(struct qaic_device *qdev) argument
[all...]
qaic.h
33 #define to_qaic_device(dev) (to_qaic_drm_device((dev))->qdev)
61 struct qaic_device *qdev; member in struct:dma_bridge_chan
162 struct qaic_device *qdev; member in struct:qaic_drm_device
267 int get_cntl_version(struct qaic_device *qdev, struct qaic_user *usr, u16 *major, u16 *minor);
273 int qaic_control_open(struct qaic_device *qdev);
274 void qaic_control_close(struct qaic_device *qdev);
275 void qaic_release_usr(struct qaic_device *qdev, struct qaic_user *usr);
279 int disable_dbc(struct qaic_device *qdev, u32 dbc_id, struct qaic_user *usr);
280 void enable_dbc(struct qaic_device *qdev, u32 dbc_id, struct qaic_user *usr);
281 void wakeup_dbc(struct qaic_device *qdev, u3
[all...]
qaic_data.c
172 static int clone_range_of_sgt_for_slice(struct qaic_device *qdev, struct sg_table **sgt_out, argument
252 static int encode_reqs(struct qaic_device *qdev, struct bo_slice *slice, argument
387 static int qaic_map_one_slice(struct qaic_device *qdev, struct qaic_bo *bo, argument
394 ret = clone_range_of_sgt_for_slice(qdev, &sgt, bo->sgt, slice_ent->size, slice_ent->offset);
418 ret = encode_reqs(qdev, slice, slice_ent);
440 static int create_sgt(struct qaic_device *qdev, struct sg_table **sgt_out, u64 size) argument
554 static int qaic_validate_req(struct qaic_device *qdev, struct qaic_attach_slice_entry *slice_ent, argument
680 struct qaic_device *qdev; local
700 qdev = usr->qddev->qdev;
750 struct qaic_device *qdev; local
851 qaic_prepare_export_bo(struct qaic_device *qdev, struct qaic_bo *bo, struct qaic_attach_slice_hdr *hdr) argument
863 qaic_prepare_bo(struct qaic_device *qdev, struct qaic_bo *bo, struct qaic_attach_slice_hdr *hdr) argument
885 qaic_unprepare_export_bo(struct qaic_device *qdev, struct qaic_bo *bo) argument
890 qaic_unprepare_bo(struct qaic_device *qdev, struct qaic_bo *bo) argument
913 qaic_attach_slicing_bo(struct qaic_device *qdev, struct qaic_bo *bo, struct qaic_attach_slice_hdr *hdr, struct qaic_attach_slice_entry *slice_ent) argument
942 struct qaic_device *qdev; local
1070 copy_exec_reqs(struct qaic_device *qdev, struct bo_slice *slice, u32 dbc_id, u32 head, u32 *ptail) argument
1099 copy_partial_exec_reqs(struct qaic_device *qdev, struct bo_slice *slice, u64 resize, struct dma_bridge_chan *dbc, u32 head, u32 *ptail) argument
1166 send_bo_list_to_device(struct qaic_device *qdev, struct drm_file *file_priv, struct qaic_execute_entry *exec, unsigned int count, bool is_partial, struct dma_bridge_chan *dbc, u32 head, u32 *tail) argument
1297 struct qaic_device *qdev; local
1537 struct qaic_device *qdev; local
1663 struct qaic_device *qdev; local
1737 struct qaic_device *qdev; local
1809 detach_slice_bo(struct qaic_device *qdev, struct qaic_bo *bo) argument
1824 struct qaic_device *qdev; local
1894 empty_xfer_list(struct qaic_device *qdev, struct dma_bridge_chan *dbc) argument
1918 disable_dbc(struct qaic_device *qdev, u32 dbc_id, struct qaic_user *usr) argument
1936 enable_dbc(struct qaic_device *qdev, u32 dbc_id, struct qaic_user *usr) argument
1941 wakeup_dbc(struct qaic_device *qdev, u32 dbc_id) argument
1955 release_dbc(struct qaic_device *qdev, u32 dbc_id) argument
[all...]
/linux-master/drivers/s390/cio/
qdio.h
275 struct qdio_irq *qdev = __irq; \
276 if (qdev->perf_stat_enabled) \
277 (qdev->perf_stat.__attr)++; \
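The qdio.h fragment is the body of a statistics macro: the counter named by __attr is incremented only when perf stats are enabled for the irq. A reconstruction of how those three lines fit together; the macro name and wrapper are assumptions, only the guarded increment comes from the listing.

	#define qperf_inc(__irq, __attr)				\
	do {								\
		struct qdio_irq *qdev = __irq;				\
		if (qdev->perf_stat_enabled)				\
			(qdev->perf_stat.__attr)++;			\
	} while (0)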
