/linux-4.4.14/drivers/block/drbd/ |
D | drbd_actlog.c | 95 void *drbd_md_get_buffer(struct drbd_device *device, const char *intent) in drbd_md_get_buffer() argument 99 wait_event(device->misc_wait, in drbd_md_get_buffer() 100 (r = atomic_cmpxchg(&device->md_io.in_use, 0, 1)) == 0 || in drbd_md_get_buffer() 101 device->state.disk <= D_FAILED); in drbd_md_get_buffer() 106 device->md_io.current_use = intent; in drbd_md_get_buffer() 107 device->md_io.start_jif = jiffies; in drbd_md_get_buffer() 108 device->md_io.submit_jif = device->md_io.start_jif - 1; in drbd_md_get_buffer() 109 return page_address(device->md_io.page); in drbd_md_get_buffer() 112 void drbd_md_put_buffer(struct drbd_device *device) in drbd_md_put_buffer() argument 114 if (atomic_dec_and_test(&device->md_io.in_use)) in drbd_md_put_buffer() [all …]
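A hedged sketch of the pattern visible in the drbd_actlog.c excerpt above: drbd_md_get_buffer()/drbd_md_put_buffer() hand out a single metadata page to one user at a time, guarded by an atomic "in use" flag and a wait queue. The names below (my_dev, my_md_get_buffer, ...) are invented for illustration, and the disk-state check and timing fields of the real code are omitted.

/* Illustrative sketch only; fields assumed initialized elsewhere
 * (init_waitqueue_head(), atomic_set(..., 0), alloc_page()). */
#include <linux/atomic.h>
#include <linux/wait.h>
#include <linux/mm.h>

struct my_dev {
	atomic_t md_in_use;		/* 0 = free, 1 = handed out */
	wait_queue_head_t misc_wait;	/* woken when the buffer is released */
	struct page *md_page;		/* single preallocated metadata page */
};

static void *my_md_get_buffer(struct my_dev *dev)
{
	/* Sleep until we win the 0 -> 1 transition on the flag. */
	wait_event(dev->misc_wait,
		   atomic_cmpxchg(&dev->md_in_use, 0, 1) == 0);
	return page_address(dev->md_page);
}

static void my_md_put_buffer(struct my_dev *dev)
{
	/* Sole user done: drop the flag and wake any waiter. */
	if (atomic_dec_and_test(&dev->md_in_use))
		wake_up(&dev->misc_wait);
}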
|
D | drbd_worker.c | 70 struct drbd_device *device; in drbd_md_endio() local 72 device = bio->bi_private; in drbd_md_endio() 73 device->md_io.error = bio->bi_error; in drbd_md_endio() 86 drbd_md_put_buffer(device); in drbd_md_endio() 87 device->md_io.done = 1; in drbd_md_endio() 88 wake_up(&device->misc_wait); in drbd_md_endio() 90 if (device->ldev) /* special case: drbd_md_read() during drbd_adm_attach() */ in drbd_md_endio() 91 put_ldev(device); in drbd_md_endio() 101 struct drbd_device *device = peer_device->device; in drbd_endio_read_sec_final() local 103 spin_lock_irqsave(&device->resource->req_lock, flags); in drbd_endio_read_sec_final() [all …]
|
D | drbd_req.c | 34 static bool drbd_may_do_local_read(struct drbd_device *device, sector_t sector, int size); 37 static void _drbd_start_io_acct(struct drbd_device *device, struct drbd_request *req) in _drbd_start_io_acct() argument 40 &device->vdisk->part0); in _drbd_start_io_acct() 44 static void _drbd_end_io_acct(struct drbd_device *device, struct drbd_request *req) in _drbd_end_io_acct() argument 47 &device->vdisk->part0, req->start_jif); in _drbd_end_io_acct() 50 static struct drbd_request *drbd_req_new(struct drbd_device *device, in drbd_req_new() argument 62 req->device = device; in drbd_req_new() 87 struct drbd_device *device = req->device; in drbd_remove_request_interval() local 94 wake_up(&device->misc_wait); in drbd_remove_request_interval() 100 struct drbd_device *device = req->device; in drbd_req_destroy() local [all …]
|
D | drbd_receiver.c | 155 static struct page *__drbd_alloc_pages(struct drbd_device *device, in __drbd_alloc_pages() argument 201 static void reclaim_finished_net_peer_reqs(struct drbd_device *device, in reclaim_finished_net_peer_reqs() argument 211 list_for_each_entry_safe(peer_req, tmp, &device->net_ee, w.list) { in reclaim_finished_net_peer_reqs() 218 static void drbd_kick_lo_and_reclaim_net(struct drbd_device *device) in drbd_kick_lo_and_reclaim_net() argument 223 spin_lock_irq(&device->resource->req_lock); in drbd_kick_lo_and_reclaim_net() 224 reclaim_finished_net_peer_reqs(device, &reclaimed); in drbd_kick_lo_and_reclaim_net() 225 spin_unlock_irq(&device->resource->req_lock); in drbd_kick_lo_and_reclaim_net() 228 drbd_free_net_peer_req(device, peer_req); in drbd_kick_lo_and_reclaim_net() 254 struct drbd_device *device = peer_device->device; in drbd_alloc_pages() local 265 if (atomic_read(&device->pp_in_use) < mxb) in drbd_alloc_pages() [all …]
|
D | drbd_nl.c | 194 adm_ctx->device = minor_to_device(d_in->minor); in drbd_adm_prepare() 199 if (adm_ctx->device) in drbd_adm_prepare() 200 kref_get(&adm_ctx->device->kref); in drbd_adm_prepare() 206 if (!adm_ctx->device && (flags & DRBD_ADM_NEED_MINOR)) { in drbd_adm_prepare() 222 if (adm_ctx->device) { in drbd_adm_prepare() 238 if (adm_ctx->device && adm_ctx->resource && in drbd_adm_prepare() 239 adm_ctx->device->resource != adm_ctx->resource) { in drbd_adm_prepare() 242 adm_ctx->device->resource->name); in drbd_adm_prepare() 246 if (adm_ctx->device && in drbd_adm_prepare() 248 adm_ctx->volume != adm_ctx->device->vnr) { in drbd_adm_prepare() [all …]
|
D | drbd_main.c | 166 int _get_ldev_if_state(struct drbd_device *device, enum drbd_disk_state mins) in _get_ldev_if_state() argument 170 atomic_inc(&device->local_cnt); in _get_ldev_if_state() 171 io_allowed = (device->state.disk >= mins); in _get_ldev_if_state() 173 if (atomic_dec_and_test(&device->local_cnt)) in _get_ldev_if_state() 174 wake_up(&device->misc_wait); in _get_ldev_if_state() 307 void tl_abort_disk_io(struct drbd_device *device) in tl_abort_disk_io() argument 309 struct drbd_connection *connection = first_peer_device(device)->connection; in tl_abort_disk_io() 316 if (req->device != device) in tl_abort_disk_io() 494 minor = device_to_minor(peer_device->device); in conn_lowest_minor() 698 err = __send_command(peer_device->connection, peer_device->device->vnr, in drbd_send_command() [all …]
|
D | drbd_int.h | 69 void tl_abort_disk_io(struct drbd_device *device); 103 #define __drbd_printk_device(level, device, fmt, args...) \ argument 104 dev_printk(level, disk_to_dev((device)->vdisk), fmt, ## args) 106 dev_printk(level, disk_to_dev((peer_device)->device->vdisk), fmt, ## args) 147 #define dynamic_drbd_dbg(device, fmt, args...) \ argument 148 dynamic_dev_dbg(disk_to_dev(device->vdisk), fmt, ## args) 150 #define D_ASSERT(device, exp) do { \ argument 152 drbd_err(device, "ASSERT( " #exp " ) in %s:%d\n", __FILE__, __LINE__); \ 163 drbd_err(device, "ASSERTION %s FAILED in %s\n", \ 185 _drbd_insert_fault(struct drbd_device *device, unsigned int type); [all …]
|
D | drbd_state.c | 35 struct drbd_device *device; member 52 static void after_state_ch(struct drbd_device *device, union drbd_state os, 57 static union drbd_state sanitize_state(struct drbd_device *device, union drbd_state os, 73 struct drbd_device *device = peer_device->device; in conn_all_vols_unconf() local 74 if (device->state.disk != D_DISKLESS || in conn_all_vols_unconf() 75 device->state.conn != C_STANDALONE || in conn_all_vols_unconf() 76 device->state.role != R_SECONDARY) { in conn_all_vols_unconf() 113 struct drbd_device *device = peer_device->device; in conn_highest_role() local 114 role = max_role(role, device->state.role); in conn_highest_role() 129 struct drbd_device *device = peer_device->device; in conn_highest_peer() local [all …]
|
D | drbd_proc.c | 63 static void drbd_get_syncer_progress(struct drbd_device *device, in drbd_get_syncer_progress() argument 69 typecheck(unsigned long, device->rs_total); in drbd_get_syncer_progress() 70 *rs_total = device->rs_total; in drbd_get_syncer_progress() 77 *bits_left = device->ov_left; in drbd_get_syncer_progress() 79 *bits_left = drbd_bm_total_weight(device) - device->rs_failed; in drbd_get_syncer_progress() 113 static void drbd_syncer_progress(struct drbd_device *device, struct seq_file *seq, in drbd_syncer_progress() argument 121 drbd_get_syncer_progress(device, state, &rs_total, &rs_left, &res); in drbd_syncer_progress() 164 i = (device->rs_last_mark + 2) % DRBD_SYNC_MARKS; in drbd_syncer_progress() 165 dt = (jiffies - device->rs_mark_time[i]) / HZ; in drbd_syncer_progress() 171 db = device->rs_mark_left[i] - rs_left; in drbd_syncer_progress() [all …]
|
D | drbd_bitmap.c | 118 static void __bm_print_lock_info(struct drbd_device *device, const char *func) in __bm_print_lock_info() argument 120 struct drbd_bitmap *b = device->bitmap; in __bm_print_lock_info() 123 drbd_err(device, "FIXME %s[%d] in %s, bitmap locked for '%s' by %s[%d]\n", in __bm_print_lock_info() 129 void drbd_bm_lock(struct drbd_device *device, char *why, enum bm_flag flags) in drbd_bm_lock() argument 131 struct drbd_bitmap *b = device->bitmap; in drbd_bm_lock() 135 drbd_err(device, "FIXME no bitmap in drbd_bm_lock!?\n"); in drbd_bm_lock() 142 drbd_warn(device, "%s[%d] going to '%s' but bitmap already locked for '%s' by %s[%d]\n", in drbd_bm_lock() 149 drbd_err(device, "FIXME bitmap already locked in bm_lock\n"); in drbd_bm_lock() 156 void drbd_bm_unlock(struct drbd_device *device) in drbd_bm_unlock() argument 158 struct drbd_bitmap *b = device->bitmap; in drbd_bm_unlock() [all …]
|
D | drbd_debugfs.c | 123 seq_printf(m, "%u\t%u\t", req->device->minor, req->device->vnr); in seq_print_minor_vnr_req() 129 struct drbd_device *device; in seq_print_resource_pending_meta_io() local 134 idr_for_each_entry(&resource->devices, device, i) { in seq_print_resource_pending_meta_io() 140 tmp = device->md_io; in seq_print_resource_pending_meta_io() 143 device->minor, device->vnr, in seq_print_resource_pending_meta_io() 157 struct drbd_device *device; in seq_print_waiting_for_AL() local 162 idr_for_each_entry(&resource->devices, device, i) { in seq_print_waiting_for_AL() 165 int n = atomic_read(&device->ap_actlog_cnt); in seq_print_waiting_for_AL() 167 spin_lock_irq(&device->resource->req_lock); in seq_print_waiting_for_AL() 168 req = list_first_entry_or_null(&device->pending_master_completion[1], in seq_print_waiting_for_AL() [all …]
|
/linux-4.4.14/include/linux/ |
D | pm_runtime.h | 34 extern int pm_generic_runtime_suspend(struct device *dev); 35 extern int pm_generic_runtime_resume(struct device *dev); 36 extern int pm_runtime_force_suspend(struct device *dev); 37 extern int pm_runtime_force_resume(struct device *dev); 39 extern int __pm_runtime_idle(struct device *dev, int rpmflags); 40 extern int __pm_runtime_suspend(struct device *dev, int rpmflags); 41 extern int __pm_runtime_resume(struct device *dev, int rpmflags); 42 extern int pm_schedule_suspend(struct device *dev, unsigned int delay); 43 extern int __pm_runtime_set_status(struct device *dev, unsigned int status); 44 extern int pm_runtime_barrier(struct device *dev); [all …]
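A usage sketch for the runtime-PM declarations listed in the pm_runtime.h entry: a driver typically brackets hardware access with a get/put pair so the PM core can resume the device on demand and suspend it again when idle. my_hw_do_work() is a placeholder, not part of the real API.

/* Minimal runtime-PM bracketing; illustrative only. */
#include <linux/pm_runtime.h>

static int my_hw_do_work(struct device *dev)
{
	int ret;

	ret = pm_runtime_get_sync(dev);	/* resume if suspended, take a reference */
	if (ret < 0) {
		pm_runtime_put_noidle(dev);	/* balance the reference on failure */
		return ret;
	}

	/* ... touch the hardware here ... */

	pm_runtime_put(dev);		/* drop the reference, allow idling */
	return 0;
}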
|
D | device.h | 32 struct device; 108 struct device *dev_root; 114 int (*match)(struct device *dev, struct device_driver *drv); 115 int (*uevent)(struct device *dev, struct kobj_uevent_env *env); 116 int (*probe)(struct device *dev); 117 int (*remove)(struct device *dev); 118 void (*shutdown)(struct device *dev); 120 int (*online)(struct device *dev); 121 int (*offline)(struct device *dev); 123 int (*suspend)(struct device *dev, pm_message_t state); [all …]
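A minimal sketch of the struct bus_type callbacks shown in the device.h excerpt: match() decides whether a driver can handle a device on the bus, probe() binds it. The bus name and the name-comparison matching rule are made up for illustration; a real bus would also call bus_register() at init time.

/* Illustrative bus definition; "mybus" and the match rule are invented. */
#include <linux/device.h>
#include <linux/string.h>

static int mybus_match(struct device *dev, struct device_driver *drv)
{
	/* Simplest possible rule: bind when device and driver names match. */
	return strcmp(dev_name(dev), drv->name) == 0;
}

static int mybus_probe(struct device *dev)
{
	dev_info(dev, "bound to %s\n", dev->driver->name);
	return 0;
}

static struct bus_type mybus_type = {
	.name	= "mybus",
	.match	= mybus_match,
	.probe	= mybus_probe,
};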
|
D | attribute_container.h | 15 struct device; 23 int (*match)(struct attribute_container *, struct device *); 42 void attribute_container_create_device(struct device *dev, 44 struct device *, 45 struct device *)); 46 void attribute_container_add_device(struct device *dev, 48 struct device *, 49 struct device *)); 50 void attribute_container_remove_device(struct device *dev, 52 struct device *, [all …]
|
D | pm.h | 37 struct device; /* we have a circular dep with device.h */ 39 extern void pm_vt_switch_required(struct device *dev, bool required); 40 extern void pm_vt_switch_unregister(struct device *dev); 42 static inline void pm_vt_switch_required(struct device *dev, bool required) in pm_vt_switch_required() 45 static inline void pm_vt_switch_unregister(struct device *dev) in pm_vt_switch_unregister() 54 struct device; 296 int (*prepare)(struct device *dev); 297 void (*complete)(struct device *dev); 298 int (*suspend)(struct device *dev); 299 int (*resume)(struct device *dev); [all …]
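A sketch of how the suspend/resume hooks from struct dev_pm_ops (the pm.h entry above) are normally wired up by a driver. The callback bodies are placeholders; SIMPLE_DEV_PM_OPS() fills in the system-sleep slots of the ops table.

/* Illustrative system-sleep callbacks; bodies are placeholders. */
#include <linux/pm.h>

static int mydrv_suspend(struct device *dev)
{
	/* quiesce the hardware, save context */
	return 0;
}

static int mydrv_resume(struct device *dev)
{
	/* restore context, re-enable the hardware */
	return 0;
}

static SIMPLE_DEV_PM_OPS(mydrv_pm_ops, mydrv_suspend, mydrv_resume);
/* then point the driver at it:  .driver = { .pm = &mydrv_pm_ops, }  */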
|
D | pm_opp.h | 21 struct device; 35 int dev_pm_opp_get_opp_count(struct device *dev); 36 unsigned long dev_pm_opp_get_max_clock_latency(struct device *dev); 37 struct dev_pm_opp *dev_pm_opp_get_suspend_opp(struct device *dev); 39 struct dev_pm_opp *dev_pm_opp_find_freq_exact(struct device *dev, 43 struct dev_pm_opp *dev_pm_opp_find_freq_floor(struct device *dev, 46 struct dev_pm_opp *dev_pm_opp_find_freq_ceil(struct device *dev, 49 int dev_pm_opp_add(struct device *dev, unsigned long freq, 51 void dev_pm_opp_remove(struct device *dev, unsigned long freq); 53 int dev_pm_opp_enable(struct device *dev, unsigned long freq); [all …]
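A hedged usage sketch for the dev_pm_opp_* calls in the pm_opp.h entry: register a couple of operating points, then look up the lowest OPP at or above a requested frequency. The frequencies and voltages are arbitrary example numbers; in this kernel the OPP lookups are RCU-protected, so the find call is wrapped in rcu_read_lock().

/* Illustrative OPP registration and lookup; values are made up. */
#include <linux/pm_opp.h>
#include <linux/rcupdate.h>
#include <linux/err.h>

static int my_setup_opps(struct device *dev)
{
	struct dev_pm_opp *opp;
	unsigned long freq = 600000000;	/* request: 600 MHz */

	dev_pm_opp_add(dev, 500000000, 900000);		/* 500 MHz @ 0.90 V */
	dev_pm_opp_add(dev, 1000000000, 1100000);	/* 1 GHz  @ 1.10 V */

	rcu_read_lock();
	opp = dev_pm_opp_find_freq_ceil(dev, &freq);	/* freq is updated to 1 GHz */
	rcu_read_unlock();

	return IS_ERR(opp) ? PTR_ERR(opp) : 0;
}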
|
D | pm_qos.h | 68 struct device *dev; 138 enum pm_qos_flags_status __dev_pm_qos_flags(struct device *dev, s32 mask); 139 enum pm_qos_flags_status dev_pm_qos_flags(struct device *dev, s32 mask); 140 s32 __dev_pm_qos_read_value(struct device *dev); 141 s32 dev_pm_qos_read_value(struct device *dev); 142 int dev_pm_qos_add_request(struct device *dev, struct dev_pm_qos_request *req, 146 int dev_pm_qos_add_notifier(struct device *dev, 148 int dev_pm_qos_remove_notifier(struct device *dev, 152 void dev_pm_qos_constraints_init(struct device *dev); 153 void dev_pm_qos_constraints_destroy(struct device *dev); [all …]
|
D | component.h | 4 struct device; 7 int (*bind)(struct device *, struct device *, void *); 8 void (*unbind)(struct device *, struct device *, void *); 11 int component_add(struct device *, const struct component_ops *); 12 void component_del(struct device *, const struct component_ops *); 14 int component_bind_all(struct device *, void *); 15 void component_unbind_all(struct device *, void *); 20 int (*add_components)(struct device *, struct master *); 21 int (*bind)(struct device *); 22 void (*unbind)(struct device *); [all …]
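A sketch of the component API from the component.h entry: a sub-device registers itself with bind/unbind callbacks, and an aggregate "master" driver later binds all of its components in one pass. my_bind()/my_unbind() are placeholders.

/* Illustrative component registration; callbacks are placeholders. */
#include <linux/component.h>
#include <linux/device.h>

static int my_bind(struct device *dev, struct device *master, void *data)
{
	dev_info(dev, "bound to master %s\n", dev_name(master));
	return 0;
}

static void my_unbind(struct device *dev, struct device *master, void *data)
{
	dev_info(dev, "unbound\n");
}

static const struct component_ops my_component_ops = {
	.bind	= my_bind,
	.unbind	= my_unbind,
};

/* in the sub-device's probe():  component_add(dev, &my_component_ops);  */
/* in its remove():              component_del(dev, &my_component_ops);  */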
|
D | dma-debug.h | 25 struct device; 37 extern void debug_dma_map_page(struct device *dev, struct page *page, 42 extern void debug_dma_mapping_error(struct device *dev, dma_addr_t dma_addr); 44 extern void debug_dma_unmap_page(struct device *dev, dma_addr_t addr, 47 extern void debug_dma_map_sg(struct device *dev, struct scatterlist *sg, 50 extern void debug_dma_unmap_sg(struct device *dev, struct scatterlist *sglist, 53 extern void debug_dma_alloc_coherent(struct device *dev, size_t size, 56 extern void debug_dma_free_coherent(struct device *dev, size_t size, 59 extern void debug_dma_sync_single_for_cpu(struct device *dev, 63 extern void debug_dma_sync_single_for_device(struct device *dev, [all …]
|
D | dma-mapping.h | 19 void* (*alloc)(struct device *dev, size_t size, 22 void (*free)(struct device *dev, size_t size, 25 int (*mmap)(struct device *, struct vm_area_struct *, 28 int (*get_sgtable)(struct device *dev, struct sg_table *sgt, void *, 31 dma_addr_t (*map_page)(struct device *dev, struct page *page, 35 void (*unmap_page)(struct device *dev, dma_addr_t dma_handle, 42 int (*map_sg)(struct device *dev, struct scatterlist *sg, 45 void (*unmap_sg)(struct device *dev, 49 void (*sync_single_for_cpu)(struct device *dev, 52 void (*sync_single_for_device)(struct device *dev, [all …]
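A consumer-side sketch for the dma_map_ops listed in the dma-mapping.h entry: drivers normally go through the dma_map_single()/dma_unmap_single() wrappers, which dispatch to the per-architecture map_page/unmap_page operations. buf and len stand in for a real driver buffer.

/* Illustrative streaming DMA mapping of a kernel buffer. */
#include <linux/dma-mapping.h>
#include <linux/errno.h>

static int my_dma_xfer(struct device *dev, void *buf, size_t len)
{
	dma_addr_t handle;

	handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, handle))
		return -ENOMEM;

	/* ... program the hardware with "handle", wait for completion ... */

	dma_unmap_single(dev, handle, len, DMA_TO_DEVICE);
	return 0;
}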
|
D | iommu.h | 37 struct device; 46 struct device *, unsigned long, int, void *); 160 int (*attach_dev)(struct iommu_domain *domain, struct device *dev); 161 void (*detach_dev)(struct iommu_domain *domain, struct device *dev); 169 int (*add_device)(struct device *dev); 170 void (*remove_device)(struct device *dev); 171 struct iommu_group *(*device_group)(struct device *dev); 178 void (*get_dm_regions)(struct device *dev, struct list_head *list); 179 void (*put_dm_regions)(struct device *dev, struct list_head *list); 191 int (*of_xlate)(struct device *dev, struct of_phandle_args *args); [all …]
|
D | pm_clock.h | 24 extern int pm_clk_runtime_suspend(struct device *dev); 25 extern int pm_clk_runtime_resume(struct device *dev); 34 static inline bool pm_clk_no_clocks(struct device *dev) in pm_clk_no_clocks() 40 extern void pm_clk_init(struct device *dev); 41 extern int pm_clk_create(struct device *dev); 42 extern void pm_clk_destroy(struct device *dev); 43 extern int pm_clk_add(struct device *dev, const char *con_id); 44 extern int pm_clk_add_clk(struct device *dev, struct clk *clk); 45 extern void pm_clk_remove(struct device *dev, const char *con_id); 46 extern int pm_clk_suspend(struct device *dev); [all …]
|
D | of_device.h | 11 struct device; 15 const struct of_device_id *matches, const struct device *dev); 16 extern void of_device_make_bus_id(struct device *dev); 23 static inline int of_driver_match_device(struct device *dev, in of_driver_match_device() 36 extern const void *of_device_get_match_data(const struct device *dev); 38 extern ssize_t of_device_get_modalias(struct device *dev, 41 extern void of_device_uevent(struct device *dev, struct kobj_uevent_env *env); 42 extern int of_device_uevent_modalias(struct device *dev, struct kobj_uevent_env *env); 44 static inline void of_device_node_put(struct device *dev) in of_device_node_put() 51 struct device *cpu_dev; in of_cpu_device_node_get() [all …]
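A sketch of the of_device helpers in the entry above: a platform driver matches against an OF table and retrieves per-compatible driver data with of_device_get_match_data(). struct my_variant and the "acme,widget-v1" compatible string are illustrative.

/* Illustrative OF matching with per-variant driver data. */
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

struct my_variant {
	int num_channels;
};

static const struct my_variant my_v1 = { .num_channels = 4 };

static const struct of_device_id my_of_match[] = {
	{ .compatible = "acme,widget-v1", .data = &my_v1 },
	{ /* sentinel */ }
};

static int my_probe(struct platform_device *pdev)
{
	const struct my_variant *v = of_device_get_match_data(&pdev->dev);

	if (!v)
		return -ENODEV;
	dev_info(&pdev->dev, "%d channels\n", v->num_channels);
	return 0;
}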
|
D | pm_domain.h | 29 bool (*stop_ok)(struct device *dev); 33 int (*start)(struct device *dev); 34 int (*stop)(struct device *dev); 35 int (*save_state)(struct device *dev); 36 int (*restore_state)(struct device *dev); 37 bool (*active_wakeup)(struct device *dev); 65 struct device *dev); 67 struct device *dev); 93 struct device *dev; 108 static inline struct generic_pm_domain_data *dev_gpd_data(struct device *dev) in dev_gpd_data() [all …]
|
D | devfreq.h | 86 int (*target)(struct device *dev, unsigned long *freq, u32 flags); 87 int (*get_dev_status)(struct device *dev, 89 int (*get_cur_freq)(struct device *dev, unsigned long *freq); 90 void (*exit)(struct device *dev); 159 struct device dev; 183 extern struct devfreq *devfreq_add_device(struct device *dev, 188 extern struct devfreq *devm_devfreq_add_device(struct device *dev, 192 extern void devm_devfreq_remove_device(struct device *dev, 200 extern struct dev_pm_opp *devfreq_recommended_opp(struct device *dev, 202 extern int devfreq_register_opp_notifier(struct device *dev, [all …]
|
D | raid_class.h | 16 int (*is_raid)(struct device *); 17 void (*get_resync)(struct device *); 18 void (*get_state)(struct device *); 56 raid_set_##attr(struct raid_template *r, struct device *dev, type value) { \ 57 struct device *device = \ 60 BUG_ON(!device); \ 61 rd = dev_get_drvdata(device); \ 65 raid_get_##attr(struct raid_template *r, struct device *dev) { \ 66 struct device *device = \ 69 BUG_ON(!device); \ [all …]
|
D | pm_wakeup.h | 81 static inline bool device_can_wakeup(struct device *dev) in device_can_wakeup() 86 static inline bool device_may_wakeup(struct device *dev) in device_may_wakeup() 100 extern int device_wakeup_enable(struct device *dev); 101 extern int device_wakeup_disable(struct device *dev); 102 extern void device_set_wakeup_capable(struct device *dev, bool capable); 103 extern int device_init_wakeup(struct device *dev, bool val); 104 extern int device_set_wakeup_enable(struct device *dev, bool enable); 106 extern void pm_stay_awake(struct device *dev); 108 extern void pm_relax(struct device *dev); 110 extern void pm_wakeup_event(struct device *dev, unsigned int msec); [all …]
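A sketch of the wakeup-source API from the pm_wakeup.h entry: mark a device wakeup-capable at probe time, then report wakeup activity (for example from its interrupt handler) so an in-progress system suspend is aborted. The surrounding driver structure is implied.

/* Illustrative wakeup-source setup and event reporting. */
#include <linux/pm_wakeup.h>

static void my_probe_wakeup_setup(struct device *dev)
{
	/* capable of waking the system, and enabled by default */
	device_init_wakeup(dev, true);
}

static void my_irq_saw_wakeup_event(struct device *dev)
{
	if (device_may_wakeup(dev))
		pm_wakeup_event(dev, 0);	/* 0: no timeout hint */
}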
|
D | swiotlb.h | 6 struct device; 42 extern phys_addr_t swiotlb_tbl_map_single(struct device *hwdev, 47 extern void swiotlb_tbl_unmap_single(struct device *hwdev, 51 extern void swiotlb_tbl_sync_single(struct device *hwdev, 58 *swiotlb_alloc_coherent(struct device *hwdev, size_t size, 62 swiotlb_free_coherent(struct device *hwdev, size_t size, 65 extern dma_addr_t swiotlb_map_page(struct device *dev, struct page *page, 69 extern void swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr, 74 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sg, int nents, 78 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents, [all …]
|
D | acpi_dma.h | 32 struct device *dev; 46 struct device *dev; 62 int acpi_dma_controller_register(struct device *dev, 66 int acpi_dma_controller_free(struct device *dev); 67 int devm_acpi_dma_controller_register(struct device *dev, 71 void devm_acpi_dma_controller_free(struct device *dev); 73 struct dma_chan *acpi_dma_request_slave_chan_by_index(struct device *dev, 75 struct dma_chan *acpi_dma_request_slave_chan_by_name(struct device *dev, 82 static inline int acpi_dma_controller_register(struct device *dev, in acpi_dma_controller_register() 89 static inline int acpi_dma_controller_free(struct device *dev) in acpi_dma_controller_free() [all …]
|
D | property.h | 19 struct device; 36 bool device_property_present(struct device *dev, const char *propname); 37 int device_property_read_u8_array(struct device *dev, const char *propname, 39 int device_property_read_u16_array(struct device *dev, const char *propname, 41 int device_property_read_u32_array(struct device *dev, const char *propname, 43 int device_property_read_u64_array(struct device *dev, const char *propname, 45 int device_property_read_string_array(struct device *dev, const char *propname, 47 int device_property_read_string(struct device *dev, const char *propname, 49 int device_property_match_string(struct device *dev, 73 struct fwnode_handle *device_get_next_child_node(struct device *dev, [all …]
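A sketch of the unified device-property accessors listed in the property.h entry: the same calls work whether the values come from devicetree or ACPI. The property names ("acme,threshold", "acme,label") are made up.

/* Illustrative firmware-property reads; property names are invented. */
#include <linux/property.h>
#include <linux/errno.h>

static int my_read_props(struct device *dev)
{
	u32 threshold = 0;
	const char *label = NULL;

	if (!device_property_present(dev, "acme,threshold"))
		return -EINVAL;

	device_property_read_u32(dev, "acme,threshold", &threshold);
	device_property_read_string(dev, "acme,label", &label);

	dev_info(dev, "%s: threshold %u\n", label ? label : "unnamed", threshold);
	return 0;
}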
|
/linux-4.4.14/drivers/s390/char/ |
D | tape_core.c | 93 tape_medium_state_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_medium_state_show() 105 tape_first_minor_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_first_minor_show() 117 tape_state_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_state_show() 130 tape_operation_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_operation_show() 157 tape_blocksize_show(struct device *dev, struct device_attribute *attr, char *buf) in tape_blocksize_show() 186 tape_state_set(struct tape_device *device, enum tape_state newstate) in tape_state_set() argument 190 if (device->tape_state == TS_NOT_OPER) { in tape_state_set() 194 DBF_EVENT(4, "ts. dev: %x\n", device->first_minor); in tape_state_set() 196 if (device->tape_state < TS_SIZE && device->tape_state >=0 ) in tape_state_set() 197 str = tape_state_verbose[device->tape_state]; in tape_state_set() [all …]
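A generic sketch of the sysfs pattern used by the tape_*_show() functions in the tape_core.c entry: a show() callback formats one value into buf, and a DEVICE_ATTR macro wires it to a file under the device's sysfs directory. struct my_dev and the "state" attribute are placeholders, not the tape driver's real data.

/* Illustrative read-only sysfs attribute. */
#include <linux/device.h>
#include <linux/sysfs.h>

struct my_dev {
	int state;
};

static ssize_t state_show(struct device *dev, struct device_attribute *attr,
			  char *buf)
{
	struct my_dev *mdev = dev_get_drvdata(dev);

	return scnprintf(buf, PAGE_SIZE, "%d\n", mdev->state);
}
static DEVICE_ATTR_RO(state);	/* creates <sysfs device dir>/state */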
|
D | tape_char.c | 63 tapechar_setup_device(struct tape_device * device) in tapechar_setup_device() argument 67 sprintf(device_name, "ntibm%i", device->first_minor / 2); in tapechar_setup_device() 68 device->nt = register_tape_dev( in tapechar_setup_device() 69 &device->cdev->dev, in tapechar_setup_device() 70 MKDEV(tapechar_major, device->first_minor), in tapechar_setup_device() 76 device->rt = register_tape_dev( in tapechar_setup_device() 77 &device->cdev->dev, in tapechar_setup_device() 78 MKDEV(tapechar_major, device->first_minor + 1), in tapechar_setup_device() 88 tapechar_cleanup_device(struct tape_device *device) in tapechar_cleanup_device() argument 90 unregister_tape_dev(&device->cdev->dev, device->rt); in tapechar_cleanup_device() [all …]
|
D | tape_std.c | 38 struct tape_device * device; in tape_std_assign_timeout() local 42 device = request->device; in tape_std_assign_timeout() 43 BUG_ON(!device); in tape_std_assign_timeout() 46 device->cdev_id); in tape_std_assign_timeout() 47 rc = tape_cancel_io(device, request); in tape_std_assign_timeout() 50 "%i\n", device->cdev_id, rc); in tape_std_assign_timeout() 54 tape_std_assign(struct tape_device *device) in tape_std_assign() argument 79 rc = tape_do_io_interruptible(device, request); in tape_std_assign() 86 device->cdev_id); in tape_std_assign() 88 DBF_EVENT(3, "%08x: Tape assigned\n", device->cdev_id); in tape_std_assign() [all …]
|
D | tape_34xx.c | 57 struct tape_device *device = request->device; in __tape_34xx_medium_sense() local 70 tape_med_state_set(device, MS_UNLOADED); in __tape_34xx_medium_sense() 72 tape_med_state_set(device, MS_LOADED); in __tape_34xx_medium_sense() 75 device->tape_generic_status |= GMT_WR_PROT(~0); in __tape_34xx_medium_sense() 77 device->tape_generic_status &= ~GMT_WR_PROT(~0); in __tape_34xx_medium_sense() 84 static int tape_34xx_medium_sense(struct tape_device *device) in tape_34xx_medium_sense() argument 97 rc = tape_do_io_interruptible(device, request); in tape_34xx_medium_sense() 102 static void tape_34xx_medium_sense_async(struct tape_device *device) in tape_34xx_medium_sense_async() argument 116 tape_do_io_async(device, request); in tape_34xx_medium_sense_async() 120 struct tape_device *device; member [all …]
|
D | tape_3590.c | 102 static int crypt_supported(struct tape_device *device) in crypt_supported() argument 104 return TAPE390_CRYPT_SUPPORTED(TAPE_3590_CRYPT_INFO(device)); in crypt_supported() 107 static int crypt_enabled(struct tape_device *device) in crypt_enabled() argument 109 return TAPE390_CRYPT_ON(TAPE_3590_CRYPT_INFO(device)); in crypt_enabled() 200 static int tape_3592_kekl_query(struct tape_device *device, in tape_3592_kekl_query() argument 225 rc = tape_do_io(device, request); in tape_3592_kekl_query() 241 static int tape_3592_ioctl_kekl_query(struct tape_device *device, in tape_3592_ioctl_kekl_query() argument 248 if (!crypt_supported(device)) in tape_3592_ioctl_kekl_query() 250 if (!crypt_enabled(device)) in tape_3592_ioctl_kekl_query() 255 rc = tape_3592_kekl_query(device, ext_kekls); in tape_3592_ioctl_kekl_query() [all …]
|
D | tape_proc.c | 40 struct tape_device *device; in tape_proc_show() local 50 device = tape_find_device(n); in tape_proc_show() 51 if (IS_ERR(device)) in tape_proc_show() 53 spin_lock_irq(get_ccwdev_lock(device->cdev)); in tape_proc_show() 55 seq_printf(m, "%-10.10s ", dev_name(&device->cdev->dev)); in tape_proc_show() 56 seq_printf(m, "%04X/", device->cdev->id.cu_type); in tape_proc_show() 57 seq_printf(m, "%02X\t", device->cdev->id.cu_model); in tape_proc_show() 58 seq_printf(m, "%04X/", device->cdev->id.dev_type); in tape_proc_show() 59 seq_printf(m, "%02X\t\t", device->cdev->id.dev_model); in tape_proc_show() 60 if (device->char_data.block_size == 0) in tape_proc_show() [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/gr/ |
D | nv40.c | 36 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv40_gr_units() 47 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 20, align, in nv40_gr_object_bind() 79 int ret = nvkm_gpuobj_new(gr->base.engine.subdev.device, gr->size, in nv40_gr_chan_bind() 84 nv40_grctx_fill(gr->base.engine.subdev.device, *pgpuobj); in nv40_gr_chan_bind() 97 struct nvkm_device *device = subdev->device; in nv40_gr_chan_fini() local 101 nvkm_mask(device, 0x400720, 0x00000001, 0x00000000); in nv40_gr_chan_fini() 103 if (nvkm_rd32(device, 0x40032c) == inst) { in nv40_gr_chan_fini() 105 nvkm_wr32(device, 0x400720, 0x00000000); in nv40_gr_chan_fini() 106 nvkm_wr32(device, 0x400784, inst); in nv40_gr_chan_fini() 107 nvkm_mask(device, 0x400310, 0x00000020, 0x00000020); in nv40_gr_chan_fini() [all …]
|
D | nv50.c | 33 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv50_gr_units() 44 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, in nv50_gr_object_bind() 71 int ret = nvkm_gpuobj_new(gr->base.engine.subdev.device, gr->size, in nv50_gr_chan_bind() 75 nv50_grctx_fill(gr->base.engine.subdev.device, *pgpuobj); in nv50_gr_chan_bind() 241 struct nvkm_device *device = subdev->device; in nv50_gr_prop_trap() local 242 u32 e0c = nvkm_rd32(device, ustatus_addr + 0x04); in nv50_gr_prop_trap() 243 u32 e10 = nvkm_rd32(device, ustatus_addr + 0x08); in nv50_gr_prop_trap() 244 u32 e14 = nvkm_rd32(device, ustatus_addr + 0x0c); in nv50_gr_prop_trap() 245 u32 e18 = nvkm_rd32(device, ustatus_addr + 0x10); in nv50_gr_prop_trap() 246 u32 e1c = nvkm_rd32(device, ustatus_addr + 0x14); in nv50_gr_prop_trap() [all …]
|
D | gm204.c | 241 struct nvkm_device *device = gr->base.engine.subdev.device; in gm204_gr_init() local 248 tmp = nvkm_rd32(device, 0x100c80); /*XXX: mask? */ in gm204_gr_init() 249 nvkm_wr32(device, 0x418880, 0x00001000 | (tmp & 0x00000fff)); in gm204_gr_init() 250 nvkm_wr32(device, 0x418890, 0x00000000); in gm204_gr_init() 251 nvkm_wr32(device, 0x418894, 0x00000000); in gm204_gr_init() 252 nvkm_wr32(device, 0x4188b4, nvkm_memory_addr(gr->unk4188b4) >> 8); in gm204_gr_init() 253 nvkm_wr32(device, 0x4188b8, nvkm_memory_addr(gr->unk4188b8) >> 8); in gm204_gr_init() 254 nvkm_mask(device, 0x4188b0, 0x00040000, 0x00040000); in gm204_gr_init() 257 nvkm_wr32(device, 0x100cc8, nvkm_memory_addr(gr->unk4188b4) >> 8); in gm204_gr_init() 258 nvkm_wr32(device, 0x100ccc, nvkm_memory_addr(gr->unk4188b8) >> 8); in gm204_gr_init() [all …]
|
D | nv20.c | 33 struct nvkm_device *device = gr->base.engine.subdev.device; in nv20_gr_chan_fini() local 37 nvkm_mask(device, 0x400720, 0x00000001, 0x00000000); in nv20_gr_chan_fini() 38 if (nvkm_rd32(device, 0x400144) & 0x00010000) in nv20_gr_chan_fini() 39 chid = (nvkm_rd32(device, 0x400148) & 0x1f000000) >> 24; in nv20_gr_chan_fini() 41 nvkm_wr32(device, 0x400784, inst >> 4); in nv20_gr_chan_fini() 42 nvkm_wr32(device, 0x400788, 0x00000002); in nv20_gr_chan_fini() 43 nvkm_msec(device, 2000, in nv20_gr_chan_fini() 44 if (!nvkm_rd32(device, 0x400700)) in nv20_gr_chan_fini() 47 nvkm_wr32(device, 0x400144, 0x10000000); in nv20_gr_chan_fini() 48 nvkm_mask(device, 0x400148, 0xff000000, 0x1f000000); in nv20_gr_chan_fini() [all …]
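A hedged sketch of the register-access idiom that recurs through these nvkm files: nvkm_rd32()/nvkm_wr32() for raw MMIO, nvkm_mask() for read-modify-write, and nvkm_msec() as a poll-with-timeout loop. These are nouveau-internal helpers; the offsets and the polling form below are taken from the nv20.c and nv40.c excerpts, and the helper semantics are inferred from those calls rather than restated from the headers.

/* Illustrative only; mirrors the fini sequences shown above. */
static void example_chan_quiesce(struct nvkm_device *device, u32 inst)
{
	/* read-modify-write: clear bit 0 of 0x400720 */
	nvkm_mask(device, 0x400720, 0x00000001, 0x00000000);

	/* plain writes, as in the chan_fini paths */
	nvkm_wr32(device, 0x400784, inst);
	nvkm_wr32(device, 0x400788, 0x00000002);

	/* poll 0x400700 until it reads back zero, give up after ~2 s */
	nvkm_msec(device, 2000,
		if (!nvkm_rd32(device, 0x400700))
			break;
	);
}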
|
D | gk104.c | 183 struct nvkm_device *device = gr->base.engine.subdev.device; in gk104_gr_init() local 190 nvkm_wr32(device, GPC_BCAST(0x0880), 0x00000000); in gk104_gr_init() 191 nvkm_wr32(device, GPC_BCAST(0x08a4), 0x00000000); in gk104_gr_init() 192 nvkm_wr32(device, GPC_BCAST(0x0888), 0x00000000); in gk104_gr_init() 193 nvkm_wr32(device, GPC_BCAST(0x088c), 0x00000000); in gk104_gr_init() 194 nvkm_wr32(device, GPC_BCAST(0x0890), 0x00000000); in gk104_gr_init() 195 nvkm_wr32(device, GPC_BCAST(0x0894), 0x00000000); in gk104_gr_init() 196 nvkm_wr32(device, GPC_BCAST(0x08b4), nvkm_memory_addr(gr->unk4188b4) >> 8); in gk104_gr_init() 197 nvkm_wr32(device, GPC_BCAST(0x08b8), nvkm_memory_addr(gr->unk4188b8) >> 8); in gk104_gr_init() 201 nvkm_wr32(device, GPC_UNIT(0, 0x3018), 0x00000001); in gk104_gr_init() [all …]
|
D | gf100.c | 46 struct nvkm_device *device = gr->base.engine.subdev.device; in gf100_gr_zbc_clear_color() local 48 nvkm_wr32(device, 0x405804, gr->zbc_color[zbc].ds[0]); in gf100_gr_zbc_clear_color() 49 nvkm_wr32(device, 0x405808, gr->zbc_color[zbc].ds[1]); in gf100_gr_zbc_clear_color() 50 nvkm_wr32(device, 0x40580c, gr->zbc_color[zbc].ds[2]); in gf100_gr_zbc_clear_color() 51 nvkm_wr32(device, 0x405810, gr->zbc_color[zbc].ds[3]); in gf100_gr_zbc_clear_color() 53 nvkm_wr32(device, 0x405814, gr->zbc_color[zbc].format); in gf100_gr_zbc_clear_color() 54 nvkm_wr32(device, 0x405820, zbc); in gf100_gr_zbc_clear_color() 55 nvkm_wr32(device, 0x405824, 0x00000004); /* TRIGGER | WRITE | COLOR */ in gf100_gr_zbc_clear_color() 62 struct nvkm_ltc *ltc = gr->base.engine.subdev.device->ltc; in gf100_gr_zbc_color_get() 97 struct nvkm_device *device = gr->base.engine.subdev.device; in gf100_gr_zbc_clear_depth() local [all …]
|
D | gm107.c | 294 struct nvkm_device *device = gr->base.engine.subdev.device; in gm107_gr_init_bios() local 295 struct nvkm_bios *bios = device->bios; in gm107_gr_init_bios() 303 nvkm_wr32(device, regs[E].ctrl, infoE.data); in gm107_gr_init_bios() 305 nvkm_wr32(device, regs[E].data, infoX.data); in gm107_gr_init_bios() 313 struct nvkm_device *device = gr->base.engine.subdev.device; in gm107_gr_init() local 320 nvkm_wr32(device, GPC_BCAST(0x0880), 0x00000000); in gm107_gr_init() 321 nvkm_wr32(device, GPC_BCAST(0x0890), 0x00000000); in gm107_gr_init() 322 nvkm_wr32(device, GPC_BCAST(0x0894), 0x00000000); in gm107_gr_init() 323 nvkm_wr32(device, GPC_BCAST(0x08b4), nvkm_memory_addr(gr->unk4188b4) >> 8); in gm107_gr_init() 324 nvkm_wr32(device, GPC_BCAST(0x08b8), nvkm_memory_addr(gr->unk4188b8) >> 8); in gm107_gr_init() [all …]
|
D | nv04.c | 445 nv04_gr_set_ctx1(struct nvkm_device *device, u32 inst, u32 mask, u32 value) in nv04_gr_set_ctx1() argument 447 int subc = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7; in nv04_gr_set_ctx1() 450 tmp = nvkm_rd32(device, 0x700000 + inst); in nv04_gr_set_ctx1() 453 nvkm_wr32(device, 0x700000 + inst, tmp); in nv04_gr_set_ctx1() 455 nvkm_wr32(device, NV04_PGRAPH_CTX_SWITCH1, tmp); in nv04_gr_set_ctx1() 456 nvkm_wr32(device, NV04_PGRAPH_CTX_CACHE1 + (subc << 2), tmp); in nv04_gr_set_ctx1() 460 nv04_gr_set_ctx_val(struct nvkm_device *device, u32 inst, u32 mask, u32 value) in nv04_gr_set_ctx_val() argument 465 ctx1 = nvkm_rd32(device, 0x700000 + inst); in nv04_gr_set_ctx_val() 469 tmp = nvkm_rd32(device, 0x70000c + inst); in nv04_gr_set_ctx_val() 472 nvkm_wr32(device, 0x70000c + inst, tmp); in nv04_gr_set_ctx_val() [all …]
|
D | nv10.c | 417 nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, addr); \ 419 state[__i] = nvkm_rd32(device, NV10_PGRAPH_PIPE_DATA); \ 425 nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, addr); \ 427 nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, state[__i]); \ 433 struct nvkm_device *device = chan->object.engine->subdev.device; in nv17_gr_mthd_lma_window() local 447 PIPE_SAVE(device, pipe_0x0040, 0x0040); in nv17_gr_mthd_lma_window() 448 PIPE_SAVE(device, pipe->pipe_0x0200, 0x0200); in nv17_gr_mthd_lma_window() 450 PIPE_RESTORE(device, chan->lma_window, 0x6790); in nv17_gr_mthd_lma_window() 454 xfmode0 = nvkm_rd32(device, NV10_PGRAPH_XFMODE0); in nv17_gr_mthd_lma_window() 455 xfmode1 = nvkm_rd32(device, NV10_PGRAPH_XFMODE1); in nv17_gr_mthd_lma_window() [all …]
|
D | nv30.c | 35 ret = nvkm_memory_new(gr->base.engine.subdev.device, in nv30_gr_chan_new() 106 struct nvkm_device *device = gr->base.engine.subdev.device; in nv30_gr_init() local 108 nvkm_wr32(device, NV20_PGRAPH_CHANNEL_CTX_TABLE, in nv30_gr_init() 111 nvkm_wr32(device, NV03_PGRAPH_INTR , 0xFFFFFFFF); in nv30_gr_init() 112 nvkm_wr32(device, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF); in nv30_gr_init() 114 nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF); in nv30_gr_init() 115 nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x00000000); in nv30_gr_init() 116 nvkm_wr32(device, NV04_PGRAPH_DEBUG_1, 0x401287c0); in nv30_gr_init() 117 nvkm_wr32(device, 0x400890, 0x01b463ff); in nv30_gr_init() 118 nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0xf2de0475); in nv30_gr_init() [all …]
|
D | gm20b.c | 32 struct nvkm_device *device = gr->base.engine.subdev.device; in gm20b_gr_init_gpc_mmu() local 37 nvkm_wr32(device, 0x100ce4, 0xffffffff); in gm20b_gr_init_gpc_mmu() 41 val = nvkm_rd32(device, 0x100c80); in gm20b_gr_init_gpc_mmu() 43 nvkm_wr32(device, 0x418880, val); in gm20b_gr_init_gpc_mmu() 44 nvkm_wr32(device, 0x418890, 0); in gm20b_gr_init_gpc_mmu() 45 nvkm_wr32(device, 0x418894, 0); in gm20b_gr_init_gpc_mmu() 47 nvkm_wr32(device, 0x4188b0, nvkm_rd32(device, 0x100cc4)); in gm20b_gr_init_gpc_mmu() 48 nvkm_wr32(device, 0x4188b4, nvkm_rd32(device, 0x100cc8)); in gm20b_gr_init_gpc_mmu() 49 nvkm_wr32(device, 0x4188b8, nvkm_rd32(device, 0x100ccc)); in gm20b_gr_init_gpc_mmu() 51 nvkm_wr32(device, 0x4188ac, nvkm_rd32(device, 0x100800)); in gm20b_gr_init_gpc_mmu() [all …]
|
D | ctxnv40.c | 120 nv40_gr_vs_count(struct nvkm_device *device) in nv40_gr_vs_count() argument 123 switch (device->chipset) { in nv40_gr_vs_count() 161 struct nvkm_device *device = ctx->device; in nv40_gr_construct_general() local 188 if (device->chipset == 0x40) { in nv40_gr_construct_general() 209 if (nv44_gr_class(ctx->device)) { in nv40_gr_construct_general() 218 if (!nv44_gr_class(ctx->device)) { in nv40_gr_construct_general() 224 if (device->chipset == 0x4c || in nv40_gr_construct_general() 225 (device->chipset & 0xf0) == 0x60) in nv40_gr_construct_general() 232 switch (device->chipset) { in nv40_gr_construct_general() 241 switch (device->chipset) { in nv40_gr_construct_general() [all …]
|
D | nv44.c | 34 struct nvkm_device *device = gr->base.engine.subdev.device; in nv44_gr_tile() local 35 struct nvkm_fifo *fifo = device->fifo; in nv44_gr_tile() 41 switch (device->chipset) { in nv44_gr_tile() 44 nvkm_wr32(device, NV20_PGRAPH_TSIZE(i), tile->pitch); in nv44_gr_tile() 45 nvkm_wr32(device, NV20_PGRAPH_TLIMIT(i), tile->limit); in nv44_gr_tile() 46 nvkm_wr32(device, NV20_PGRAPH_TILE(i), tile->addr); in nv44_gr_tile() 53 nvkm_wr32(device, NV47_PGRAPH_TSIZE(i), tile->pitch); in nv44_gr_tile() 54 nvkm_wr32(device, NV47_PGRAPH_TLIMIT(i), tile->limit); in nv44_gr_tile() 55 nvkm_wr32(device, NV47_PGRAPH_TILE(i), tile->addr); in nv44_gr_tile() 56 nvkm_wr32(device, NV40_PGRAPH_TSIZE1(i), tile->pitch); in nv44_gr_tile() [all …]
|
D | gk20a.c | 153 struct nvkm_device *device = subdev->device; in gk20a_gr_wait_mem_scrubbing() local 155 if (nvkm_msec(device, 2000, in gk20a_gr_wait_mem_scrubbing() 156 if (!(nvkm_rd32(device, 0x40910c) & 0x00000006)) in gk20a_gr_wait_mem_scrubbing() 163 if (nvkm_msec(device, 2000, in gk20a_gr_wait_mem_scrubbing() 164 if (!(nvkm_rd32(device, 0x41a10c) & 0x00000006)) in gk20a_gr_wait_mem_scrubbing() 177 struct nvkm_device *device = gr->base.engine.subdev.device; in gk20a_gr_set_hww_esr_report_mask() local 178 nvkm_wr32(device, 0x419e44, 0x1ffffe); in gk20a_gr_set_hww_esr_report_mask() 179 nvkm_wr32(device, 0x419e4c, 0x7f); in gk20a_gr_set_hww_esr_report_mask() 185 struct nvkm_device *device = gr->base.engine.subdev.device; in gk20a_gr_init() local 193 nvkm_wr32(device, 0x40802c, 0x1); in gk20a_gr_init() [all …]
|
D | ctxnv50.c | 255 nv50_grctx_fill(struct nvkm_device *device, struct nvkm_gpuobj *mem) in nv50_grctx_fill() argument 258 .device = device, in nv50_grctx_fill() 265 nv50_grctx_init(struct nvkm_device *device, u32 *size) in nv50_grctx_init() argument 269 .device = device, in nv50_grctx_init() 279 nvkm_wr32(device, 0x400324, 0); in nv50_grctx_init() 281 nvkm_wr32(device, 0x400328, ctxprog[i]); in nv50_grctx_init() 298 struct nvkm_device *device = ctx->device; in nv50_gr_construct_mmio() local 301 u32 units = nvkm_rd32(device, 0x1540); in nv50_gr_construct_mmio() 307 if (device->chipset == 0x50) { in nv50_gr_construct_mmio() 314 if (IS_NVA3F(device->chipset)) in nv50_gr_construct_mmio() [all …]
|
D | ctxgk104.c | 877 struct nvkm_device *device = gr->base.engine.subdev.device; in gk104_grctx_generate_unkn() local 878 nvkm_mask(device, 0x418c6c, 0x00000001, 0x00000001); in gk104_grctx_generate_unkn() 879 nvkm_mask(device, 0x41980c, 0x00000010, 0x00000010); in gk104_grctx_generate_unkn() 880 nvkm_mask(device, 0x41be08, 0x00000004, 0x00000004); in gk104_grctx_generate_unkn() 881 nvkm_mask(device, 0x4064c0, 0x80000000, 0x80000000); in gk104_grctx_generate_unkn() 882 nvkm_mask(device, 0x405800, 0x08000000, 0x08000000); in gk104_grctx_generate_unkn() 883 nvkm_mask(device, 0x419c00, 0x00000008, 0x00000008); in gk104_grctx_generate_unkn() 889 struct nvkm_device *device = gr->base.engine.subdev.device; in gk104_grctx_generate_r418bb8() local 926 nvkm_wr32(device, 0x418bb8, (gr->tpc_total << 8) | in gk104_grctx_generate_r418bb8() 929 nvkm_wr32(device, 0x418b08 + (i * 4), data[i]); in gk104_grctx_generate_r418bb8() [all …]
|
D | ctxgf100.c | 1007 struct nvkm_device *device = info->gr->base.engine.subdev.device; in gf100_grctx_mmio_item() local 1024 nvkm_wr32(device, addr, data); in gf100_grctx_mmio_item() 1088 struct nvkm_device *device = gr->base.engine.subdev.device; in gf100_grctx_generate_tpcid() local 1094 nvkm_wr32(device, TPC_UNIT(gpc, tpc, 0x698), id); in gf100_grctx_generate_tpcid() 1095 nvkm_wr32(device, TPC_UNIT(gpc, tpc, 0x4e8), id); in gf100_grctx_generate_tpcid() 1096 nvkm_wr32(device, GPC_UNIT(gpc, 0x0c10 + tpc * 4), id); in gf100_grctx_generate_tpcid() 1097 nvkm_wr32(device, TPC_UNIT(gpc, tpc, 0x088), id); in gf100_grctx_generate_tpcid() 1101 nvkm_wr32(device, GPC_UNIT(gpc, 0x0c08), gr->tpc_nr[gpc]); in gf100_grctx_generate_tpcid() 1102 nvkm_wr32(device, GPC_UNIT(gpc, 0x0c8c), gr->tpc_nr[gpc]); in gf100_grctx_generate_tpcid() 1110 struct nvkm_device *device = gr->base.engine.subdev.device; in gf100_grctx_generate_r406028() local [all …]
|
D | g84.c | 117 struct nvkm_device *device = subdev->device; in g84_gr_tlb_flush() local 118 struct nvkm_timer *tmr = device->timer; in g84_gr_tlb_flush() 126 nvkm_mask(device, 0x400500, 0x00000001, 0x00000000); in g84_gr_tlb_flush() 132 for (tmp = nvkm_rd32(device, 0x400380); tmp && idle; tmp >>= 3) { in g84_gr_tlb_flush() 137 for (tmp = nvkm_rd32(device, 0x400384); tmp && idle; tmp >>= 3) { in g84_gr_tlb_flush() 142 for (tmp = nvkm_rd32(device, 0x400388); tmp && idle; tmp >>= 3) { in g84_gr_tlb_flush() 152 tmp = nvkm_rd32(device, 0x400700); in g84_gr_tlb_flush() 157 nvkm_rd32(device, 0x400380)); in g84_gr_tlb_flush() 159 nvkm_rd32(device, 0x400384)); in g84_gr_tlb_flush() 161 nvkm_rd32(device, 0x400388)); in g84_gr_tlb_flush() [all …]
|
D | ctxgm204.c | 924 struct nvkm_device *device = gr->base.engine.subdev.device; in gm204_grctx_generate_tpcid() local 930 nvkm_wr32(device, TPC_UNIT(gpc, tpc, 0x698), id); in gm204_grctx_generate_tpcid() 931 nvkm_wr32(device, GPC_UNIT(gpc, 0x0c10 + tpc * 4), id); in gm204_grctx_generate_tpcid() 932 nvkm_wr32(device, TPC_UNIT(gpc, tpc, 0x088), id); in gm204_grctx_generate_tpcid() 942 struct nvkm_device *device = gr->base.engine.subdev.device; in gm204_grctx_generate_rop_active_fbps() local 943 const u32 fbp_count = nvkm_rd32(device, 0x12006c); in gm204_grctx_generate_rop_active_fbps() 944 nvkm_mask(device, 0x408850, 0x0000000f, fbp_count); /* zrop */ in gm204_grctx_generate_rop_active_fbps() 945 nvkm_mask(device, 0x408958, 0x0000000f, fbp_count); /* crop */ in gm204_grctx_generate_rop_active_fbps() 951 struct nvkm_device *device = gr->base.engine.subdev.device; in gm204_grctx_generate_405b60() local 975 nvkm_wr32(device, 0x405b60 + (i * 4), dist[i]); in gm204_grctx_generate_405b60() [all …]
|
/linux-4.4.14/drivers/base/power/ |
D | power.h | 3 static inline void device_pm_init_common(struct device *dev) in device_pm_init_common() 14 static inline void pm_runtime_early_init(struct device *dev) in pm_runtime_early_init() 20 extern void pm_runtime_init(struct device *dev); 21 extern void pm_runtime_remove(struct device *dev); 24 struct device *dev; 34 extern int device_wakeup_attach_irq(struct device *dev, 36 extern void device_wakeup_detach_irq(struct device *dev); 43 device_wakeup_attach_irq(struct device *dev, in device_wakeup_attach_irq() 49 static inline void device_wakeup_detach_irq(struct device *dev) in device_wakeup_detach_irq() 67 extern int dpm_sysfs_add(struct device *dev); [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/disp/ |
D | vga.c | 27 nvkm_rdport(struct nvkm_device *device, int head, u16 port) in nvkm_rdport() argument 29 if (device->card_type >= NV_50) in nvkm_rdport() 30 return nvkm_rd08(device, 0x601000 + port); in nvkm_rdport() 35 return nvkm_rd08(device, 0x601000 + (head * 0x2000) + port); in nvkm_rdport() 40 if (device->card_type < NV_40) in nvkm_rdport() 42 return nvkm_rd08(device, 0x0c0000 + (head * 0x2000) + port); in nvkm_rdport() 49 nvkm_wrport(struct nvkm_device *device, int head, u16 port, u8 data) in nvkm_wrport() argument 51 if (device->card_type >= NV_50) in nvkm_wrport() 52 nvkm_wr08(device, 0x601000 + port, data); in nvkm_wrport() 57 nvkm_wr08(device, 0x601000 + (head * 0x2000) + port, data); in nvkm_wrport() [all …]
|
D | hdmig84.c | 34 struct nvkm_device *device = disp->base.engine.subdev.device; in g84_hdmi_ctrl() local 58 nvkm_mask(device, 0x6165a4 + hoff, 0x40000000, 0x00000000); in g84_hdmi_ctrl() 59 nvkm_mask(device, 0x616520 + hoff, 0x00000001, 0x00000000); in g84_hdmi_ctrl() 60 nvkm_mask(device, 0x616500 + hoff, 0x00000001, 0x00000000); in g84_hdmi_ctrl() 65 nvkm_mask(device, 0x616520 + hoff, 0x00000001, 0x00000000); in g84_hdmi_ctrl() 66 nvkm_wr32(device, 0x616528 + hoff, 0x000d0282); in g84_hdmi_ctrl() 67 nvkm_wr32(device, 0x61652c + hoff, 0x0000006f); in g84_hdmi_ctrl() 68 nvkm_wr32(device, 0x616530 + hoff, 0x00000000); in g84_hdmi_ctrl() 69 nvkm_wr32(device, 0x616534 + hoff, 0x00000000); in g84_hdmi_ctrl() 70 nvkm_wr32(device, 0x616538 + hoff, 0x00000000); in g84_hdmi_ctrl() [all …]
|
D | hdmigt215.c | 35 struct nvkm_device *device = disp->base.engine.subdev.device; in gt215_hdmi_ctrl() local 59 nvkm_mask(device, 0x61c5a4 + soff, 0x40000000, 0x00000000); in gt215_hdmi_ctrl() 60 nvkm_mask(device, 0x61c520 + soff, 0x00000001, 0x00000000); in gt215_hdmi_ctrl() 61 nvkm_mask(device, 0x61c500 + soff, 0x00000001, 0x00000000); in gt215_hdmi_ctrl() 66 nvkm_mask(device, 0x61c520 + soff, 0x00000001, 0x00000000); in gt215_hdmi_ctrl() 67 nvkm_wr32(device, 0x61c528 + soff, 0x000d0282); in gt215_hdmi_ctrl() 68 nvkm_wr32(device, 0x61c52c + soff, 0x0000006f); in gt215_hdmi_ctrl() 69 nvkm_wr32(device, 0x61c530 + soff, 0x00000000); in gt215_hdmi_ctrl() 70 nvkm_wr32(device, 0x61c534 + soff, 0x00000000); in gt215_hdmi_ctrl() 71 nvkm_wr32(device, 0x61c538 + soff, 0x00000000); in gt215_hdmi_ctrl() [all …]
|
D | rootgf119.c | 37 struct nvkm_device *device = disp->base.engine.subdev.device; in gf119_disp_root_scanoutpos() local 38 const u32 total = nvkm_rd32(device, 0x640414 + (head * 0x300)); in gf119_disp_root_scanoutpos() 39 const u32 blanke = nvkm_rd32(device, 0x64041c + (head * 0x300)); in gf119_disp_root_scanoutpos() 40 const u32 blanks = nvkm_rd32(device, 0x640420 + (head * 0x300)); in gf119_disp_root_scanoutpos() 58 nvkm_rd32(device, 0x616340 + (head * 0x800)) & 0xffff; in gf119_disp_root_scanoutpos() 61 nvkm_rd32(device, 0x616344 + (head * 0x800)) & 0xffff; in gf119_disp_root_scanoutpos() 71 struct nvkm_device *device = root->disp->base.engine.subdev.device; in gf119_disp_root_fini() local 73 nvkm_wr32(device, 0x6100b0, 0x00000000); in gf119_disp_root_fini() 80 struct nvkm_device *device = disp->base.engine.subdev.device; in gf119_disp_root_init() local 91 tmp = nvkm_rd32(device, 0x616104 + (i * 0x800)); in gf119_disp_root_init() [all …]
|
D | dmacgf119.c | 44 struct nvkm_device *device = subdev->device; in gf119_disp_dmac_fini() local 48 nvkm_mask(device, 0x610490 + (chid * 0x0010), 0x00001010, 0x00001000); in gf119_disp_dmac_fini() 49 nvkm_mask(device, 0x610490 + (chid * 0x0010), 0x00000003, 0x00000000); in gf119_disp_dmac_fini() 50 if (nvkm_msec(device, 2000, in gf119_disp_dmac_fini() 51 if (!(nvkm_rd32(device, 0x610490 + (chid * 0x10)) & 0x001e0000)) in gf119_disp_dmac_fini() 55 nvkm_rd32(device, 0x610490 + (chid * 0x10))); in gf119_disp_dmac_fini() 59 nvkm_mask(device, 0x610090, 0x00000001 << chid, 0x00000000); in gf119_disp_dmac_fini() 60 nvkm_mask(device, 0x6100a0, 0x00000001 << chid, 0x00000000); in gf119_disp_dmac_fini() 68 struct nvkm_device *device = subdev->device; in gf119_disp_dmac_init() local 72 nvkm_mask(device, 0x6100a0, 0x00000001 << chid, 0x00000001 << chid); in gf119_disp_dmac_init() [all …]
|
D | coregf119.c | 179 struct nvkm_device *device = subdev->device; in gf119_disp_core_fini() local 182 nvkm_mask(device, 0x610490, 0x00000010, 0x00000000); in gf119_disp_core_fini() 183 nvkm_mask(device, 0x610490, 0x00000003, 0x00000000); in gf119_disp_core_fini() 184 if (nvkm_msec(device, 2000, in gf119_disp_core_fini() 185 if (!(nvkm_rd32(device, 0x610490) & 0x001e0000)) in gf119_disp_core_fini() 189 nvkm_rd32(device, 0x610490)); in gf119_disp_core_fini() 193 nvkm_mask(device, 0x610090, 0x00000001, 0x00000000); in gf119_disp_core_fini() 194 nvkm_mask(device, 0x6100a0, 0x00000001, 0x00000000); in gf119_disp_core_fini() 202 struct nvkm_device *device = subdev->device; in gf119_disp_core_init() local 205 nvkm_mask(device, 0x6100a0, 0x00000001, 0x00000001); in gf119_disp_core_init() [all …]
|
D | corenv50.c | 172 struct nvkm_device *device = subdev->device; in nv50_disp_core_fini() local 175 nvkm_mask(device, 0x610200, 0x00000010, 0x00000000); in nv50_disp_core_fini() 176 nvkm_mask(device, 0x610200, 0x00000003, 0x00000000); in nv50_disp_core_fini() 177 if (nvkm_msec(device, 2000, in nv50_disp_core_fini() 178 if (!(nvkm_rd32(device, 0x610200) & 0x001e0000)) in nv50_disp_core_fini() 182 nvkm_rd32(device, 0x610200)); in nv50_disp_core_fini() 186 nvkm_mask(device, 0x610028, 0x00010001, 0x00000000); in nv50_disp_core_fini() 194 struct nvkm_device *device = subdev->device; in nv50_disp_core_init() local 197 nvkm_mask(device, 0x610028, 0x00010000, 0x00010000); in nv50_disp_core_init() 200 if ((nvkm_rd32(device, 0x610200) & 0x009f0000) == 0x00020000) in nv50_disp_core_init() [all …]
|
D | nv04.c | 35 struct nvkm_device *device = disp->engine.subdev.device; in nv04_disp_vblank_init() local 36 nvkm_wr32(device, 0x600140 + (head * 0x2000) , 0x00000001); in nv04_disp_vblank_init() 42 struct nvkm_device *device = disp->engine.subdev.device; in nv04_disp_vblank_fini() local 43 nvkm_wr32(device, 0x600140 + (head * 0x2000) , 0x00000000); in nv04_disp_vblank_fini() 50 struct nvkm_device *device = subdev->device; in nv04_disp_intr() local 51 u32 crtc0 = nvkm_rd32(device, 0x600100); in nv04_disp_intr() 52 u32 crtc1 = nvkm_rd32(device, 0x602100); in nv04_disp_intr() 57 nvkm_wr32(device, 0x600100, 0x00000001); in nv04_disp_intr() 62 nvkm_wr32(device, 0x602100, 0x00000001); in nv04_disp_intr() 65 if (device->chipset >= 0x10 && device->chipset <= 0x40) { in nv04_disp_intr() [all …]
|
D | sorgm204.c | 44 struct nvkm_device *device = outp->disp->engine.subdev.device; in gm204_sor_magic() local 48 nvkm_mask(device, 0x612308 + soff, 0x0000001f, 0x00000000 | data); in gm204_sor_magic() 50 nvkm_mask(device, 0x612388 + soff, 0x0000001f, 0x00000010 | data); in gm204_sor_magic() 54 gm204_sor_dp_lane_map(struct nvkm_device *device, u8 lane) in gm204_sor_dp_lane_map() argument 62 struct nvkm_device *device = outp->base.disp->engine.subdev.device; in gm204_sor_dp_pattern() local 66 nvkm_mask(device, 0x61c110 + soff, 0x0f0f0f0f, data); in gm204_sor_dp_pattern() 68 nvkm_mask(device, 0x61c12c + soff, 0x0f0f0f0f, data); in gm204_sor_dp_pattern() 75 struct nvkm_device *device = outp->base.disp->engine.subdev.device; in gm204_sor_dp_lnk_pwr() local 81 mask |= 1 << (gm204_sor_dp_lane_map(device, i) >> 3); in gm204_sor_dp_lnk_pwr() 83 nvkm_mask(device, 0x61c130 + loff, 0x0000000f, mask); in gm204_sor_dp_lnk_pwr() [all …]
|
D | gf119.c | 36 struct nvkm_device *device = disp->base.engine.subdev.device; in gf119_disp_vblank_init() local 37 nvkm_mask(device, 0x6100c0 + (head * 0x800), 0x00000001, 0x00000001); in gf119_disp_vblank_init() 43 struct nvkm_device *device = disp->base.engine.subdev.device; in gf119_disp_vblank_fini() local 44 nvkm_mask(device, 0x6100c0 + (head * 0x800), 0x00000001, 0x00000000); in gf119_disp_vblank_fini() 53 struct nvkm_bios *bios = subdev->device->bios; in exec_lookup() 98 struct nvkm_device *device = subdev->device; in exec_script() local 99 struct nvkm_bios *bios = device->bios; in exec_script() 107 ctrl = nvkm_rd32(device, 0x640180 + (or * 0x20)); in exec_script() 136 struct nvkm_device *device = subdev->device; in exec_clkcmp() local 137 struct nvkm_bios *bios = device->bios; in exec_clkcmp() [all …]
|
D | sorg94.c | 60 g94_sor_dp_lane_map(struct nvkm_device *device, u8 lane) in g94_sor_dp_lane_map() argument 65 if (device->chipset >= 0x110) in g94_sor_dp_lane_map() 67 if (device->chipset == 0xaf) in g94_sor_dp_lane_map() 75 struct nvkm_device *device = outp->base.disp->engine.subdev.device; in g94_sor_dp_pattern() local 77 nvkm_mask(device, 0x61c10c + loff, 0x0f000000, pattern << 24); in g94_sor_dp_pattern() 84 struct nvkm_device *device = outp->base.disp->engine.subdev.device; in g94_sor_dp_lnk_pwr() local 90 mask |= 1 << (g94_sor_dp_lane_map(device, i) >> 3); in g94_sor_dp_lnk_pwr() 92 nvkm_mask(device, 0x61c130 + loff, 0x0000000f, mask); in g94_sor_dp_lnk_pwr() 93 nvkm_mask(device, 0x61c034 + soff, 0x80000000, 0x80000000); in g94_sor_dp_lnk_pwr() 94 nvkm_msec(device, 2000, in g94_sor_dp_lnk_pwr() [all …]
|
D | hdmigk104.c | 34 struct nvkm_device *device = disp->base.engine.subdev.device; in gk104_hdmi_ctrl() local 58 nvkm_mask(device, 0x616798 + hoff, 0x40000000, 0x00000000); in gk104_hdmi_ctrl() 59 nvkm_mask(device, 0x6900c0 + hdmi, 0x00000001, 0x00000000); in gk104_hdmi_ctrl() 60 nvkm_mask(device, 0x690000 + hdmi, 0x00000001, 0x00000000); in gk104_hdmi_ctrl() 65 nvkm_mask(device, 0x690000 + hdmi, 0x00000001, 0x00000000); in gk104_hdmi_ctrl() 66 nvkm_wr32(device, 0x690008 + hdmi, 0x000d0282); in gk104_hdmi_ctrl() 67 nvkm_wr32(device, 0x69000c + hdmi, 0x0000006f); in gk104_hdmi_ctrl() 68 nvkm_wr32(device, 0x690010 + hdmi, 0x00000000); in gk104_hdmi_ctrl() 69 nvkm_wr32(device, 0x690014 + hdmi, 0x00000000); in gk104_hdmi_ctrl() 70 nvkm_wr32(device, 0x690018 + hdmi, 0x00000000); in gk104_hdmi_ctrl() [all …]
|
D | piocnv50.c | 34 struct nvkm_device *device = subdev->device; in nv50_disp_pioc_fini() local 37 nvkm_mask(device, 0x610200 + (chid * 0x10), 0x00000001, 0x00000000); in nv50_disp_pioc_fini() 38 if (nvkm_msec(device, 2000, in nv50_disp_pioc_fini() 39 if (!(nvkm_rd32(device, 0x610200 + (chid * 0x10)) & 0x00030000)) in nv50_disp_pioc_fini() 43 nvkm_rd32(device, 0x610200 + (chid * 0x10))); in nv50_disp_pioc_fini() 52 struct nvkm_device *device = subdev->device; in nv50_disp_pioc_init() local 55 nvkm_wr32(device, 0x610200 + (chid * 0x10), 0x00002000); in nv50_disp_pioc_init() 56 if (nvkm_msec(device, 2000, in nv50_disp_pioc_init() 57 if (!(nvkm_rd32(device, 0x610200 + (chid * 0x10)) & 0x00030000)) in nv50_disp_pioc_init() 61 nvkm_rd32(device, 0x610200 + (chid * 0x10))); in nv50_disp_pioc_init() [all …]
|
D | hdmigf119.c | 34 struct nvkm_device *device = disp->base.engine.subdev.device; in gf119_hdmi_ctrl() local 57 nvkm_mask(device, 0x616798 + hoff, 0x40000000, 0x00000000); in gf119_hdmi_ctrl() 58 nvkm_mask(device, 0x6167a4 + hoff, 0x00000001, 0x00000000); in gf119_hdmi_ctrl() 59 nvkm_mask(device, 0x616714 + hoff, 0x00000001, 0x00000000); in gf119_hdmi_ctrl() 64 nvkm_mask(device, 0x616714 + hoff, 0x00000001, 0x00000000); in gf119_hdmi_ctrl() 65 nvkm_wr32(device, 0x61671c + hoff, 0x000d0282); in gf119_hdmi_ctrl() 66 nvkm_wr32(device, 0x616720 + hoff, 0x0000006f); in gf119_hdmi_ctrl() 67 nvkm_wr32(device, 0x616724 + hoff, 0x00000000); in gf119_hdmi_ctrl() 68 nvkm_wr32(device, 0x616728 + hoff, 0x00000000); in gf119_hdmi_ctrl() 69 nvkm_wr32(device, 0x61672c + hoff, 0x00000000); in gf119_hdmi_ctrl() [all …]
|
D | piocgf119.c | 34 struct nvkm_device *device = subdev->device; in gf119_disp_pioc_fini() local 37 nvkm_mask(device, 0x610490 + (chid * 0x10), 0x00000001, 0x00000000); in gf119_disp_pioc_fini() 38 if (nvkm_msec(device, 2000, in gf119_disp_pioc_fini() 39 if (!(nvkm_rd32(device, 0x610490 + (chid * 0x10)) & 0x00030000)) in gf119_disp_pioc_fini() 43 nvkm_rd32(device, 0x610490 + (chid * 0x10))); in gf119_disp_pioc_fini() 47 nvkm_mask(device, 0x610090, 0x00000001 << chid, 0x00000000); in gf119_disp_pioc_fini() 48 nvkm_mask(device, 0x6100a0, 0x00000001 << chid, 0x00000000); in gf119_disp_pioc_fini() 56 struct nvkm_device *device = subdev->device; in gf119_disp_pioc_init() local 60 nvkm_mask(device, 0x6100a0, 0x00000001 << chid, 0x00000001 << chid); in gf119_disp_pioc_init() 63 nvkm_wr32(device, 0x610490 + (chid * 0x10), 0x00000001); in gf119_disp_pioc_init() [all …]
|
D | dmacnv50.c | 60 struct nvkm_device *device = root->disp->base.engine.subdev.device; in nv50_disp_dmac_child_new_() local 71 ret = sclass->ctor(device, oclass, data, size, &object->oproxy.object); in nv50_disp_dmac_child_new_() 89 struct nvkm_device *device = disp->base.engine.subdev.device; in nv50_disp_dmac_child_get_() local 92 sclass->engine = nvkm_device_engine(device, NVKM_ENGINE_DMAOBJ); in nv50_disp_dmac_child_get_() 140 struct nvkm_device *device = root->disp->base.engine.subdev.device; in nv50_disp_dmac_new_() local 156 dmaobj = nvkm_dma_search(device->dma, client, push); in nv50_disp_dmac_new_() 192 struct nvkm_device *device = subdev->device; in nv50_disp_dmac_fini() local 196 nvkm_mask(device, 0x610200 + (chid * 0x0010), 0x00001010, 0x00001000); in nv50_disp_dmac_fini() 197 nvkm_mask(device, 0x610200 + (chid * 0x0010), 0x00000003, 0x00000000); in nv50_disp_dmac_fini() 198 if (nvkm_msec(device, 2000, in nv50_disp_dmac_fini() [all …]
|
D | dacnv50.c | 36 struct nvkm_device *device = disp->base.engine.subdev.device; in nv50_dac_power() local 57 nvkm_msec(device, 2000, in nv50_dac_power() 58 if (!(nvkm_rd32(device, 0x61a004 + doff) & 0x80000000)) in nv50_dac_power() 61 nvkm_mask(device, 0x61a004 + doff, 0xc000007f, 0x80000000 | stat); in nv50_dac_power() 62 nvkm_msec(device, 2000, in nv50_dac_power() 63 if (!(nvkm_rd32(device, 0x61a004 + doff) & 0x80000000)) in nv50_dac_power() 73 struct nvkm_device *device = subdev->device; in nv50_dac_sense() local 91 nvkm_mask(device, 0x61a004 + doff, 0x807f0000, 0x80150000); in nv50_dac_sense() 92 nvkm_msec(device, 2000, in nv50_dac_sense() 93 if (!(nvkm_rd32(device, 0x61a004 + doff) & 0x80000000)) in nv50_dac_sense() [all …]
|
D | sorgf119.c | 42 struct nvkm_device *device = outp->base.disp->engine.subdev.device; in gf119_sor_dp_pattern() local 44 nvkm_mask(device, 0x61c110 + loff, 0x0f0f0f0f, 0x01010101 * pattern); in gf119_sor_dp_pattern() 51 struct nvkm_device *device = outp->base.disp->engine.subdev.device; in gf119_sor_dp_lnk_ctl() local 62 nvkm_mask(device, 0x612300 + soff, 0x007c0000, clksor); in gf119_sor_dp_lnk_ctl() 63 nvkm_mask(device, 0x61c10c + loff, 0x001f4000, dpctrl); in gf119_sor_dp_lnk_ctl() 71 struct nvkm_device *device = outp->base.disp->engine.subdev.device; in gf119_sor_dp_drv_ctl() local 72 struct nvkm_bios *bios = device->bios; in gf119_sor_dp_drv_ctl() 73 const u32 shift = g94_sor_dp_lane_map(device, ln); in gf119_sor_dp_drv_ctl() 91 data[0] = nvkm_rd32(device, 0x61c118 + loff) & ~(0x000000ff << shift); in gf119_sor_dp_drv_ctl() 92 data[1] = nvkm_rd32(device, 0x61c120 + loff) & ~(0x000000ff << shift); in gf119_sor_dp_drv_ctl() [all …]
|
D | rootnv50.c | 37 struct nvkm_device *device = disp->base.engine.subdev.device; in nv50_disp_root_scanoutpos() local 38 const u32 blanke = nvkm_rd32(device, 0x610aec + (head * 0x540)); in nv50_disp_root_scanoutpos() 39 const u32 blanks = nvkm_rd32(device, 0x610af4 + (head * 0x540)); in nv50_disp_root_scanoutpos() 40 const u32 total = nvkm_rd32(device, 0x610afc + (head * 0x540)); in nv50_disp_root_scanoutpos() 58 nvkm_rd32(device, 0x616340 + (head * 0x800)) & 0xffff; in nv50_disp_root_scanoutpos() 61 nvkm_rd32(device, 0x616344 + (head * 0x800)) & 0xffff; in nv50_disp_root_scanoutpos() 277 struct nvkm_device *device = disp->base.engine.subdev.device; in nv50_disp_root_new_() local 288 ret = nvkm_gpuobj_new(disp->base.engine.subdev.device, 0x10000, 0x10000, in nv50_disp_root_new_() 293 return nvkm_ramht_new(device, 0x1000, 0, root->instmem, &root->ramht); in nv50_disp_root_new_() 299 struct nvkm_device *device = root->disp->base.engine.subdev.device; in nv50_disp_root_fini() local [all …]
|
D | hdagf119.c | 38 struct nvkm_device *device = disp->base.engine.subdev.device; in gf119_hda_eld() local 57 nvkm_mask(device, 0x616618 + hoff, 0x8000000c, 0x80000001); in gf119_hda_eld() 58 nvkm_msec(device, 2000, in gf119_hda_eld() 59 u32 tmp = nvkm_rd32(device, 0x616618 + hoff); in gf119_hda_eld() 64 nvkm_mask(device, 0x616548 + hoff, 0x00000070, 0x00000000); in gf119_hda_eld() 66 nvkm_wr32(device, 0x10ec00 + soff, (i << 8) | args->v0.data[i]); in gf119_hda_eld() 68 nvkm_wr32(device, 0x10ec00 + soff, (i << 8)); in gf119_hda_eld() 69 nvkm_mask(device, 0x10ec10 + soff, 0x80000003, 0x80000003); in gf119_hda_eld() 72 nvkm_mask(device, 0x616618 + hoff, 0x80000001, 0x80000000); in gf119_hda_eld() 73 nvkm_msec(device, 2000, in gf119_hda_eld() [all …]
|
D | nv50.c | 144 nv50_disp_new_(const struct nv50_disp_func *func, struct nvkm_device *device, in nv50_disp_new_() argument 156 ret = nvkm_disp_ctor(&nv50_disp_, device, index, heads, &disp->base); in nv50_disp_new_() 166 struct nvkm_device *device = disp->base.engine.subdev.device; in nv50_disp_vblank_fini() local 167 nvkm_mask(device, 0x61002c, (4 << head), 0); in nv50_disp_vblank_fini() 173 struct nvkm_device *device = disp->base.engine.subdev.device; in nv50_disp_vblank_init() local 174 nvkm_mask(device, 0x61002c, (4 << head), (4 << head)); in nv50_disp_vblank_init() 196 struct nvkm_device *device = subdev->device; in nv50_disp_intr_error() local 197 u32 data = nvkm_rd32(device, 0x610084 + (chid * 0x08)); in nv50_disp_intr_error() 198 u32 addr = nvkm_rd32(device, 0x610080 + (chid * 0x08)); in nv50_disp_intr_error() 222 nvkm_wr32(device, 0x610020, 0x00010000 << chid); in nv50_disp_intr_error() [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/ |
D | gk104.c | 34 magic_(struct nvkm_device *device, u32 ctrl, int size) in magic_() argument 36 nvkm_wr32(device, 0x00c800, 0x00000000); in magic_() 37 nvkm_wr32(device, 0x00c808, 0x00000000); in magic_() 38 nvkm_wr32(device, 0x00c800, ctrl); in magic_() 39 nvkm_msec(device, 2000, in magic_() 40 if (nvkm_rd32(device, 0x00c800) & 0x40000000) { in magic_() 42 nvkm_wr32(device, 0x00c804, 0x00000000); in magic_() 46 nvkm_wr32(device, 0x00c800, 0x00000000); in magic_() 50 magic(struct nvkm_device *device, u32 ctrl) in magic() argument 52 magic_(device, 0x8000a41f | ctrl, 6); in magic() [all …]
|
D | base.c | 40 struct nvkm_device *device = subdev->device; in nvkm_pmu_send() local 44 addr = nvkm_rd32(device, 0x10a4a0); in nvkm_pmu_send() 45 if (nvkm_msec(device, 2000, in nvkm_pmu_send() 46 u32 tmp = nvkm_rd32(device, 0x10a4b0); in nvkm_pmu_send() 64 nvkm_wr32(device, 0x10a580, 0x00000001); in nvkm_pmu_send() 65 } while (nvkm_rd32(device, 0x10a580) != 0x00000001); in nvkm_pmu_send() 68 nvkm_wr32(device, 0x10a1c0, 0x01000000 | (((addr & 0x07) << 4) + in nvkm_pmu_send() 70 nvkm_wr32(device, 0x10a1c4, process); in nvkm_pmu_send() 71 nvkm_wr32(device, 0x10a1c4, message); in nvkm_pmu_send() 72 nvkm_wr32(device, 0x10a1c4, data0); in nvkm_pmu_send() [all …]
|
D | gk110.c | 34 struct nvkm_device *device = pmu->subdev.device; in gk110_pmu_pgob() local 58 nvkm_mask(device, 0x000200, 0x00001000, 0x00000000); in gk110_pmu_pgob() 59 nvkm_rd32(device, 0x000200); in gk110_pmu_pgob() 60 nvkm_mask(device, 0x000200, 0x08000000, 0x08000000); in gk110_pmu_pgob() 63 nvkm_mask(device, 0x10a78c, 0x00000002, 0x00000002); in gk110_pmu_pgob() 64 nvkm_mask(device, 0x10a78c, 0x00000001, 0x00000001); in gk110_pmu_pgob() 65 nvkm_mask(device, 0x10a78c, 0x00000001, 0x00000000); in gk110_pmu_pgob() 67 nvkm_mask(device, 0x0206b4, 0x00000000, 0x00000000); in gk110_pmu_pgob() 69 nvkm_wr32(device, magic[i].addr, magic[i].data); in gk110_pmu_pgob() 70 nvkm_msec(device, 2000, in gk110_pmu_pgob() [all …]
|
D | gk20a.c | 54 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_target() 62 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_cur_state() 73 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_target_state() 103 struct nvkm_device *device = pmu->base.subdev.device; in gk20a_pmu_dvfs_get_dev_status() local 104 status->busy = nvkm_rd32(device, 0x10a508 + (BUSY_SLOT * 0x10)); in gk20a_pmu_dvfs_get_dev_status() 105 status->total= nvkm_rd32(device, 0x10a508 + (CLK_SLOT * 0x10)); in gk20a_pmu_dvfs_get_dev_status() 112 struct nvkm_device *device = pmu->base.subdev.device; in gk20a_pmu_dvfs_reset_dev_status() local 113 nvkm_wr32(device, 0x10a508 + (BUSY_SLOT * 0x10), 0x80000000); in gk20a_pmu_dvfs_reset_dev_status() 114 nvkm_wr32(device, 0x10a508 + (CLK_SLOT * 0x10), 0x80000000); in gk20a_pmu_dvfs_reset_dev_status() 125 struct nvkm_device *device = subdev->device; in gk20a_pmu_dvfs_work() local [all …]
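gk20a.c above drives a simple DVFS loop: it samples a busy counter and a total counter each period, resets them, and picks the next clock state from the measured load. A hedged sketch of the load arithmetic and the state decision follows; the busy/total split matches the excerpt, while the percentage thresholds are made-up illustrations rather than the driver's real policy.

```c
#include <linux/math64.h>
#include <linux/types.h>

struct example_dev_status {
	u32 busy;	/* cycles the unit was busy in the sample window */
	u32 total;	/* total cycles in the sample window */
};

/* Turn the busy/total sample into a 0..100 load figure. */
static int example_load_percent(const struct example_dev_status *status)
{
	if (!status->total)
		return 0;
	return (int)div_u64((u64)status->busy * 100, status->total);
}

/* Pick the next clock state with a crude hysteresis band.  The 85/20
 * thresholds are illustrative only. */
static int example_pick_state(int load, int cur, int max)
{
	if (load > 85 && cur < max)
		return cur + 1;
	if (load < 20 && cur > 0)
		return cur - 1;
	return cur;
}
```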
|
/linux-4.4.14/drivers/firewire/ |
D | core-device.c | 171 static const struct ieee1394_device_id *unit_match(struct device *dev, in unit_match() 187 static bool is_fw_unit(struct device *dev); 189 static int fw_unit_match(struct device *dev, struct device_driver *drv) in fw_unit_match() 195 static int fw_unit_probe(struct device *dev) in fw_unit_probe() 203 static int fw_unit_remove(struct device *dev) in fw_unit_remove() 222 static int fw_unit_uevent(struct device *dev, struct kobj_uevent_env *env) in fw_unit_uevent() 243 int fw_device_enable_phys_dma(struct fw_device *device) in fw_device_enable_phys_dma() argument 245 int generation = device->generation; in fw_device_enable_phys_dma() 250 return device->card->driver->enable_phys_dma(device->card, in fw_device_enable_phys_dma() 251 device->node_id, in fw_device_enable_phys_dma() [all …]
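core-device.c above is the firewire bus's driver-model glue: fw_unit_match() checks a unit against a driver's ieee1394_device_id table, and the probe/remove/uevent callbacks forward to the matched unit driver. The sketch below shows only the general shape of such a bus match callback with placeholder types; example_driver and example_id stand in for the real fw_driver and ieee1394_device_id structures, and matching on the device name is purely illustrative.

```c
#include <linux/device.h>
#include <linux/string.h>

struct example_id {
	const char *name;		/* table terminated by a NULL name */
};

struct example_driver {
	struct device_driver driver;
	const struct example_id *id_table;
};

/* Bus ->match() callback shape: walk the driver's id table and return
 * non-zero if this driver can handle the device. */
static int example_bus_match(struct device *dev, struct device_driver *drv)
{
	const struct example_driver *edrv =
		container_of(drv, struct example_driver, driver);
	const struct example_id *id;

	for (id = edrv->id_table; id && id->name; id++) {
		if (!strcmp(dev_name(dev), id->name))
			return 1;
	}
	return 0;
}
```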
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/fifo/ |
D | nv04.c | 52 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv04_fifo_pause() local 58 nvkm_wr32(device, NV03_PFIFO_CACHES, 0x00000000); in nv04_fifo_pause() 59 nvkm_mask(device, NV04_PFIFO_CACHE1_PULL0, 0x00000001, 0x00000000); in nv04_fifo_pause() 70 nvkm_msec(device, 2000, in nv04_fifo_pause() 71 u32 tmp = nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0); in nv04_fifo_pause() 76 if (nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0) & in nv04_fifo_pause() 78 nvkm_wr32(device, NV03_PFIFO_INTR_0, NV_PFIFO_INTR_CACHE_ERROR); in nv04_fifo_pause() 80 nvkm_wr32(device, NV04_PFIFO_CACHE1_HASH, 0x00000000); in nv04_fifo_pause() 88 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv04_fifo_start() local 91 nvkm_mask(device, NV04_PFIFO_CACHE1_PULL0, 0x00000001, 0x00000001); in nv04_fifo_start() [all …]
|
D | gf100.c | 38 struct nvkm_device *device = fifo->engine.subdev.device; in gf100_fifo_uevent_init() local 39 nvkm_mask(device, 0x002140, 0x80000000, 0x80000000); in gf100_fifo_uevent_init() 45 struct nvkm_device *device = fifo->engine.subdev.device; in gf100_fifo_uevent_fini() local 46 nvkm_mask(device, 0x002140, 0x80000000, 0x00000000); in gf100_fifo_uevent_fini() 54 struct nvkm_device *device = subdev->device; in gf100_fifo_runlist_update() local 70 nvkm_wr32(device, 0x002270, nvkm_memory_addr(cur) >> 12); in gf100_fifo_runlist_update() 71 nvkm_wr32(device, 0x002274, 0x01f00000 | nr); in gf100_fifo_runlist_update() 74 !(nvkm_rd32(device, 0x00227c) & 0x00100000), in gf100_fifo_runlist_update() 100 struct nvkm_device *device = fifo->base.engine.subdev.device; in gf100_fifo_engine() local 113 return nvkm_device_engine(device, engn); in gf100_fifo_engine() [all …]
|
D | gk104.c | 38 struct nvkm_device *device = fifo->engine.subdev.device; in gk104_fifo_uevent_fini() local 39 nvkm_mask(device, 0x002140, 0x80000000, 0x00000000); in gk104_fifo_uevent_fini() 45 struct nvkm_device *device = fifo->engine.subdev.device; in gk104_fifo_uevent_init() local 46 nvkm_mask(device, 0x002140, 0x80000000, 0x80000000); in gk104_fifo_uevent_init() 55 struct nvkm_device *device = subdev->device; in gk104_fifo_runlist_update() local 71 nvkm_wr32(device, 0x002270, nvkm_memory_addr(cur) >> 12); in gk104_fifo_runlist_update() 72 nvkm_wr32(device, 0x002274, (engine << 20) | nr); in gk104_fifo_runlist_update() 74 if (wait_event_timeout(engn->wait, !(nvkm_rd32(device, 0x002284 + in gk104_fifo_runlist_update() 84 struct nvkm_device *device = fifo->base.engine.subdev.device; in gk104_fifo_engine() local 87 return nvkm_device_engine(device, __ffs(subdevs)); in gk104_fifo_engine() [all …]
|
D | nv40.c | 63 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv40_fifo_init() local 64 struct nvkm_fb *fb = device->fb; in nv40_fifo_init() 65 struct nvkm_instmem *imem = device->imem; in nv40_fifo_init() 70 nvkm_wr32(device, 0x002040, 0x000000ff); in nv40_fifo_init() 71 nvkm_wr32(device, 0x002044, 0x2101ffff); in nv40_fifo_init() 72 nvkm_wr32(device, 0x002058, 0x00000001); in nv40_fifo_init() 74 nvkm_wr32(device, NV03_PFIFO_RAMHT, (0x03 << 24) /* search 128 */ | in nv40_fifo_init() 77 nvkm_wr32(device, NV03_PFIFO_RAMRO, nvkm_memory_addr(ramro) >> 8); in nv40_fifo_init() 79 switch (device->chipset) { in nv40_fifo_init() 83 nvkm_wr32(device, 0x002230, 0x00000001); in nv40_fifo_init() [all …]
|
D | nv50.c | 32 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv50_fifo_runlist_update_locked() local 41 if (nvkm_rd32(device, 0x002600 + (i * 4)) & 0x80000000) in nv50_fifo_runlist_update_locked() 46 nvkm_wr32(device, 0x0032f4, nvkm_memory_addr(cur) >> 12); in nv50_fifo_runlist_update_locked() 47 nvkm_wr32(device, 0x0032ec, p); in nv50_fifo_runlist_update_locked() 48 nvkm_wr32(device, 0x002500, 0x00000101); in nv50_fifo_runlist_update_locked() 63 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv50_fifo_oneinit() local 66 ret = nvkm_memory_new(device, NVKM_MEM_TARGET_INST, 128 * 4, 0x1000, in nv50_fifo_oneinit() 71 return nvkm_memory_new(device, NVKM_MEM_TARGET_INST, 128 * 4, 0x1000, in nv50_fifo_oneinit() 79 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv50_fifo_init() local 82 nvkm_mask(device, 0x000200, 0x00000100, 0x00000000); in nv50_fifo_init() [all …]
|
D | nv17.c | 54 struct nvkm_device *device = fifo->base.engine.subdev.device; in nv17_fifo_init() local 55 struct nvkm_instmem *imem = device->imem; in nv17_fifo_init() 60 nvkm_wr32(device, NV04_PFIFO_DELAY_0, 0x000000ff); in nv17_fifo_init() 61 nvkm_wr32(device, NV04_PFIFO_DMA_TIMESLICE, 0x0101ffff); in nv17_fifo_init() 63 nvkm_wr32(device, NV03_PFIFO_RAMHT, (0x03 << 24) /* search 128 */ | in nv17_fifo_init() 66 nvkm_wr32(device, NV03_PFIFO_RAMRO, nvkm_memory_addr(ramro) >> 8); in nv17_fifo_init() 67 nvkm_wr32(device, NV03_PFIFO_RAMFC, nvkm_memory_addr(ramfc) >> 8 | in nv17_fifo_init() 70 nvkm_wr32(device, NV03_PFIFO_CACHE1_PUSH1, fifo->base.nr - 1); in nv17_fifo_init() 72 nvkm_wr32(device, NV03_PFIFO_INTR_0, 0xffffffff); in nv17_fifo_init() 73 nvkm_wr32(device, NV03_PFIFO_INTR_EN_0, 0xffffffff); in nv17_fifo_init() [all …]
|
/linux-4.4.14/drivers/s390/block/ |
D | dasd.c | 92 struct dasd_device *device; in dasd_alloc_device() local 94 device = kzalloc(sizeof(struct dasd_device), GFP_ATOMIC); in dasd_alloc_device() 95 if (!device) in dasd_alloc_device() 99 device->ccw_mem = (void *) __get_free_pages(GFP_ATOMIC | GFP_DMA, 1); in dasd_alloc_device() 100 if (!device->ccw_mem) { in dasd_alloc_device() 101 kfree(device); in dasd_alloc_device() 105 device->erp_mem = (void *) get_zeroed_page(GFP_ATOMIC | GFP_DMA); in dasd_alloc_device() 106 if (!device->erp_mem) { in dasd_alloc_device() 107 free_pages((unsigned long) device->ccw_mem, 1); in dasd_alloc_device() 108 kfree(device); in dasd_alloc_device() [all …]
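dasd_alloc_device() above builds the device structure plus two DMA-capable memory areas and unwinds everything already allocated when a later step fails. A condensed sketch of that staged-cleanup ladder follows; it assumes the driver-private dasd_int.h definitions (struct dasd_device with ccw_mem and erp_mem) are in scope, exactly as in the excerpt.

```c
#include <linux/gfp.h>
#include <linux/slab.h>
#include <linux/err.h>
#include "dasd_int.h"	/* driver-private: struct dasd_device */

/* Condensed allocation/unwind ladder from dasd_alloc_device(); only the
 * three allocations visible in the excerpt are kept. */
static struct dasd_device *example_alloc_device(void)
{
	struct dasd_device *device;

	device = kzalloc(sizeof(struct dasd_device), GFP_ATOMIC);
	if (!device)
		return ERR_PTR(-ENOMEM);

	/* two pages of DMA-capable memory for channel programs */
	device->ccw_mem = (void *)__get_free_pages(GFP_ATOMIC | GFP_DMA, 1);
	if (!device->ccw_mem)
		goto out_free_device;

	/* one zeroed page for error-recovery requests */
	device->erp_mem = (void *)get_zeroed_page(GFP_ATOMIC | GFP_DMA);
	if (!device->erp_mem)
		goto out_free_ccw;

	return device;

out_free_ccw:
	free_pages((unsigned long)device->ccw_mem, 1);
out_free_device:
	kfree(device);
	return ERR_PTR(-ENOMEM);
}
```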
|
D | dasd_devmap.c | 50 struct dasd_device *device; member 429 new->device = NULL; in dasd_add_busid() 483 BUG_ON(devmap->device != NULL); in dasd_forget_ranges() 498 struct dasd_device *device; in dasd_device_from_devindex() local 510 if (devmap && devmap->device) { in dasd_device_from_devindex() 511 device = devmap->device; in dasd_device_from_devindex() 512 dasd_get_device(device); in dasd_device_from_devindex() 514 device = ERR_PTR(-ENODEV); in dasd_device_from_devindex() 516 return device; in dasd_device_from_devindex() 542 struct dasd_device *device; in dasd_create_device() local [all …]
|
D | dasd_3990_erp.c | 73 struct dasd_device *device = erp->startdev; in dasd_3990_erp_block_queue() local 76 DBF_DEV_EVENT(DBF_INFO, device, in dasd_3990_erp_block_queue() 79 spin_lock_irqsave(get_ccwdev_lock(device->cdev), flags); in dasd_3990_erp_block_queue() 80 dasd_device_set_stop_bits(device, DASD_STOPPED_PENDING); in dasd_3990_erp_block_queue() 81 spin_unlock_irqrestore(get_ccwdev_lock(device->cdev), flags); in dasd_3990_erp_block_queue() 86 dasd_device_set_timer(device, expires); in dasd_3990_erp_block_queue() 105 struct dasd_device *device = erp->startdev; in dasd_3990_erp_int_req() local 118 dev_err(&device->cdev->dev, in dasd_3990_erp_int_req() 146 struct dasd_device *device = erp->startdev; in dasd_3990_erp_alternate_path() local 151 spin_lock_irqsave(get_ccwdev_lock(device->cdev), flags); in dasd_3990_erp_alternate_path() [all …]
|
D | dasd_alias.c | 186 int dasd_alias_make_device_known_to_lcu(struct dasd_device *device) in dasd_alias_make_device_known_to_lcu() argument 194 private = (struct dasd_eckd_private *) device->private; in dasd_alias_make_device_known_to_lcu() 196 device->discipline->get_uid(device, &uid); in dasd_alias_make_device_known_to_lcu() 232 list_add(&device->alias_list, &lcu->inactive_devices); in dasd_alias_make_device_known_to_lcu() 245 void dasd_alias_disconnect_device_from_lcu(struct dasd_device *device) in dasd_alias_disconnect_device_from_lcu() argument 254 private = (struct dasd_eckd_private *) device->private; in dasd_alias_disconnect_device_from_lcu() 259 device->discipline->get_uid(device, &uid); in dasd_alias_disconnect_device_from_lcu() 261 list_del_init(&device->alias_list); in dasd_alias_disconnect_device_from_lcu() 263 if (device == lcu->suc_data.device) { in dasd_alias_disconnect_device_from_lcu() 267 if (device == lcu->suc_data.device) { in dasd_alias_disconnect_device_from_lcu() [all …]
|
D | dasd_erp.c | 27 struct dasd_device * device) in dasd_alloc_erp_request() argument 43 spin_lock_irqsave(&device->mem_lock, flags); in dasd_alloc_erp_request() 45 dasd_alloc_chunk(&device->erp_chunks, size); in dasd_alloc_erp_request() 46 spin_unlock_irqrestore(&device->mem_lock, flags); in dasd_alloc_erp_request() 67 dasd_get_device(device); in dasd_alloc_erp_request() 72 dasd_free_erp_request(struct dasd_ccw_req *cqr, struct dasd_device * device) in dasd_free_erp_request() argument 76 spin_lock_irqsave(&device->mem_lock, flags); in dasd_free_erp_request() 77 dasd_free_chunk(&device->erp_chunks, cqr); in dasd_free_erp_request() 78 spin_unlock_irqrestore(&device->mem_lock, flags); in dasd_free_erp_request() 79 atomic_dec(&device->ref_count); in dasd_free_erp_request() [all …]
|
D | dasd_diag.c | 104 mdsk_init_io(struct dasd_device *device, unsigned int blocksize, in mdsk_init_io() argument 111 private = (struct dasd_diag_private *) device->private; in mdsk_init_io() 131 mdsk_term_io(struct dasd_device * device) in mdsk_term_io() argument 137 private = (struct dasd_diag_private *) device->private; in mdsk_term_io() 148 dasd_diag_erp(struct dasd_device *device) in dasd_diag_erp() argument 152 mdsk_term_io(device); in dasd_diag_erp() 153 rc = mdsk_init_io(device, device->block->bp_block, 0, NULL); in dasd_diag_erp() 155 if (!(test_and_set_bit(DASD_FLAG_DEVICE_RO, &device->flags))) in dasd_diag_erp() 158 dev_name(&device->cdev->dev)); in dasd_diag_erp() 163 "rc=%d\n", dev_name(&device->cdev->dev), rc); in dasd_diag_erp() [all …]
|
D | dasd_eckd.c | 107 struct dasd_device *device; member 119 struct dasd_device *device; member 213 struct dasd_device *device) in check_XRC() argument 218 private = (struct dasd_eckd_private *) device->private; in check_XRC() 238 unsigned int totrk, int cmd, struct dasd_device *device) in define_extent() argument 245 private = (struct dasd_eckd_private *) device->private; in define_extent() 272 rc = check_XRC (ccw, data, device); in define_extent() 277 rc = check_XRC (ccw, data, device); in define_extent() 285 rc = check_XRC (ccw, data, device); in define_extent() 288 dev_err(&device->cdev->dev, in define_extent() [all …]
|
/linux-4.4.14/drivers/gpu/host1x/ |
D | bus.c | 43 static int host1x_subdev_add(struct host1x_device *device, in host1x_subdev_add() argument 55 mutex_lock(&device->subdevs_lock); in host1x_subdev_add() 56 list_add_tail(&subdev->list, &device->subdevs); in host1x_subdev_add() 57 mutex_unlock(&device->subdevs_lock); in host1x_subdev_add() 75 static int host1x_device_parse_dt(struct host1x_device *device, in host1x_device_parse_dt() argument 81 for_each_child_of_node(device->dev.parent->of_node, np) { in host1x_device_parse_dt() 84 err = host1x_subdev_add(device, np); in host1x_device_parse_dt() 93 static void host1x_subdev_register(struct host1x_device *device, in host1x_subdev_register() argument 104 mutex_lock(&device->subdevs_lock); in host1x_subdev_register() 105 mutex_lock(&device->clients_lock); in host1x_subdev_register() [all …]
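host1x_subdev_add() above appends a subdevice record to the device's list while holding subdevs_lock, and registration later walks the same lists under both the subdevs and clients locks. A small generic sketch of that mutex-protected list append, with placeholder owner/item structures:

```c
#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/slab.h>

struct example_owner {
	struct mutex lock;
	struct list_head items;
};

struct example_item {
	struct list_head list;
	int id;
};

/* Allocate an item and add it to the owner's list under the lock,
 * mirroring the shape of host1x_subdev_add(). */
static int example_add_item(struct example_owner *owner, int id)
{
	struct example_item *item = kzalloc(sizeof(*item), GFP_KERNEL);

	if (!item)
		return -ENOMEM;
	item->id = id;

	mutex_lock(&owner->lock);
	list_add_tail(&item->list, &owner->items);
	mutex_unlock(&owner->lock);
	return 0;
}
```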
|
/linux-4.4.14/drivers/hid/ |
D | hid-roccat.c | 48 struct device *dev; 65 struct roccat_device *device; member 80 struct roccat_device *device = reader->device; in roccat_read() local 85 mutex_lock(&device->cbuf_lock); in roccat_read() 88 if (reader->cbuf_start == device->cbuf_end) { in roccat_read() 89 add_wait_queue(&device->wait, &wait); in roccat_read() 93 while (reader->cbuf_start == device->cbuf_end) { in roccat_read() 102 if (!device->exist) { in roccat_read() 107 mutex_unlock(&device->cbuf_lock); in roccat_read() 109 mutex_lock(&device->cbuf_lock); in roccat_read() [all …]
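roccat_read() above blocks the reader until the device's circular buffer holds data, dropping its mutex while sleeping and bailing out on signals or device removal. The sketch below shows the same blocking-read shape in simplified form using wait_event_interruptible(); the ring layout and names are placeholders, and the real driver open-codes the wait so it can release its mutex around schedule().

```c
#include <linux/mutex.h>
#include <linux/types.h>
#include <linux/uaccess.h>
#include <linux/wait.h>

struct example_reader_dev {
	struct mutex lock;
	wait_queue_head_t wait;
	bool exist;		/* cleared when the device disappears */
	size_t head, tail;	/* head == tail means the ring is empty */
	u8 ring[64];
};

/* Copy one queued byte to userspace, sleeping until data is available
 * or the device goes away. */
static ssize_t example_read_one(struct example_reader_dev *dev,
				char __user *buf)
{
	ssize_t ret;

	ret = wait_event_interruptible(dev->wait,
				       dev->head != dev->tail || !dev->exist);
	if (ret)
		return ret;	/* -ERESTARTSYS if a signal arrived */

	mutex_lock(&dev->lock);
	if (!dev->exist) {
		mutex_unlock(&dev->lock);
		return -ENODEV;
	}
	if (copy_to_user(buf, &dev->ring[dev->tail], 1)) {
		mutex_unlock(&dev->lock);
		return -EFAULT;
	}
	dev->tail = (dev->tail + 1) % sizeof(dev->ring);
	mutex_unlock(&dev->lock);
	return 1;
}
```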
|
/linux-4.4.14/drivers/infiniband/core/ |
D | device.c | 85 static int ib_device_check_mandatory(struct ib_device *device) in ib_device_check_mandatory() argument 116 if (!*(void **) ((void *) device + mandatory_table[i].offset)) { in ib_device_check_mandatory() 118 device->name, mandatory_table[i].name); in ib_device_check_mandatory() 128 struct ib_device *device; in __ib_device_get_by_name() local 130 list_for_each_entry(device, &device_list, core_list) in __ib_device_get_by_name() 131 if (!strncmp(name, device->name, IB_DEVICE_NAME_MAX)) in __ib_device_get_by_name() 132 return device; in __ib_device_get_by_name() 142 struct ib_device *device; in alloc_name() local 149 list_for_each_entry(device, &device_list, core_list) { in alloc_name() 150 if (!sscanf(device->name, name, &i)) in alloc_name() [all …]
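device.c above finds an IB device by comparing names over the global device_list, and alloc_name() reuses the same walk with sscanf() plus a bitmap to pick the first free "ib%d"-style name. A hedged, self-contained sketch of that index-allocation trick, operating on a plain array of existing names and a small fixed bitmap instead of the original's device list and page-sized bitmap:

```c
#include <linux/bitops.h>
#include <linux/kernel.h>
#include <linux/types.h>

/* Find the first unused index for a printf-style name such as "ib%d",
 * given the names already in use.  Simplified from alloc_name(). */
static int example_alloc_index(const char *pattern,
			       const char *const *existing, int count)
{
	DECLARE_BITMAP(inuse, 128) = { 0 };
	int i, idx;

	for (i = 0; i < count; i++) {
		if (sscanf(existing[i], pattern, &idx) != 1)
			continue;
		if (idx >= 0 && idx < 128)
			set_bit(idx, inuse);
	}
	return find_first_zero_bit(inuse, 128);
}
```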
|
D | cache.c | 54 struct ib_device *device; member 170 event.device = ib_dev; in write_gid() 725 int ib_get_cached_gid(struct ib_device *device, in ib_get_cached_gid() argument 731 if (port_num < rdma_start_port(device) || port_num > rdma_end_port(device)) in ib_get_cached_gid() 734 return __ib_cache_gid_get(device, port_num, index, gid, gid_attr); in ib_get_cached_gid() 738 int ib_find_cached_gid(struct ib_device *device, in ib_find_cached_gid() argument 744 return ib_cache_gid_find(device, gid, ndev, port_num, index); in ib_find_cached_gid() 748 int ib_find_gid_by_filter(struct ib_device *device, in ib_find_gid_by_filter() argument 757 if (!rdma_cap_roce_gid_table(device, port_num) && filter) in ib_find_gid_by_filter() 760 return ib_cache_gid_find_by_filter(device, gid, in ib_find_gid_by_filter() [all …]
|
D | agent.c | 57 __ib_get_agent_port(const struct ib_device *device, int port_num) in __ib_get_agent_port() argument 62 if (entry->agent[1]->device == device && in __ib_get_agent_port() 70 ib_get_agent_port(const struct ib_device *device, int port_num) in ib_get_agent_port() argument 76 entry = __ib_get_agent_port(device, port_num); in ib_get_agent_port() 82 const struct ib_wc *wc, const struct ib_device *device, in agent_send_response() argument 91 if (rdma_cap_ib_switch(device)) in agent_send_response() 92 port_priv = ib_get_agent_port(device, 0); in agent_send_response() 94 port_priv = ib_get_agent_port(device, port_num); in agent_send_response() 97 dev_err(&device->dev, "Unable to find port agent\n"); in agent_send_response() 104 dev_err(&device->dev, "ib_create_ah_from_wc error %ld\n", in agent_send_response() [all …]
|
D | verbs.c | 199 enum rdma_link_layer rdma_port_get_link_layer(struct ib_device *device, u8 port_num) in rdma_port_get_link_layer() argument 201 if (device->get_link_layer) in rdma_port_get_link_layer() 202 return device->get_link_layer(device, port_num); in rdma_port_get_link_layer() 204 switch (rdma_node_get_transport(device->node_type)) { in rdma_port_get_link_layer() 229 struct ib_pd *ib_alloc_pd(struct ib_device *device) in ib_alloc_pd() argument 235 rc = ib_query_device(device, &devattr); in ib_alloc_pd() 239 pd = device->alloc_pd(device, NULL, NULL); in ib_alloc_pd() 243 pd->device = device; in ib_alloc_pd() 249 pd->local_dma_lkey = device->local_dma_lkey; in ib_alloc_pd() 290 ret = pd->device->dealloc_pd(pd); in ib_dealloc_pd() [all …]
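verbs.c above shows ib_alloc_pd() in this kernel generation: it queries device attributes, asks the provider for a PD, and fills in the device and local_dma_lkey fields. From a consumer's side the pattern is allocate, check with IS_ERR(), use, then ib_dealloc_pd(); a minimal hedged sketch:

```c
#include <linux/err.h>
#include <rdma/ib_verbs.h>

/* Allocate a protection domain against the 4.4-era single-argument
 * ib_alloc_pd() shown above and hand back either the PD or the error
 * pointer it produced. */
static struct ib_pd *example_setup_pd(struct ib_device *device)
{
	struct ib_pd *pd = ib_alloc_pd(device);

	if (IS_ERR(pd))
		return pd;

	/* ... create QPs/MRs/CQ resources against pd here ... */
	return pd;
}

static void example_teardown_pd(struct ib_pd *pd)
{
	/* fails only while objects still reference the PD */
	ib_dealloc_pd(pd);
}
```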
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/ltc/ |
D | gm107.c | 32 struct nvkm_device *device = ltc->subdev.device; in gm107_ltc_cbc_clear() local 33 nvkm_wr32(device, 0x17e270, start); in gm107_ltc_cbc_clear() 34 nvkm_wr32(device, 0x17e274, limit); in gm107_ltc_cbc_clear() 35 nvkm_wr32(device, 0x17e26c, 0x00000004); in gm107_ltc_cbc_clear() 41 struct nvkm_device *device = ltc->subdev.device; in gm107_ltc_cbc_wait() local 46 nvkm_msec(device, 2000, in gm107_ltc_cbc_wait() 47 if (!nvkm_rd32(device, addr)) in gm107_ltc_cbc_wait() 57 struct nvkm_device *device = ltc->subdev.device; in gm107_ltc_zbc_clear_color() local 58 nvkm_mask(device, 0x17e338, 0x0000000f, i); in gm107_ltc_zbc_clear_color() 59 nvkm_wr32(device, 0x17e33c, color[0]); in gm107_ltc_zbc_clear_color() [all …]
|
D | gf100.c | 33 struct nvkm_device *device = ltc->subdev.device; in gf100_ltc_cbc_clear() local 34 nvkm_wr32(device, 0x17e8cc, start); in gf100_ltc_cbc_clear() 35 nvkm_wr32(device, 0x17e8d0, limit); in gf100_ltc_cbc_clear() 36 nvkm_wr32(device, 0x17e8c8, 0x00000004); in gf100_ltc_cbc_clear() 42 struct nvkm_device *device = ltc->subdev.device; in gf100_ltc_cbc_wait() local 47 nvkm_msec(device, 2000, in gf100_ltc_cbc_wait() 48 if (!nvkm_rd32(device, addr)) in gf100_ltc_cbc_wait() 58 struct nvkm_device *device = ltc->subdev.device; in gf100_ltc_zbc_clear_color() local 59 nvkm_mask(device, 0x17ea44, 0x0000000f, i); in gf100_ltc_zbc_clear_color() 60 nvkm_wr32(device, 0x17ea48, color[0]); in gf100_ltc_zbc_clear_color() [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/devinit/ |
D | nv04.c | 39 struct nvkm_device *device = subdev->device; in nv04_devinit_meminit() local 45 fb = fbmem_init(device); in nv04_devinit_meminit() 52 nvkm_wrvgas(device, 0, 1, nvkm_rdvgas(device, 0, 1) | 0x20); in nv04_devinit_meminit() 53 nvkm_mask(device, NV04_PFB_DEBUG_0, 0, NV04_PFB_DEBUG_0_REFRESH_OFF); in nv04_devinit_meminit() 55 nvkm_mask(device, NV04_PFB_BOOT_0, ~0, in nv04_devinit_meminit() 66 nvkm_mask(device, NV04_PFB_BOOT_0, in nv04_devinit_meminit() 69 nvkm_mask(device, NV04_PFB_DEBUG_0, in nv04_devinit_meminit() 76 nvkm_mask(device, NV04_PFB_BOOT_0, in nv04_devinit_meminit() 82 nvkm_mask(device, NV04_PFB_BOOT_0, in nv04_devinit_meminit() 89 nvkm_mask(device, NV04_PFB_BOOT_0, in nv04_devinit_meminit() [all …]
|
D | gm204.c | 33 struct nvkm_device *device = init->base.subdev.device; in pmu_code() local 34 struct nvkm_bios *bios = device->bios; in pmu_code() 37 nvkm_wr32(device, 0x10a180, 0x01000000 | (sec ? 0x10000000 : 0) | pmu); in pmu_code() 40 nvkm_wr32(device, 0x10a188, (pmu + i) >> 8); in pmu_code() 41 nvkm_wr32(device, 0x10a184, nvbios_rd32(bios, img + i)); in pmu_code() 45 nvkm_wr32(device, 0x10a184, 0x00000000); in pmu_code() 53 struct nvkm_device *device = init->base.subdev.device; in pmu_data() local 54 struct nvkm_bios *bios = device->bios; in pmu_data() 57 nvkm_wr32(device, 0x10a1c0, 0x01000000 | pmu); in pmu_data() 59 nvkm_wr32(device, 0x10a1c4, nvbios_rd32(bios, img + i)); in pmu_data() [all …]
|
D | nv05.c | 48 struct nvkm_device *device = subdev->device; in nv05_devinit_meminit() local 49 struct nvkm_bios *bios = device->bios; in nv05_devinit_meminit() 57 fb = fbmem_init(device); in nv05_devinit_meminit() 63 strap = (nvkm_rd32(device, 0x101000) & 0x0000003c) >> 2; in nv05_devinit_meminit() 73 nvkm_wrvgas(device, 0, 1, nvkm_rdvgas(device, 0, 1) | 0x20); in nv05_devinit_meminit() 75 if (nvkm_rd32(device, NV04_PFB_BOOT_0) & NV04_PFB_BOOT_0_UMA_ENABLE) in nv05_devinit_meminit() 78 nvkm_mask(device, NV04_PFB_DEBUG_0, NV04_PFB_DEBUG_0_REFRESH_OFF, 0); in nv05_devinit_meminit() 84 nvkm_wr32(device, NV04_PFB_SCRAMBLE(i), scramble); in nv05_devinit_meminit() 89 nvkm_mask(device, NV04_PFB_BOOT_0, 0x3f, ramcfg[0]); in nv05_devinit_meminit() 92 nvkm_mask(device, NV04_PFB_CFG0, 0, NV04_PFB_CFG0_SCRAMBLE); in nv05_devinit_meminit() [all …]
|
D | nv50.c | 38 struct nvkm_device *device = subdev->device; in nv50_devinit_pll_set() local 39 struct nvkm_bios *bios = device->bios; in nv50_devinit_pll_set() 59 nvkm_wr32(device, info.reg + 0, 0x10000611); in nv50_devinit_pll_set() 60 nvkm_mask(device, info.reg + 4, 0x00ff00ff, (M1 << 16) | N1); in nv50_devinit_pll_set() 61 nvkm_mask(device, info.reg + 8, 0x7fff00ff, (P << 28) | in nv50_devinit_pll_set() 65 nvkm_mask(device, info.reg + 0, 0x01ff0000, in nv50_devinit_pll_set() 69 nvkm_wr32(device, info.reg + 4, (N1 << 8) | M1); in nv50_devinit_pll_set() 72 nvkm_mask(device, info.reg + 0, 0x00070000, (P << 16)); in nv50_devinit_pll_set() 73 nvkm_wr32(device, info.reg + 4, (N1 << 8) | M1); in nv50_devinit_pll_set() 83 struct nvkm_device *device = init->subdev.device; in nv50_devinit_disable() local [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/ibus/ |
D | gk104.c | 29 struct nvkm_device *device = ibus->device; in gk104_ibus_intr_hub() local 30 u32 addr = nvkm_rd32(device, 0x122120 + (i * 0x0800)); in gk104_ibus_intr_hub() 31 u32 data = nvkm_rd32(device, 0x122124 + (i * 0x0800)); in gk104_ibus_intr_hub() 32 u32 stat = nvkm_rd32(device, 0x122128 + (i * 0x0800)); in gk104_ibus_intr_hub() 34 nvkm_mask(device, 0x122128 + (i * 0x0800), 0x00000200, 0x00000000); in gk104_ibus_intr_hub() 40 struct nvkm_device *device = ibus->device; in gk104_ibus_intr_rop() local 41 u32 addr = nvkm_rd32(device, 0x124120 + (i * 0x0800)); in gk104_ibus_intr_rop() 42 u32 data = nvkm_rd32(device, 0x124124 + (i * 0x0800)); in gk104_ibus_intr_rop() 43 u32 stat = nvkm_rd32(device, 0x124128 + (i * 0x0800)); in gk104_ibus_intr_rop() 45 nvkm_mask(device, 0x124128 + (i * 0x0800), 0x00000200, 0x00000000); in gk104_ibus_intr_rop() [all …]
|
D | gf100.c | 29 struct nvkm_device *device = ibus->device; in gf100_ibus_intr_hub() local 30 u32 addr = nvkm_rd32(device, 0x122120 + (i * 0x0400)); in gf100_ibus_intr_hub() 31 u32 data = nvkm_rd32(device, 0x122124 + (i * 0x0400)); in gf100_ibus_intr_hub() 32 u32 stat = nvkm_rd32(device, 0x122128 + (i * 0x0400)); in gf100_ibus_intr_hub() 34 nvkm_mask(device, 0x122128 + (i * 0x0400), 0x00000200, 0x00000000); in gf100_ibus_intr_hub() 40 struct nvkm_device *device = ibus->device; in gf100_ibus_intr_rop() local 41 u32 addr = nvkm_rd32(device, 0x124120 + (i * 0x0400)); in gf100_ibus_intr_rop() 42 u32 data = nvkm_rd32(device, 0x124124 + (i * 0x0400)); in gf100_ibus_intr_rop() 43 u32 stat = nvkm_rd32(device, 0x124128 + (i * 0x0400)); in gf100_ibus_intr_rop() 45 nvkm_mask(device, 0x124128 + (i * 0x0400), 0x00000200, 0x00000000); in gf100_ibus_intr_rop() [all …]
|
D | gk20a.c | 28 struct nvkm_device *device = ibus->device; in gk20a_ibus_init_ibus_ring() local 29 nvkm_mask(device, 0x137250, 0x3f, 0); in gk20a_ibus_init_ibus_ring() 31 nvkm_mask(device, 0x000200, 0x20, 0); in gk20a_ibus_init_ibus_ring() 33 nvkm_mask(device, 0x000200, 0x20, 0x20); in gk20a_ibus_init_ibus_ring() 35 nvkm_wr32(device, 0x12004c, 0x4); in gk20a_ibus_init_ibus_ring() 36 nvkm_wr32(device, 0x122204, 0x2); in gk20a_ibus_init_ibus_ring() 37 nvkm_rd32(device, 0x122204); in gk20a_ibus_init_ibus_ring() 43 nvkm_wr32(device, 0x122354, 0x800); in gk20a_ibus_init_ibus_ring() 44 nvkm_wr32(device, 0x128328, 0x800); in gk20a_ibus_init_ibus_ring() 45 nvkm_wr32(device, 0x124320, 0x800); in gk20a_ibus_init_ibus_ring() [all …]
|
/linux-4.4.14/drivers/acpi/ |
D | scan.c | 28 #define ACPI_IS_ROOT_DEVICE(device) (!(device)->parent) argument 145 struct acpi_device *device = NULL; in acpi_bus_offline() local 150 if (acpi_bus_get_device(handle, &device)) in acpi_bus_offline() 153 if (device->handler && !device->handler->hotplug.enabled) { in acpi_bus_offline() 154 *ret_p = &device->dev; in acpi_bus_offline() 158 mutex_lock(&device->physical_node_lock); in acpi_bus_offline() 160 list_for_each_entry(pn, &device->physical_node_list, node) { in acpi_bus_offline() 185 mutex_unlock(&device->physical_node_lock); in acpi_bus_offline() 193 struct acpi_device *device = NULL; in acpi_bus_online() local 196 if (acpi_bus_get_device(handle, &device)) in acpi_bus_online() [all …]
|
D | processor_driver.c | 51 static int acpi_processor_start(struct device *dev); 52 static int acpi_processor_stop(struct device *dev); 71 struct acpi_device *device = data; in acpi_processor_notify() local 75 if (device->handle != handle) in acpi_processor_notify() 78 pr = acpi_driver_data(device); in acpi_processor_notify() 88 acpi_bus_generate_netlink_event(device->pnp.device_class, in acpi_processor_notify() 89 dev_name(&device->dev), event, in acpi_processor_notify() 94 acpi_bus_generate_netlink_event(device->pnp.device_class, in acpi_processor_notify() 95 dev_name(&device->dev), event, 0); in acpi_processor_notify() 99 acpi_bus_generate_netlink_event(device->pnp.device_class, in acpi_processor_notify() [all …]
|
D | button.c | 73 static int acpi_button_add(struct acpi_device *device); 74 static int acpi_button_remove(struct acpi_device *device); 75 static void acpi_button_notify(struct acpi_device *device, u32 event); 78 static int acpi_button_suspend(struct device *dev); 79 static int acpi_button_resume(struct device *dev); 118 struct acpi_device *device = seq->private; in acpi_button_state_seq_show() local 122 status = acpi_evaluate_integer(device->handle, "_LID", NULL, &state); in acpi_button_state_seq_show() 142 static int acpi_button_add_fs(struct acpi_device *device) in acpi_button_add_fs() argument 144 struct acpi_button *button = acpi_driver_data(device); in acpi_button_add_fs() 170 acpi_device_dir(device) = proc_mkdir(acpi_device_bid(device), acpi_lid_dir); in acpi_button_add_fs() [all …]
|
D | fan.c | 47 static int acpi_fan_suspend(struct device *dev); 48 static int acpi_fan_resume(struct device *dev); 97 struct acpi_device *device = cdev->devdata; in fan_get_max_state() local 98 struct acpi_fan *fan = acpi_driver_data(device); in fan_get_max_state() 107 static int fan_get_state_acpi4(struct acpi_device *device, unsigned long *state) in fan_get_state_acpi4() argument 110 struct acpi_fan *fan = acpi_driver_data(device); in fan_get_state_acpi4() 115 status = acpi_evaluate_object(device->handle, "_FST", NULL, &buffer); in fan_get_state_acpi4() 117 dev_err(&device->dev, "Get fan state failed\n"); in fan_get_state_acpi4() 125 dev_err(&device->dev, "Invalid _FST data\n"); in fan_get_state_acpi4() 136 dev_dbg(&device->dev, "Invalid control value returned\n"); in fan_get_state_acpi4() [all …]
|
D | device_pm.c | 63 int acpi_device_get_power(struct acpi_device *device, int *state) in acpi_device_get_power() argument 67 if (!device || !state) in acpi_device_get_power() 70 if (!device->flags.power_manageable) { in acpi_device_get_power() 72 *state = device->parent ? in acpi_device_get_power() 73 device->parent->power.state : ACPI_STATE_D0; in acpi_device_get_power() 81 if (device->power.flags.power_resources) { in acpi_device_get_power() 82 int error = acpi_power_get_inferred_state(device, &result); in acpi_device_get_power() 86 if (device->power.flags.explicit_get) { in acpi_device_get_power() 87 acpi_handle handle = device->handle; in acpi_device_get_power() 114 if (!device->power.flags.ignore_parent && device->parent in acpi_device_get_power() [all …]
|
D | acpi_video.c | 90 static int acpi_video_bus_add(struct acpi_device *device); 91 static int acpi_video_bus_remove(struct acpi_device *device); 92 static void acpi_video_bus_notify(struct acpi_device *device, u32 event); 151 struct acpi_device *device; member 224 struct acpi_video_device *device); 226 static int acpi_video_device_lcd_set_level(struct acpi_video_device *device, 229 struct acpi_video_device *device, 231 static int acpi_video_get_next_level(struct acpi_video_device *device, 274 struct acpi_device *device = cooling_dev->devdata; in video_get_max_state() local 275 struct acpi_video_device *video = acpi_driver_data(device); in video_get_max_state() [all …]
|
D | ac.c | 56 static int acpi_ac_add(struct acpi_device *device); 57 static int acpi_ac_remove(struct acpi_device *device); 58 static void acpi_ac_notify(struct acpi_device *device, u32 event); 67 static int acpi_ac_resume(struct device *dev); 96 struct acpi_device * device; member 124 status = acpi_evaluate_integer(ac->device->handle, "_PSR", NULL, in acpi_ac_get_state() 212 if (!acpi_device_dir(ac->device)) { in acpi_ac_add_fs() 213 acpi_device_dir(ac->device) = in acpi_ac_add_fs() 214 proc_mkdir(acpi_device_bid(ac->device), acpi_ac_dir); in acpi_ac_add_fs() 215 if (!acpi_device_dir(ac->device)) in acpi_ac_add_fs() [all …]
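ac.c above reads the adapter state by evaluating the _PSR control method on the ACPI handle. The call pattern for any integer-returning ACPI method looks like the hedged sketch below; the method name follows the excerpt, and the error code chosen on failure is illustrative.

```c
#include <linux/acpi.h>

/* Evaluate _PSR and map the result to an online/offline flag, mirroring
 * the acpi_ac_get_state() call in the excerpt. */
static int example_read_psr(acpi_handle handle, int *online)
{
	unsigned long long state;
	acpi_status status;

	status = acpi_evaluate_integer(handle, "_PSR", NULL, &state);
	if (ACPI_FAILURE(status))
		return -ENODEV;

	*online = state ? 1 : 0;
	return 0;
}
```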
|
D | power.c | 59 struct acpi_device device; member 82 struct acpi_power_resource *to_power_resource(struct acpi_device *device) in to_power_resource() argument 84 return container_of(device, struct acpi_power_resource, device); in to_power_resource() 89 struct acpi_device *device; in acpi_power_get_context() local 91 if (acpi_bus_get_device(handle, &device)) in acpi_power_get_context() 94 return to_power_resource(device); in acpi_power_get_context() 205 acpi_handle handle = resource->device.handle; in acpi_power_get_list_state() 229 status = acpi_evaluate_object(resource->device.handle, "_ON", NULL, NULL); in __acpi_power_on() 269 status = acpi_evaluate_object(resource->device.handle, "_OFF", in __acpi_power_off() 388 struct acpi_device *res_dev = &entry->resource->device; in acpi_power_hide_list() [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/mpeg/ |
D | nv50.c | 39 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 128 * 4, in nv50_mpeg_cclass_bind() 63 struct nvkm_device *device = subdev->device; in nv50_mpeg_intr() local 64 u32 stat = nvkm_rd32(device, 0x00b100); in nv50_mpeg_intr() 65 u32 type = nvkm_rd32(device, 0x00b230); in nv50_mpeg_intr() 66 u32 mthd = nvkm_rd32(device, 0x00b234); in nv50_mpeg_intr() 67 u32 data = nvkm_rd32(device, 0x00b238); in nv50_mpeg_intr() 73 nvkm_wr32(device, 0x00b308, 0x00000100); in nv50_mpeg_intr() 83 nvkm_wr32(device, 0x00b100, stat); in nv50_mpeg_intr() 84 nvkm_wr32(device, 0x00b230, 0x00000001); in nv50_mpeg_intr() 91 struct nvkm_device *device = subdev->device; in nv50_mpeg_init() local [all …]
|
D | nv31.c | 42 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, align, in nv31_mpeg_object_bind() 117 struct nvkm_device *device = mpeg->engine.subdev.device; in nv31_mpeg_tile() local 119 nvkm_wr32(device, 0x00b008 + (i * 0x10), tile->pitch); in nv31_mpeg_tile() 120 nvkm_wr32(device, 0x00b004 + (i * 0x10), tile->limit); in nv31_mpeg_tile() 121 nvkm_wr32(device, 0x00b000 + (i * 0x10), tile->addr); in nv31_mpeg_tile() 125 nv31_mpeg_mthd_dma(struct nvkm_device *device, u32 mthd, u32 data) in nv31_mpeg_mthd_dma() argument 128 u32 dma0 = nvkm_rd32(device, 0x700000 + inst); in nv31_mpeg_mthd_dma() 129 u32 dma1 = nvkm_rd32(device, 0x700004 + inst); in nv31_mpeg_mthd_dma() 130 u32 dma2 = nvkm_rd32(device, 0x700008 + inst); in nv31_mpeg_mthd_dma() 140 nvkm_mask(device, 0x00b300, 0x00010000, in nv31_mpeg_mthd_dma() [all …]
|
D | nv40.c | 31 nv40_mpeg_mthd_dma(struct nvkm_device *device, u32 mthd, u32 data) in nv40_mpeg_mthd_dma() argument 33 struct nvkm_instmem *imem = device->imem; in nv40_mpeg_mthd_dma() 47 nvkm_mask(device, 0x00b300, 0x00030000, (dma0 & 0x00030000)); in nv40_mpeg_mthd_dma() 48 nvkm_wr32(device, 0x00b334, base); in nv40_mpeg_mthd_dma() 49 nvkm_wr32(device, 0x00b324, size); in nv40_mpeg_mthd_dma() 53 nvkm_mask(device, 0x00b300, 0x000c0000, (dma0 & 0x00030000) << 2); in nv40_mpeg_mthd_dma() 54 nvkm_wr32(device, 0x00b360, base); in nv40_mpeg_mthd_dma() 55 nvkm_wr32(device, 0x00b364, size); in nv40_mpeg_mthd_dma() 61 nvkm_wr32(device, 0x00b370, base); in nv40_mpeg_mthd_dma() 62 nvkm_wr32(device, 0x00b374, size); in nv40_mpeg_mthd_dma() [all …]
|
D | nv44.c | 56 int ret = nvkm_gpuobj_new(chan->object.engine->subdev.device, 264 * 4, in nv44_mpeg_chan_bind() 73 struct nvkm_device *device = mpeg->engine.subdev.device; in nv44_mpeg_chan_fini() local 76 nvkm_mask(device, 0x00b32c, 0x00000001, 0x00000000); in nv44_mpeg_chan_fini() 77 if (nvkm_rd32(device, 0x00b318) == inst) in nv44_mpeg_chan_fini() 78 nvkm_mask(device, 0x00b318, 0x80000000, 0x00000000); in nv44_mpeg_chan_fini() 79 nvkm_mask(device, 0x00b32c, 0x00000001, 0x00000001); in nv44_mpeg_chan_fini() 129 nv44_mpeg_mthd(struct nvkm_device *device, u32 mthd, u32 data) in nv44_mpeg_mthd() argument 135 return nv40_mpeg_mthd_dma(device, mthd, data); in nv44_mpeg_mthd() 147 struct nvkm_device *device = subdev->device; in nv44_mpeg_intr() local 150 u32 inst = nvkm_rd32(device, 0x00b318) & 0x000fffff; in nv44_mpeg_intr() [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/therm/ |
D | gf119.c | 30 struct nvkm_device *device = subdev->device; in pwm_info() local 31 u32 gpio = nvkm_rd32(device, 0x00d610 + (line * 0x04)); in pwm_info() 55 struct nvkm_device *device = therm->subdev.device; in gf119_fan_pwm_ctrl() local 61 nvkm_mask(device, 0x00d610 + (line * 0x04), 0x000000c0, data); in gf119_fan_pwm_ctrl() 69 struct nvkm_device *device = therm->subdev.device; in gf119_fan_pwm_get() local 74 if (nvkm_rd32(device, 0x00d610 + (line * 0x04)) & 0x00000040) { in gf119_fan_pwm_get() 75 *divs = nvkm_rd32(device, 0x00e114 + (indx * 8)); in gf119_fan_pwm_get() 76 *duty = nvkm_rd32(device, 0x00e118 + (indx * 8)); in gf119_fan_pwm_get() 80 *divs = nvkm_rd32(device, 0x0200d8) & 0x1fff; in gf119_fan_pwm_get() 81 *duty = nvkm_rd32(device, 0x0200dc) & 0x1fff; in gf119_fan_pwm_get() [all …]
|
D | nv40.c | 32 switch (therm->subdev.device->chipset) { in nv40_sensor_style() 55 struct nvkm_device *device = therm->subdev.device; in nv40_sensor_setup() local 60 nvkm_mask(device, 0x15b8, 0x80000000, 0); in nv40_sensor_setup() 61 nvkm_wr32(device, 0x15b0, 0x80003fff); in nv40_sensor_setup() 63 return nvkm_rd32(device, 0x15b4) & 0x3fff; in nv40_sensor_setup() 65 nvkm_wr32(device, 0x15b0, 0xff); in nv40_sensor_setup() 67 return nvkm_rd32(device, 0x15b4) & 0xff; in nv40_sensor_setup() 75 struct nvkm_device *device = therm->subdev.device; in nv40_temp_get() local 81 nvkm_wr32(device, 0x15b0, 0x80003fff); in nv40_temp_get() 82 core_temp = nvkm_rd32(device, 0x15b4) & 0x3fff; in nv40_temp_get() [all …]
|
D | g84.c | 32 struct nvkm_device *device = therm->subdev.device; in g84_temp_get() local 34 if (nvkm_fuse_read(device->fuse, 0x1a8) == 1) in g84_temp_get() 35 return nvkm_rd32(device, 0x20400); in g84_temp_get() 43 struct nvkm_device *device = therm->subdev.device; in g84_sensor_setup() local 46 if (nvkm_fuse_read(device->fuse, 0x1a8) == 1) { in g84_sensor_setup() 47 nvkm_mask(device, 0x20008, 0x80008000, 0x80000000); in g84_sensor_setup() 48 nvkm_mask(device, 0x2000c, 0x80000003, 0x00000000); in g84_sensor_setup() 58 struct nvkm_device *device = subdev->device; in g84_therm_program_alarms() local 64 nvkm_wr32(device, 0x20000, 0x000003ff); in g84_therm_program_alarms() 67 nvkm_wr32(device, 0x20484, sensor->thrs_shutdown.hysteresis); in g84_therm_program_alarms() [all …]
|
D | nv50.c | 57 struct nvkm_device *device = therm->subdev.device; in nv50_fan_pwm_ctrl() local 61 nvkm_mask(device, ctrl, 0x00010001 << line, data << line); in nv50_fan_pwm_ctrl() 68 struct nvkm_device *device = therm->subdev.device; in nv50_fan_pwm_get() local 73 if (nvkm_rd32(device, ctrl) & (1 << line)) { in nv50_fan_pwm_get() 74 *divs = nvkm_rd32(device, 0x00e114 + (id * 8)); in nv50_fan_pwm_get() 75 *duty = nvkm_rd32(device, 0x00e118 + (id * 8)); in nv50_fan_pwm_get() 85 struct nvkm_device *device = therm->subdev.device; in nv50_fan_pwm_set() local 90 nvkm_wr32(device, 0x00e114 + (id * 8), divs); in nv50_fan_pwm_set() 91 nvkm_wr32(device, 0x00e118 + (id * 8), duty | 0x80000000); in nv50_fan_pwm_set() 98 struct nvkm_device *device = therm->subdev.device; in nv50_fan_pwm_clock() local [all …]
|
D | gt215.c | 31 struct nvkm_device *device = therm->subdev.device; in gt215_therm_fan_sense() local 32 u32 tach = nvkm_rd32(device, 0x00e728) & 0x0000ffff; in gt215_therm_fan_sense() 33 u32 ctrl = nvkm_rd32(device, 0x00e720); in gt215_therm_fan_sense() 42 struct nvkm_device *device = therm->subdev.device; in gt215_therm_init() local 48 nvkm_mask(device, 0x00e720, 0x00000003, 0x00000002); in gt215_therm_init() 50 nvkm_wr32(device, 0x00e724, device->crystal * 1000); in gt215_therm_init() 51 nvkm_mask(device, 0x00e720, 0x001f0000, tach->line << 16); in gt215_therm_init() 52 nvkm_mask(device, 0x00e720, 0x00000001, 0x00000001); in gt215_therm_init() 54 nvkm_mask(device, 0x00e720, 0x00000002, 0x00000000); in gt215_therm_init() 71 gt215_therm_new(struct nvkm_device *device, int index, in gt215_therm_new() argument [all …]
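gt215.c above reads the fan tachometer counter and, during init, programs the counting window from the crystal frequency. The conversion to RPM is then plain arithmetic; the sketch below assumes the counter latches pulses counted over one second and that the fan emits two pulses per revolution (typical, but stated here as assumptions), with the register offsets taken from the excerpt and the nvkm accessors assumed in scope.

```c
/* Read the tach counter and convert to RPM.  One-second window and two
 * pulses per revolution are illustrative assumptions. */
static int example_fan_rpm(struct nvkm_device *device)
{
	u32 ctrl = nvkm_rd32(device, 0x00e720);
	u32 tach = nvkm_rd32(device, 0x00e728) & 0x0000ffff;

	if (!(ctrl & 0x00000002))	/* counter not enabled */
		return -ENODEV;

	return tach * 60 / 2;		/* pulses/s -> revolutions/min */
}
```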
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ |
D | ramnv40.c | 38 struct nvkm_bios *bios = subdev->device->bios; in nv40_ram_calc() 71 struct nvkm_device *device = subdev->device; in nv40_ram_prog() local 72 struct nvkm_bios *bios = device->bios; in nv40_ram_prog() 80 u32 vbl = nvkm_rd32(device, 0x600808 + (i * 0x2000)); in nv40_ram_prog() 83 if (vbl != nvkm_rd32(device, 0x600808 + (i * 0x2000))) { in nv40_ram_prog() 84 nvkm_wr08(device, 0x0c03c4 + (i * 0x2000), 0x01); in nv40_ram_prog() 85 sr1[i] = nvkm_rd08(device, 0x0c03c5 + (i * 0x2000)); in nv40_ram_prog() 99 nvkm_msec(device, 2000, in nv40_ram_prog() 100 u32 tmp = nvkm_rd32(device, 0x600808 + (i * 0x2000)); in nv40_ram_prog() 105 nvkm_msec(device, 2000, in nv40_ram_prog() [all …]
|
D | gf100.c | 41 struct nvkm_device *device = subdev->device; in gf100_fb_intr() local 42 u32 intr = nvkm_rd32(device, 0x000100); in gf100_fb_intr() 53 struct nvkm_device *device = fb->base.subdev.device; in gf100_fb_init() local 56 nvkm_wr32(device, 0x100c10, fb->r100c10 >> 8); in gf100_fb_init() 58 nvkm_mask(device, 0x100c80, 0x00000001, 0x00000000); /* 128KiB lpg */ in gf100_fb_init() 65 struct nvkm_device *device = fb->base.subdev.device; in gf100_fb_dtor() local 68 dma_unmap_page(device->dev, fb->r100c10, PAGE_SIZE, in gf100_fb_dtor() 77 gf100_fb_new_(const struct nvkm_fb_func *func, struct nvkm_device *device, in gf100_fb_new_() argument 84 nvkm_fb_ctor(func, device, index, &fb->base); in gf100_fb_new_() 89 fb->r100c10 = dma_map_page(device->dev, fb->r100c10_page, 0, in gf100_fb_new_() [all …]
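gf100.c above allocates a scratch page, maps it for DMA so the chip can reach it, writes its bus address into 0x100c10, and unmaps it again in the destructor. The allocate/map/check/unmap lifecycle in isolation, with generic naming, is sketched below.

```c
#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/mm.h>

struct example_scratch {
	struct page *page;
	dma_addr_t addr;
};

/* Allocate one zeroed page and map it for device DMA, as done for the
 * r100c10 scratch page above; 'dev' is the struct device doing DMA. */
static int example_scratch_init(struct device *dev, struct example_scratch *s)
{
	s->page = alloc_page(GFP_KERNEL | __GFP_ZERO);
	if (!s->page)
		return -ENOMEM;

	s->addr = dma_map_page(dev, s->page, 0, PAGE_SIZE, DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, s->addr)) {
		__free_page(s->page);
		return -EFAULT;
	}
	return 0;
}

static void example_scratch_fini(struct device *dev, struct example_scratch *s)
{
	dma_unmap_page(dev, s->addr, PAGE_SIZE, DMA_BIDIRECTIONAL);
	__free_page(s->page);
}
```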
|
D | nv50.c | 157 struct nvkm_device *device = subdev->device; in nv50_fb_intr() local 158 struct nvkm_fifo *fifo = device->fifo; in nv50_fb_intr() 166 idx = nvkm_rd32(device, 0x100c90); in nv50_fb_intr() 172 nvkm_wr32(device, 0x100c90, idx | i << 24); in nv50_fb_intr() 173 trap[i] = nvkm_rd32(device, 0x100c94); in nv50_fb_intr() 175 nvkm_wr32(device, 0x100c90, idx | 0x80000000); in nv50_fb_intr() 178 if (device->chipset < 0xa3 || in nv50_fb_intr() 179 device->chipset == 0xaa || device->chipset == 0xac) { in nv50_fb_intr() 217 struct nvkm_device *device = fb->base.subdev.device; in nv50_fb_init() local 223 nvkm_wr32(device, 0x100c08, fb->r100c08 >> 8); in nv50_fb_init() [all …]
|
/linux-4.4.14/drivers/pcmcia/ |
D | sa1100_generic.h | 7 extern int pcmcia_adsbitsy_init(struct device *); 8 extern int pcmcia_assabet_init(struct device *); 9 extern int pcmcia_badge4_init(struct device *); 10 extern int pcmcia_cerf_init(struct device *); 11 extern int pcmcia_flexanet_init(struct device *); 12 extern int pcmcia_freebird_init(struct device *); 13 extern int pcmcia_gcplus_init(struct device *); 14 extern int pcmcia_graphicsmaster_init(struct device *); 15 extern int pcmcia_h3600_init(struct device *); 16 extern int pcmcia_nanoengine_init(struct device *); [all …]
|
/linux-4.4.14/drivers/dma/ |
D | dmaengine.c | 79 static struct dma_chan *dev_to_dma_chan(struct device *dev) in dev_to_dma_chan() 83 chan_dev = container_of(dev, typeof(*chan_dev), device); in dev_to_dma_chan() 87 static ssize_t memcpy_count_show(struct device *dev, in memcpy_count_show() 109 static ssize_t bytes_transferred_show(struct device *dev, in bytes_transferred_show() 131 static ssize_t in_use_show(struct device *dev, struct device_attribute *attr, in in_use_show() 157 static void chan_dev_release(struct device *dev) in chan_dev_release() 161 chan_dev = container_of(dev, typeof(*chan_dev), device); in chan_dev_release() 179 #define dma_device_satisfies_mask(device, mask) \ argument 180 __dma_device_satisfies_mask((device), &(mask)) 182 __dma_device_satisfies_mask(struct dma_device *device, in __dma_device_satisfies_mask() argument [all …]
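dmaengine.c above recovers the channel object behind a struct device with container_of() in dev_to_dma_chan(), which is how its sysfs show callbacks get from the generic device back to driver data. A generic hedged sketch of that embed-and-recover pattern for a read-only sysfs attribute; the wrapper type is a placeholder for struct dma_chan_dev.

```c
#include <linux/device.h>
#include <linux/sysfs.h>

/* A wrapper that embeds struct device, in the style of dma_chan_dev. */
struct example_chan_dev {
	struct device device;
	int chan_id;
};

static struct example_chan_dev *example_to_chan_dev(struct device *dev)
{
	/* walk back from the embedded member to its container */
	return container_of(dev, struct example_chan_dev, device);
}

static ssize_t chan_id_show(struct device *dev,
			    struct device_attribute *attr, char *buf)
{
	return sprintf(buf, "%d\n", example_to_chan_dev(dev)->chan_id);
}
static DEVICE_ATTR_RO(chan_id);
```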
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/bus/ |
D | nv50.c | 33 struct nvkm_device *device = bus->subdev.device; in nv50_bus_hwsq_exec() local 36 nvkm_mask(device, 0x001098, 0x00000008, 0x00000000); in nv50_bus_hwsq_exec() 37 nvkm_wr32(device, 0x001304, 0x00000000); in nv50_bus_hwsq_exec() 39 nvkm_wr32(device, 0x001400 + (i * 4), data[i]); in nv50_bus_hwsq_exec() 40 nvkm_mask(device, 0x001098, 0x00000018, 0x00000018); in nv50_bus_hwsq_exec() 41 nvkm_wr32(device, 0x00130c, 0x00000003); in nv50_bus_hwsq_exec() 43 if (nvkm_msec(device, 2000, in nv50_bus_hwsq_exec() 44 if (!(nvkm_rd32(device, 0x001308) & 0x00000100)) in nv50_bus_hwsq_exec() 56 struct nvkm_device *device = subdev->device; in nv50_bus_intr() local 57 u32 stat = nvkm_rd32(device, 0x001100) & nvkm_rd32(device, 0x001140); in nv50_bus_intr() [all …]
|
D | nv31.c | 34 struct nvkm_device *device = subdev->device; in nv31_bus_intr() local 35 u32 stat = nvkm_rd32(device, 0x001100) & nvkm_rd32(device, 0x001140); in nv31_bus_intr() 36 u32 gpio = nvkm_rd32(device, 0x001104) & nvkm_rd32(device, 0x001144); in nv31_bus_intr() 39 struct nvkm_gpio *gpio = device->gpio; in nv31_bus_intr() 45 u32 addr = nvkm_rd32(device, 0x009084); in nv31_bus_intr() 46 u32 data = nvkm_rd32(device, 0x009088); in nv31_bus_intr() 53 nvkm_wr32(device, 0x001100, 0x00000008); in nv31_bus_intr() 57 struct nvkm_therm *therm = device->therm; in nv31_bus_intr() 61 nvkm_wr32(device, 0x001100, 0x00070000); in nv31_bus_intr() 66 nvkm_mask(device, 0x001140, stat, 0x00000000); in nv31_bus_intr() [all …]
|
D | gf100.c | 31 struct nvkm_device *device = subdev->device; in gf100_bus_intr() local 32 u32 stat = nvkm_rd32(device, 0x001100) & nvkm_rd32(device, 0x001140); in gf100_bus_intr() 35 u32 addr = nvkm_rd32(device, 0x009084); in gf100_bus_intr() 36 u32 data = nvkm_rd32(device, 0x009088); in gf100_bus_intr() 46 nvkm_wr32(device, 0x009084, 0x00000000); in gf100_bus_intr() 47 nvkm_wr32(device, 0x001100, (stat & 0x0000000e)); in gf100_bus_intr() 53 nvkm_mask(device, 0x001140, stat, 0x00000000); in gf100_bus_intr() 60 struct nvkm_device *device = bus->subdev.device; in gf100_bus_init() local 61 nvkm_wr32(device, 0x001100, 0xffffffff); in gf100_bus_init() 62 nvkm_wr32(device, 0x001140, 0x0000000e); in gf100_bus_init() [all …]
|
D | nv04.c | 35 struct nvkm_device *device = subdev->device; in nv04_bus_intr() local 36 u32 stat = nvkm_rd32(device, 0x001100) & nvkm_rd32(device, 0x001140); in nv04_bus_intr() 41 nvkm_wr32(device, 0x001100, 0x00000001); in nv04_bus_intr() 45 struct nvkm_gpio *gpio = device->gpio; in nv04_bus_intr() 49 nvkm_wr32(device, 0x001100, 0x00000110); in nv04_bus_intr() 54 nvkm_mask(device, 0x001140, stat, 0x00000000); in nv04_bus_intr() 61 struct nvkm_device *device = bus->subdev.device; in nv04_bus_init() local 62 nvkm_wr32(device, 0x001100, 0xffffffff); in nv04_bus_init() 63 nvkm_wr32(device, 0x001140, 0x00000111); in nv04_bus_init() 73 nv04_bus_new(struct nvkm_device *device, int index, struct nvkm_bus **pbus) in nv04_bus_new() argument [all …]
|
D | g94.c | 32 struct nvkm_device *device = bus->subdev.device; in g94_bus_hwsq_exec() local 35 nvkm_mask(device, 0x001098, 0x00000008, 0x00000000); in g94_bus_hwsq_exec() 36 nvkm_wr32(device, 0x001304, 0x00000000); in g94_bus_hwsq_exec() 37 nvkm_wr32(device, 0x001318, 0x00000000); in g94_bus_hwsq_exec() 39 nvkm_wr32(device, 0x080000 + (i * 4), data[i]); in g94_bus_hwsq_exec() 40 nvkm_mask(device, 0x001098, 0x00000018, 0x00000018); in g94_bus_hwsq_exec() 41 nvkm_wr32(device, 0x00130c, 0x00000001); in g94_bus_hwsq_exec() 43 if (nvkm_msec(device, 2000, in g94_bus_hwsq_exec() 44 if (!(nvkm_rd32(device, 0x001308) & 0x00000100)) in g94_bus_hwsq_exec() 61 g94_bus_new(struct nvkm_device *device, int index, struct nvkm_bus **pbus) in g94_bus_new() argument [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/clk/ |
D | gt215.c | 45 struct nvkm_device *device = clk->base.subdev.device; in read_vco() local 46 u32 sctl = nvkm_rd32(device, 0x4120 + (idx * 4)); in read_vco() 50 return device->crystal; in read_vco() 63 struct nvkm_device *device = clk->base.subdev.device; in read_clk() local 68 if (device->chipset == 0xaf) { in read_clk() 70 return nvkm_rd32(device, 0x00471c) * 1000; in read_clk() 73 return device->crystal; in read_clk() 76 sctl = nvkm_rd32(device, 0x4120 + (idx * 4)); in read_clk() 88 return device->crystal; in read_clk() 110 struct nvkm_device *device = clk->base.subdev.device; in read_pll() local [all …]
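The clock files above recover an engine frequency by reading the PLL control and coefficient registers and computing reference * N / (M * P). A minimal sketch of that decode follows; the field positions (M in bits 0-7, N in bits 8-15, P in bits 16-21) follow the common layout suggested by these excerpts and should be treated as illustrative rather than a verified hardware description.

```c
#include <linux/math64.h>
#include <linux/types.h>

/* Decode PLL coefficients and return the output clock in kHz. */
static u32 example_pll_khz(u32 ref_khz, u32 coef)
{
	u32 M = (coef & 0x000000ff) >> 0;
	u32 N = (coef & 0x0000ff00) >> 8;
	u32 P = (coef & 0x003f0000) >> 16;

	if (M == 0 || P == 0)
		return 0;	/* PLL not programmed */

	return (u32)div_u64((u64)ref_khz * N, M * P);
}
```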
|
D | gk104.c | 52 struct nvkm_device *device = clk->base.subdev.device; in read_vco() local 53 u32 ssrc = nvkm_rd32(device, dsrc); in read_vco() 62 struct nvkm_device *device = clk->base.subdev.device; in read_pll() local 63 u32 ctrl = nvkm_rd32(device, pll + 0x00); in read_pll() 64 u32 coef = nvkm_rd32(device, pll + 0x04); in read_pll() 77 sclk = device->crystal; in read_pll() 86 fN = nvkm_rd32(device, pll + 0x10) >> 16; in read_pll() 108 struct nvkm_device *device = clk->base.subdev.device; in read_div() local 109 u32 ssrc = nvkm_rd32(device, dsrc + (doff * 4)); in read_div() 110 u32 sctl = nvkm_rd32(device, dctl + (doff * 4)); in read_div() [all …]
|
D | gf100.c | 51 struct nvkm_device *device = clk->base.subdev.device; in read_vco() local 52 u32 ssrc = nvkm_rd32(device, dsrc); in read_vco() 61 struct nvkm_device *device = clk->base.subdev.device; in read_pll() local 62 u32 ctrl = nvkm_rd32(device, pll + 0x00); in read_pll() 63 u32 coef = nvkm_rd32(device, pll + 0x04); in read_pll() 75 sclk = device->crystal; in read_pll() 100 struct nvkm_device *device = clk->base.subdev.device; in read_div() local 101 u32 ssrc = nvkm_rd32(device, dsrc + (doff * 4)); in read_div() 102 u32 sctl = nvkm_rd32(device, dctl + (doff * 4)); in read_div() 107 return device->crystal; in read_div() [all …]
|
D | gk20a.c | 127 struct nvkm_device *device = clk->base.subdev.device; in gk20a_pllg_read_mnp() local 130 val = nvkm_rd32(device, GPCPLL_COEFF); in gk20a_pllg_read_mnp() 271 struct nvkm_device *device = subdev->device; in gk20a_pllg_slide() local 276 val = nvkm_rd32(device, GPCPLL_COEFF); in gk20a_pllg_slide() 282 nvkm_mask(device, GPCPLL_CFG2, 0xff << GPCPLL_CFG2_PLL_STEPA_SHIFT, in gk20a_pllg_slide() 284 nvkm_mask(device, GPCPLL_CFG3, 0xff << GPCPLL_CFG3_PLL_STEPB_SHIFT, in gk20a_pllg_slide() 288 nvkm_mask(device, GPCPLL_NDIV_SLOWDOWN, in gk20a_pllg_slide() 293 val = nvkm_rd32(device, GPCPLL_COEFF); in gk20a_pllg_slide() 297 nvkm_wr32(device, GPCPLL_COEFF, val); in gk20a_pllg_slide() 300 val = nvkm_rd32(device, GPCPLL_NDIV_SLOWDOWN); in gk20a_pllg_slide() [all …]
|
D | nv50.c | 34 struct nvkm_device *device = clk->base.subdev.device; in read_div() local 35 switch (device->chipset) { in read_div() 41 return nvkm_rd32(device, 0x004700); in read_div() 45 return nvkm_rd32(device, 0x004800); in read_div() 55 struct nvkm_device *device = subdev->device; in read_pll_src() local 57 u32 rsel = nvkm_rd32(device, 0x00e18c); in read_pll_src() 60 switch (device->chipset) { in read_pll_src() 73 coef = nvkm_rd32(device, 0x00e81c + (id * 0x0c)); in read_pll_src() 82 coef = nvkm_rd32(device, 0x00e81c); in read_pll_src() 90 rsel = nvkm_rd32(device, 0x00c050); in read_pll_src() [all …]
|
D | mcp77.c | 44 struct nvkm_device *device = clk->base.subdev.device; in read_div() local 45 return nvkm_rd32(device, 0x004600); in read_div() 51 struct nvkm_device *device = clk->base.subdev.device; in read_pll() local 52 u32 ctrl = nvkm_rd32(device, base + 0); in read_pll() 53 u32 coef = nvkm_rd32(device, base + 4); in read_pll() 61 post_div = 1 << ((nvkm_rd32(device, 0x4070) & 0x000f0000) >> 16); in read_pll() 64 post_div = (nvkm_rd32(device, 0x4040) & 0x000f0000) >> 16; in read_pll() 85 struct nvkm_device *device = subdev->device; in mcp77_clk_read() local 86 u32 mast = nvkm_rd32(device, 0x00c054); in mcp77_clk_read() 91 return device->crystal; in mcp77_clk_read() [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/timer/ |
D | nv04.c | 31 struct nvkm_device *device = subdev->device; in nv04_timer_time() local 38 nvkm_wr32(device, NV04_PTIMER_TIME_1, hi); in nv04_timer_time() 39 nvkm_wr32(device, NV04_PTIMER_TIME_0, lo); in nv04_timer_time() 45 struct nvkm_device *device = tmr->subdev.device; in nv04_timer_read() local 49 hi = nvkm_rd32(device, NV04_PTIMER_TIME_1); in nv04_timer_read() 50 lo = nvkm_rd32(device, NV04_PTIMER_TIME_0); in nv04_timer_read() 51 } while (hi != nvkm_rd32(device, NV04_PTIMER_TIME_1)); in nv04_timer_read() 59 struct nvkm_device *device = tmr->subdev.device; in nv04_timer_alarm_fini() local 60 nvkm_wr32(device, NV04_PTIMER_INTR_EN_0, 0x00000000); in nv04_timer_alarm_fini() 66 struct nvkm_device *device = tmr->subdev.device; in nv04_timer_alarm_init() local [all …]
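nv04_timer_read() above assembles a 64-bit timestamp from two 32-bit registers, re-reading until the high word is stable so a carry between the two reads cannot produce a torn value. The loop in isolation, assuming the nvkm accessors and the NV04_PTIMER_TIME_* definitions from this directory are in scope:

```c
/* Torn-read-safe assembly of a 64-bit counter split across two 32-bit
 * registers, as in nv04_timer_read(). */
static u64 example_read_time(struct nvkm_device *device)
{
	u32 hi, lo;

	do {
		hi = nvkm_rd32(device, NV04_PTIMER_TIME_1);
		lo = nvkm_rd32(device, NV04_PTIMER_TIME_0);
		/* if the high word changed, a carry landed between the
		 * two reads; sample again */
	} while (hi != nvkm_rd32(device, NV04_PTIMER_TIME_1));

	return ((u64)hi << 32) | lo;
}
```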
|
/linux-4.4.14/drivers/platform/x86/ |
D | dell-rbtn.c | 39 static enum rbtn_type rbtn_check(struct acpi_device *device) in rbtn_check() argument 44 status = acpi_evaluate_integer(device->handle, "CRBT", NULL, &output); in rbtn_check() 60 static int rbtn_get(struct acpi_device *device) in rbtn_get() argument 65 status = acpi_evaluate_integer(device->handle, "GRBT", NULL, &output); in rbtn_get() 72 static int rbtn_acquire(struct acpi_device *device, bool enable) in rbtn_acquire() argument 83 status = acpi_evaluate_object(device->handle, "ARBT", &input, NULL); in rbtn_acquire() 97 struct acpi_device *device = data; in rbtn_rfkill_query() local 100 state = rbtn_get(device); in rbtn_rfkill_query() 118 static int rbtn_rfkill_init(struct acpi_device *device) in rbtn_rfkill_init() argument 120 struct rbtn_data *rbtn_data = device->driver_data; in rbtn_rfkill_init() [all …]
|
D | surfacepro3_button.c | 70 static void surface_button_notify(struct acpi_device *device, u32 event) in surface_button_notify() argument 72 struct surface_button *button = acpi_driver_data(device); in surface_button_notify() 107 dev_info_ratelimited(&device->dev, in surface_button_notify() 115 pm_wakeup_event(&device->dev, 0); in surface_button_notify() 123 static int surface_button_suspend(struct device *dev) in surface_button_suspend() 125 struct acpi_device *device = to_acpi_device(dev); in surface_button_suspend() local 126 struct surface_button *button = acpi_driver_data(device); in surface_button_suspend() 132 static int surface_button_resume(struct device *dev) in surface_button_resume() 134 struct acpi_device *device = to_acpi_device(dev); in surface_button_resume() local 135 struct surface_button *button = acpi_driver_data(device); in surface_button_resume() [all …]
|
D | xo15-ebook.c | 49 static int ebook_send_state(struct acpi_device *device) in ebook_send_state() argument 51 struct ebook_switch *button = acpi_driver_data(device); in ebook_send_state() 55 status = acpi_evaluate_integer(device->handle, "EBK", NULL, &state); in ebook_send_state() 65 static void ebook_switch_notify(struct acpi_device *device, u32 event) in ebook_switch_notify() argument 70 ebook_send_state(device); in ebook_switch_notify() 80 static int ebook_switch_resume(struct device *dev) in ebook_switch_resume() 88 static int ebook_switch_add(struct acpi_device *device) in ebook_switch_add() argument 92 const char *hid = acpi_device_hid(device); in ebook_switch_add() 100 device->driver_data = button; in ebook_switch_add() 108 name = acpi_device_name(device); in ebook_switch_add() [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/ |
D | falcon.c | 48 return nvkm_gpuobj_new(object->engine->subdev.device, 256, in nvkm_falcon_cclass_bind() 62 struct nvkm_device *device = subdev->device; in nvkm_falcon_intr() local 64 u32 dest = nvkm_rd32(device, base + 0x01c); in nvkm_falcon_intr() 65 u32 intr = nvkm_rd32(device, base + 0x008) & dest & ~(dest >> 16); in nvkm_falcon_intr() 66 u32 inst = nvkm_rd32(device, base + 0x050) & 0x3fffffff; in nvkm_falcon_intr() 70 chan = nvkm_fifo_chan_inst(device->fifo, (u64)inst << 12, &flags); in nvkm_falcon_intr() 75 nvkm_wr32(device, base + 0x004, 0x00000040); in nvkm_falcon_intr() 82 nvkm_wr32(device, base + 0x004, 0x00000010); in nvkm_falcon_intr() 88 nvkm_wr32(device, base + 0x004, intr); in nvkm_falcon_intr() 91 nvkm_fifo_chan_put(device->fifo, flags, &chan); in nvkm_falcon_intr() [all …]
|
D | xtensa.c | 47 return nvkm_gpuobj_new(object->engine->subdev.device, 0x10000, align, in nvkm_xtensa_cclass_bind() 61 struct nvkm_device *device = subdev->device; in nvkm_xtensa_intr() local 63 u32 unk104 = nvkm_rd32(device, base + 0xd04); in nvkm_xtensa_intr() 64 u32 intr = nvkm_rd32(device, base + 0xc20); in nvkm_xtensa_intr() 65 u32 chan = nvkm_rd32(device, base + 0xc28); in nvkm_xtensa_intr() 66 u32 unk10c = nvkm_rd32(device, base + 0xd0c); in nvkm_xtensa_intr() 70 nvkm_wr32(device, base + 0xc20, intr); in nvkm_xtensa_intr() 71 intr = nvkm_rd32(device, base + 0xc20); in nvkm_xtensa_intr() 74 nvkm_mask(device, xtensa->addr + 0xd94, 0, xtensa->func->fifo_val); in nvkm_xtensa_intr() 82 struct nvkm_device *device = xtensa->engine.subdev.device; in nvkm_xtensa_fini() local [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/device/ |
D | user.c | 38 struct nvkm_device *device; member 45 struct nvkm_device *device = udev->device; in nvkm_udevice_info() local 46 struct nvkm_fb *fb = device->fb; in nvkm_udevice_info() 47 struct nvkm_instmem *imem = device->imem; in nvkm_udevice_info() 59 switch (device->chipset) { in nvkm_udevice_info() 73 switch (device->type) { in nvkm_udevice_info() 93 switch (device->card_type) { in nvkm_udevice_info() 109 args->v0.chipset = device->chipset; in nvkm_udevice_info() 110 args->v0.revision = device->chiprev; in nvkm_udevice_info() 118 strncpy(args->v0.chip, device->chip->name, sizeof(args->v0.chip)); in nvkm_udevice_info() [all …]
|
D | base.c | 38 struct nvkm_device *device; in nvkm_device_find_locked() local 39 list_for_each_entry(device, &nv_devices, head) { in nvkm_device_find_locked() 40 if (device->handle == handle) in nvkm_device_find_locked() 41 return device; in nvkm_device_find_locked() 49 struct nvkm_device *device; in nvkm_device_find() local 51 device = nvkm_device_find_locked(handle); in nvkm_device_find() 53 return device; in nvkm_device_find() 59 struct nvkm_device *device; in nvkm_device_list() local 62 list_for_each_entry(device, &nv_devices, head) { in nvkm_device_list() 64 name[nr - 1] = device->handle; in nvkm_device_list() [all …]
|
/linux-4.4.14/arch/arm/include/asm/ |
D | dma-mapping.h | 20 static inline struct dma_map_ops *__generic_dma_ops(struct device *dev) in __generic_dma_ops() 27 static inline struct dma_map_ops *get_dma_ops(struct device *dev) in get_dma_ops() 35 static inline void set_dma_ops(struct device *dev, struct dma_map_ops *ops) in set_dma_ops() 42 extern int dma_supported(struct device *dev, u64 mask); 61 static inline dma_addr_t pfn_to_dma(struct device *dev, unsigned long pfn) in pfn_to_dma() 68 static inline unsigned long dma_to_pfn(struct device *dev, dma_addr_t addr) in dma_to_pfn() 78 static inline void *dma_to_virt(struct device *dev, dma_addr_t addr) in dma_to_virt() 89 static inline dma_addr_t virt_to_dma(struct device *dev, void *addr) in virt_to_dma() 98 static inline dma_addr_t pfn_to_dma(struct device *dev, unsigned long pfn) in pfn_to_dma() 103 static inline unsigned long dma_to_pfn(struct device *dev, dma_addr_t addr) in dma_to_pfn() [all …]
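The per-architecture dma-mapping.h hits (here and in the m68k, parisc, frv, cris, nios2 and avr32 entries below) all sit behind the same consumer-facing streaming-DMA calls. A hedged sketch of the usual driver-side sequence; the buffer, length and direction are chosen only for illustration:

    #include <linux/dma-mapping.h>
    #include <linux/device.h>

    /* Map a kernel buffer for a CPU-to-device transfer, hand the bus address
     * to the hardware (elided), then unmap it once the transfer completes. */
    static int example_dma_xfer(struct device *dev, void *buf, size_t len)
    {
        dma_addr_t handle;

        handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, handle))
            return -ENOMEM;

        /* ... program 'handle' into the device and wait for completion ... */

        dma_unmap_single(dev, handle, len, DMA_TO_DEVICE);
        return 0;
    }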
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/pm/ |
D | gf100.c | 131 struct nvkm_device *device = pm->engine.subdev.device; in gf100_perfctr_init() local 139 nvkm_wr32(device, dom->addr + 0x09c, 0x00040002 | (dom->mode << 3)); in gf100_perfctr_init() 140 nvkm_wr32(device, dom->addr + 0x100, 0x00000000); in gf100_perfctr_init() 141 nvkm_wr32(device, dom->addr + 0x040 + (ctr->slot * 0x08), src); in gf100_perfctr_init() 142 nvkm_wr32(device, dom->addr + 0x044 + (ctr->slot * 0x08), log); in gf100_perfctr_init() 149 struct nvkm_device *device = pm->engine.subdev.device; in gf100_perfctr_read() local 152 case 0: ctr->ctr = nvkm_rd32(device, dom->addr + 0x08c); break; in gf100_perfctr_read() 153 case 1: ctr->ctr = nvkm_rd32(device, dom->addr + 0x088); break; in gf100_perfctr_read() 154 case 2: ctr->ctr = nvkm_rd32(device, dom->addr + 0x080); break; in gf100_perfctr_read() 155 case 3: ctr->ctr = nvkm_rd32(device, dom->addr + 0x090); break; in gf100_perfctr_read() [all …]
|
D | nv40.c | 30 struct nvkm_device *device = pm->engine.subdev.device; in nv40_perfctr_init() local 38 nvkm_wr32(device, 0x00a7c0 + dom->addr, 0x00000001 | (dom->mode << 4)); in nv40_perfctr_init() 39 nvkm_wr32(device, 0x00a400 + dom->addr + (ctr->slot * 0x40), src); in nv40_perfctr_init() 40 nvkm_wr32(device, 0x00a420 + dom->addr + (ctr->slot * 0x40), log); in nv40_perfctr_init() 47 struct nvkm_device *device = pm->engine.subdev.device; in nv40_perfctr_read() local 50 case 0: ctr->ctr = nvkm_rd32(device, 0x00a700 + dom->addr); break; in nv40_perfctr_read() 51 case 1: ctr->ctr = nvkm_rd32(device, 0x00a6c0 + dom->addr); break; in nv40_perfctr_read() 52 case 2: ctr->ctr = nvkm_rd32(device, 0x00a680 + dom->addr); break; in nv40_perfctr_read() 53 case 3: ctr->ctr = nvkm_rd32(device, 0x00a740 + dom->addr); break; in nv40_perfctr_read() 55 dom->clk = nvkm_rd32(device, 0x00a600 + dom->addr); in nv40_perfctr_read() [all …]
|
/linux-4.4.14/drivers/video/fbdev/core/ |
D | fbsysfs.c | 39 struct fb_info *framebuffer_alloc(size_t size, struct device *dev) in framebuffer_alloc() 60 info->device = dev; in framebuffer_alloc() 127 static ssize_t store_mode(struct device *device, struct device_attribute *attr, in store_mode() argument 130 struct fb_info *fb_info = dev_get_drvdata(device); in store_mode() 158 static ssize_t show_mode(struct device *device, struct device_attribute *attr, in show_mode() argument 161 struct fb_info *fb_info = dev_get_drvdata(device); in show_mode() 169 static ssize_t store_modes(struct device *device, in store_modes() argument 173 struct fb_info *fb_info = dev_get_drvdata(device); in store_modes() 201 static ssize_t show_modes(struct device *device, struct device_attribute *attr, in show_modes() argument 204 struct fb_info *fb_info = dev_get_drvdata(device); in show_modes() [all …]
|
/linux-4.4.14/drivers/misc/cxl/ |
D | sysfs.c | 21 static ssize_t caia_version_show(struct device *device, in caia_version_show() argument 25 struct cxl *adapter = to_cxl_adapter(device); in caia_version_show() 31 static ssize_t psl_revision_show(struct device *device, in psl_revision_show() argument 35 struct cxl *adapter = to_cxl_adapter(device); in psl_revision_show() 40 static ssize_t base_image_show(struct device *device, in base_image_show() argument 44 struct cxl *adapter = to_cxl_adapter(device); in base_image_show() 49 static ssize_t image_loaded_show(struct device *device, in image_loaded_show() argument 53 struct cxl *adapter = to_cxl_adapter(device); in image_loaded_show() 60 static ssize_t reset_adapter_store(struct device *device, in reset_adapter_store() argument 64 struct cxl *adapter = to_cxl_adapter(device); in reset_adapter_store() [all …]
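The fbsysfs.c and cxl sysfs.c hits above follow the same device-attribute pattern: recover the driver's private object from the struct device (dev_get_drvdata() or a container_of() helper such as to_cxl_adapter()), then format a value into the caller's page-sized buffer. A minimal sketch, with struct foo and its field invented for illustration:

    #include <linux/device.h>
    #include <linux/sysfs.h>

    /* Hypothetical per-device driver state stashed with dev_set_drvdata(). */
    struct foo {
        int level;
    };

    static ssize_t level_show(struct device *device,
                              struct device_attribute *attr, char *buf)
    {
        struct foo *foo = dev_get_drvdata(device);

        return sprintf(buf, "%d\n", foo->level);
    }
    static DEVICE_ATTR_RO(level);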
|
/linux-4.4.14/drivers/gpu/drm/nouveau/dispnv04/ |
D | hw.h | 63 struct nvif_object *device = &nouveau_drm(dev)->device.object; in NVReadCRTC() local 67 val = nvif_rd32(device, reg); in NVReadCRTC() 74 struct nvif_object *device = &nouveau_drm(dev)->device.object; in NVWriteCRTC() local 77 nvif_wr32(device, reg, val); in NVWriteCRTC() 83 struct nvif_object *device = &nouveau_drm(dev)->device.object; in NVReadRAMDAC() local 87 val = nvif_rd32(device, reg); in NVReadRAMDAC() 94 struct nvif_object *device = &nouveau_drm(dev)->device.object; in NVWriteRAMDAC() local 97 nvif_wr32(device, reg, val); in NVWriteRAMDAC() 123 struct nvif_object *device = &nouveau_drm(dev)->device.object; in NVWriteVgaCrtc() local 124 nvif_wr08(device, NV_PRMCIO_CRX__COLOR + head * NV_PRMCIO_SIZE, index); in NVWriteVgaCrtc() [all …]
|
/linux-4.4.14/drivers/input/tablet/ |
D | gtco.c | 200 static void parse_hid_report_descriptor(struct gtco *device, char * report, in parse_hid_report_descriptor() argument 203 struct device *ddev = &device->intf->dev; in parse_hid_report_descriptor() 295 if (device->max_X == 0) { in parse_hid_report_descriptor() 296 device->max_X = globalval[TAG_GLOB_LOG_MAX]; in parse_hid_report_descriptor() 297 device->min_X = globalval[TAG_GLOB_LOG_MIN]; in parse_hid_report_descriptor() 303 if (device->max_Y == 0) { in parse_hid_report_descriptor() 304 device->max_Y = globalval[TAG_GLOB_LOG_MAX]; in parse_hid_report_descriptor() 305 device->min_Y = globalval[TAG_GLOB_LOG_MIN]; in parse_hid_report_descriptor() 312 if (device->maxtilt_X == 0) { in parse_hid_report_descriptor() 313 device->maxtilt_X = globalval[TAG_GLOB_LOG_MAX]; in parse_hid_report_descriptor() [all …]
|
/linux-4.4.14/Documentation/DocBook/ |
D | s390-drivers.xml.db | 15 API-struct-ccw-device 17 API-ccw-device-set-offline 18 API-ccw-device-set-online 23 API-ccw-device-siosl 24 API-ccw-device-set-options-mask 25 API-ccw-device-set-options 26 API-ccw-device-clear-options 27 API-ccw-device-is-pathgroup 28 API-ccw-device-is-multipath 29 API-ccw-device-clear [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/bar/ |
D | nv50.c | 48 struct nvkm_device *device = bar->base.subdev.device; in nv50_bar_flush() local 51 nvkm_wr32(device, 0x00330c, 0x00000001); in nv50_bar_flush() 52 nvkm_msec(device, 2000, in nv50_bar_flush() 53 if (!(nvkm_rd32(device, 0x00330c) & 0x00000002)) in nv50_bar_flush() 63 struct nvkm_device *device = bar->base.subdev.device; in nv50_bar_oneinit() local 70 ret = nvkm_gpuobj_new(device, 0x20000, 0, false, NULL, &bar->mem); in nv50_bar_oneinit() 74 ret = nvkm_gpuobj_new(device, bar->pgd_addr, 0, false, bar->mem, in nv50_bar_oneinit() 79 ret = nvkm_gpuobj_new(device, 0x4000, 0, false, bar->mem, &bar->pgd); in nv50_bar_oneinit() 85 limit = start + device->func->resource_size(device, 3); in nv50_bar_oneinit() 87 ret = nvkm_vm_new(device, start, limit, start, &bar3_lock, &vm); in nv50_bar_oneinit() [all …]
|
/linux-4.4.14/arch/m68k/include/asm/ |
D | dma-mapping.h | 8 static inline int dma_supported(struct device *dev, u64 mask) in dma_supported() 13 static inline int dma_set_mask(struct device *dev, u64 mask) in dma_set_mask() 18 extern void *dma_alloc_coherent(struct device *, size_t, 20 extern void dma_free_coherent(struct device *, size_t, 23 static inline void *dma_alloc_attrs(struct device *dev, size_t size, in dma_alloc_attrs() 31 static inline void dma_free_attrs(struct device *dev, size_t size, in dma_free_attrs() 39 static inline void *dma_alloc_noncoherent(struct device *dev, size_t size, in dma_alloc_noncoherent() 44 static inline void dma_free_noncoherent(struct device *dev, size_t size, in dma_free_noncoherent() 49 static inline void dma_cache_sync(struct device *dev, void *vaddr, size_t size, in dma_cache_sync() 55 extern dma_addr_t dma_map_single(struct device *, void *, size_t, [all …]
|
/linux-4.4.14/include/linux/phy/ |
D | phy.h | 61 struct device dev; 79 struct device *dev; 82 struct phy * (*of_xlate)(struct device *dev, 130 struct phy *phy_get(struct device *dev, const char *string); 131 struct phy *phy_optional_get(struct device *dev, const char *string); 132 struct phy *devm_phy_get(struct device *dev, const char *string); 133 struct phy *devm_phy_optional_get(struct device *dev, const char *string); 134 struct phy *devm_of_phy_get(struct device *dev, struct device_node *np, 136 struct phy *devm_of_phy_get_by_index(struct device *dev, struct device_node *np, 139 void devm_phy_put(struct device *dev, struct phy *phy); [all …]
|
/linux-4.4.14/include/linux/regulator/ |
D | consumer.h | 40 struct device; 161 struct regulator *__must_check regulator_get(struct device *dev, 163 struct regulator *__must_check devm_regulator_get(struct device *dev, 165 struct regulator *__must_check regulator_get_exclusive(struct device *dev, 167 struct regulator *__must_check devm_regulator_get_exclusive(struct device *dev, 169 struct regulator *__must_check regulator_get_optional(struct device *dev, 171 struct regulator *__must_check devm_regulator_get_optional(struct device *dev, 176 int regulator_register_supply_alias(struct device *dev, const char *id, 177 struct device *alias_dev, 179 void regulator_unregister_supply_alias(struct device *dev, const char *id); [all …]
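The regulator consumer.h hit lists the getter variants; pairing one with regulator_enable() is the typical consumer flow. A hedged sketch, where the "vdd" supply name is invented:

    #include <linux/regulator/consumer.h>
    #include <linux/device.h>
    #include <linux/err.h>

    /* Look up a supply by name for this device and switch it on. */
    static int example_power_on(struct device *dev)
    {
        struct regulator *vdd;
        int ret;

        vdd = devm_regulator_get(dev, "vdd");
        if (IS_ERR(vdd))
            return PTR_ERR(vdd);

        ret = regulator_enable(vdd);
        if (ret)
            dev_err(dev, "failed to enable vdd: %d\n", ret);

        return ret;
    }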
|
/linux-4.4.14/drivers/gpu/drm/nouveau/ |
D | nouveau_backlight.c | 43 struct nvif_object *device = &drm->device.object; in nv40_get_intensity() local 44 int val = (nvif_rd32(device, NV40_PMC_BACKLIGHT) & in nv40_get_intensity() 54 struct nvif_object *device = &drm->device.object; in nv40_set_intensity() local 56 int reg = nvif_rd32(device, NV40_PMC_BACKLIGHT); in nv40_set_intensity() 58 nvif_wr32(device, NV40_PMC_BACKLIGHT, in nv40_set_intensity() 74 struct nvif_object *device = &drm->device.object; in nv40_backlight_init() local 78 if (!(nvif_rd32(device, NV40_PMC_BACKLIGHT) & NV40_PMC_BACKLIGHT_MASK)) in nv40_backlight_init() 100 struct nvif_object *device = &drm->device.object; in nv50_get_intensity() local 105 val = nvif_rd32(device, NV50_PDISP_SOR_PWM_CTL(or)); in nv50_get_intensity() 115 struct nvif_object *device = &drm->device.object; in nv50_set_intensity() local [all …]
|
D | nouveau_hwmon.c | 39 nouveau_hwmon_show_temp(struct device *d, struct device_attribute *a, char *buf) in nouveau_hwmon_show_temp() 43 struct nvkm_therm *therm = nvxx_therm(&drm->device); in nouveau_hwmon_show_temp() 55 nouveau_hwmon_show_temp1_auto_point1_pwm(struct device *d, in nouveau_hwmon_show_temp1_auto_point1_pwm() 64 nouveau_hwmon_temp1_auto_point1_temp(struct device *d, in nouveau_hwmon_temp1_auto_point1_temp() 69 struct nvkm_therm *therm = nvxx_therm(&drm->device); in nouveau_hwmon_temp1_auto_point1_temp() 75 nouveau_hwmon_set_temp1_auto_point1_temp(struct device *d, in nouveau_hwmon_set_temp1_auto_point1_temp() 81 struct nvkm_therm *therm = nvxx_therm(&drm->device); in nouveau_hwmon_set_temp1_auto_point1_temp() 97 nouveau_hwmon_temp1_auto_point1_temp_hyst(struct device *d, in nouveau_hwmon_temp1_auto_point1_temp_hyst() 102 struct nvkm_therm *therm = nvxx_therm(&drm->device); in nouveau_hwmon_temp1_auto_point1_temp_hyst() 108 nouveau_hwmon_set_temp1_auto_point1_temp_hyst(struct device *d, in nouveau_hwmon_set_temp1_auto_point1_temp_hyst() [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/mc/ |
D | gf100.c | 54 struct nvkm_device *device = mc->subdev.device; in gf100_mc_intr_unarm() local 55 nvkm_wr32(device, 0x000140, 0x00000000); in gf100_mc_intr_unarm() 56 nvkm_wr32(device, 0x000144, 0x00000000); in gf100_mc_intr_unarm() 57 nvkm_rd32(device, 0x000140); in gf100_mc_intr_unarm() 63 struct nvkm_device *device = mc->subdev.device; in gf100_mc_intr_rearm() local 64 nvkm_wr32(device, 0x000140, 0x00000001); in gf100_mc_intr_rearm() 65 nvkm_wr32(device, 0x000144, 0x00000001); in gf100_mc_intr_rearm() 71 struct nvkm_device *device = mc->subdev.device; in gf100_mc_intr_mask() local 72 u32 intr0 = nvkm_rd32(device, 0x000100); in gf100_mc_intr_mask() 73 u32 intr1 = nvkm_rd32(device, 0x000104); in gf100_mc_intr_mask() [all …]
|
D | nv04.c | 44 struct nvkm_device *device = mc->subdev.device; in nv04_mc_intr_unarm() local 45 nvkm_wr32(device, 0x000140, 0x00000000); in nv04_mc_intr_unarm() 46 nvkm_rd32(device, 0x000140); in nv04_mc_intr_unarm() 52 struct nvkm_device *device = mc->subdev.device; in nv04_mc_intr_rearm() local 53 nvkm_wr32(device, 0x000140, 0x00000001); in nv04_mc_intr_rearm() 59 return nvkm_rd32(mc->subdev.device, 0x000100); in nv04_mc_intr_mask() 65 struct nvkm_device *device = mc->subdev.device; in nv04_mc_init() local 66 nvkm_wr32(device, 0x000200, 0xffffffff); /* everything enabled */ in nv04_mc_init() 67 nvkm_wr32(device, 0x001850, 0x00000001); /* disable rom access */ in nv04_mc_init() 80 nv04_mc_new(struct nvkm_device *device, int index, struct nvkm_mc **pmc) in nv04_mc_new() argument [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/gpio/ |
D | nv10.c | 31 struct nvkm_device *device = gpio->subdev.device; in nv10_gpio_sense() local 34 line = nvkm_rd32(device, 0x600818) >> line; in nv10_gpio_sense() 39 line = nvkm_rd32(device, 0x60081c) >> line; in nv10_gpio_sense() 44 line = nvkm_rd32(device, 0x600850) >> line; in nv10_gpio_sense() 54 struct nvkm_device *device = gpio->subdev.device; in nv10_gpio_drive() local 78 nvkm_mask(device, reg, mask << line, data << line); in nv10_gpio_drive() 85 struct nvkm_device *device = gpio->subdev.device; in nv10_gpio_intr_stat() local 86 u32 intr = nvkm_rd32(device, 0x001104); in nv10_gpio_intr_stat() 87 u32 stat = nvkm_rd32(device, 0x001144) & intr; in nv10_gpio_intr_stat() 90 nvkm_wr32(device, 0x001104, intr); in nv10_gpio_intr_stat() [all …]
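The nv10 intr_stat helper (and the g94/gk104 variants further down) uses the same latch-and-acknowledge idiom: read the pending register, mask it with the enable register, then write the pending bits back to clear them. A simplified sketch with hypothetical MMIO accessors in place of nvkm_rd32()/nvkm_wr32(); the register offsets are the ones visible in the nv10 hit:

    #include <stdint.h>

    /* Hypothetical MMIO accessors standing in for nvkm_rd32()/nvkm_wr32(). */
    extern uint32_t mmio_rd32(uint32_t addr);
    extern void mmio_wr32(uint32_t addr, uint32_t val);

    #define GPIO_INTR_PENDING 0x001104
    #define GPIO_INTR_ENABLE  0x001144

    /* Return only the pending bits that are also enabled, and acknowledge
     * everything that was pending so it does not fire again. */
    static uint32_t gpio_intr_stat_sketch(void)
    {
        uint32_t intr = mmio_rd32(GPIO_INTR_PENDING);
        uint32_t stat = mmio_rd32(GPIO_INTR_ENABLE) & intr;

        if (intr)
            mmio_wr32(GPIO_INTR_PENDING, intr);
        return stat;
    }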
|
D | nv50.c | 29 struct nvkm_device *device = gpio->subdev.device; in nv50_gpio_reset() local 30 struct nvkm_bios *bios = device->bios; in nv50_gpio_reset() 53 nvkm_mask(device, reg, 0x00010001 << lsh, val << lsh); in nv50_gpio_reset() 73 struct nvkm_device *device = gpio->subdev.device; in nv50_gpio_drive() local 79 nvkm_mask(device, reg, 3 << shift, (((dir ^ 1) << 1) | out) << shift); in nv50_gpio_drive() 86 struct nvkm_device *device = gpio->subdev.device; in nv50_gpio_sense() local 92 return !!(nvkm_rd32(device, reg) & (4 << shift)); in nv50_gpio_sense() 98 struct nvkm_device *device = gpio->subdev.device; in nv50_gpio_intr_stat() local 99 u32 intr = nvkm_rd32(device, 0x00e054); in nv50_gpio_intr_stat() 100 u32 stat = nvkm_rd32(device, 0x00e050) & intr; in nv50_gpio_intr_stat() [all …]
|
D | g94.c | 29 struct nvkm_device *device = gpio->subdev.device; in g94_gpio_intr_stat() local 30 u32 intr0 = nvkm_rd32(device, 0x00e054); in g94_gpio_intr_stat() 31 u32 intr1 = nvkm_rd32(device, 0x00e074); in g94_gpio_intr_stat() 32 u32 stat0 = nvkm_rd32(device, 0x00e050) & intr0; in g94_gpio_intr_stat() 33 u32 stat1 = nvkm_rd32(device, 0x00e070) & intr1; in g94_gpio_intr_stat() 36 nvkm_wr32(device, 0x00e054, intr0); in g94_gpio_intr_stat() 37 nvkm_wr32(device, 0x00e074, intr1); in g94_gpio_intr_stat() 43 struct nvkm_device *device = gpio->subdev.device; in g94_gpio_intr_mask() local 44 u32 inte0 = nvkm_rd32(device, 0x00e050); in g94_gpio_intr_mask() 45 u32 inte1 = nvkm_rd32(device, 0x00e070); in g94_gpio_intr_mask() [all …]
|
D | gk104.c | 29 struct nvkm_device *device = gpio->subdev.device; in gk104_gpio_intr_stat() local 30 u32 intr0 = nvkm_rd32(device, 0x00dc00); in gk104_gpio_intr_stat() 31 u32 intr1 = nvkm_rd32(device, 0x00dc80); in gk104_gpio_intr_stat() 32 u32 stat0 = nvkm_rd32(device, 0x00dc08) & intr0; in gk104_gpio_intr_stat() 33 u32 stat1 = nvkm_rd32(device, 0x00dc88) & intr1; in gk104_gpio_intr_stat() 36 nvkm_wr32(device, 0x00dc00, intr0); in gk104_gpio_intr_stat() 37 nvkm_wr32(device, 0x00dc80, intr1); in gk104_gpio_intr_stat() 43 struct nvkm_device *device = gpio->subdev.device; in gk104_gpio_intr_mask() local 44 u32 inte0 = nvkm_rd32(device, 0x00dc08); in gk104_gpio_intr_mask() 45 u32 inte1 = nvkm_rd32(device, 0x00dc88); in gk104_gpio_intr_mask() [all …]
|
D | gf119.c | 29 struct nvkm_device *device = gpio->subdev.device; in gf119_gpio_reset() local 30 struct nvkm_bios *bios = device->bios; in gf119_gpio_reset() 49 nvkm_mask(device, 0x00d610 + (line * 4), 0xff, unk0); in gf119_gpio_reset() 51 nvkm_mask(device, 0x00d740 + (unk1 * 4), 0xff, line); in gf119_gpio_reset() 58 struct nvkm_device *device = gpio->subdev.device; in gf119_gpio_drive() local 60 nvkm_mask(device, 0x00d610 + (line * 4), 0x00003000, data); in gf119_gpio_drive() 61 nvkm_mask(device, 0x00d604, 0x00000001, 0x00000001); /* update? */ in gf119_gpio_drive() 68 struct nvkm_device *device = gpio->subdev.device; in gf119_gpio_sense() local 69 return !!(nvkm_rd32(device, 0x00d610 + (line * 4)) & 0x00004000); in gf119_gpio_sense() 83 gf119_gpio_new(struct nvkm_device *device, int index, struct nvkm_gpio **pgpio) in gf119_gpio_new() argument [all …]
|
/linux-4.4.14/sound/i2c/ |
D | i2c.c | 35 static int snd_i2c_bit_sendbytes(struct snd_i2c_device *device, 37 static int snd_i2c_bit_readbytes(struct snd_i2c_device *device, 51 struct snd_i2c_device *device; in snd_i2c_bus_free() local 56 device = snd_i2c_device(bus->devices.next); in snd_i2c_bus_free() 57 snd_i2c_device_free(device); in snd_i2c_bus_free() 73 static int snd_i2c_bus_dev_free(struct snd_device *device) in snd_i2c_bus_dev_free() argument 75 struct snd_i2c_bus *bus = device->device_data; in snd_i2c_bus_dev_free() 116 struct snd_i2c_device *device; in snd_i2c_device_create() local 121 device = kzalloc(sizeof(*device), GFP_KERNEL); in snd_i2c_device_create() 122 if (device == NULL) in snd_i2c_device_create() [all …]
|
D | cs8427.c | 60 int snd_cs8427_reg_write(struct snd_i2c_device *device, unsigned char reg, in snd_cs8427_reg_write() argument 68 if ((err = snd_i2c_sendbytes(device, buf, 2)) != 2) { in snd_cs8427_reg_write() 78 static int snd_cs8427_reg_read(struct snd_i2c_device *device, unsigned char reg) in snd_cs8427_reg_read() argument 83 if ((err = snd_i2c_sendbytes(device, ®, 1)) != 1) { in snd_cs8427_reg_read() 88 if ((err = snd_i2c_readbytes(device, &buf, 1)) != 1) { in snd_cs8427_reg_read() 96 static int snd_cs8427_select_corudata(struct snd_i2c_device *device, int udata) in snd_cs8427_select_corudata() argument 98 struct cs8427 *chip = device->private_data; in snd_cs8427_select_corudata() 105 err = snd_cs8427_reg_write(device, CS8427_REG_CSDATABUF, in snd_cs8427_select_corudata() 113 static int snd_cs8427_send_corudata(struct snd_i2c_device *device, in snd_cs8427_send_corudata() argument 118 struct cs8427 *chip = device->private_data; in snd_cs8427_send_corudata() [all …]
|
/linux-4.4.14/drivers/infiniband/ulp/iser/ |
D | iser_verbs.c | 69 event->device->name, event->element.port_num); in iser_event_handler() 79 static int iser_create_device_ib_res(struct iser_device *device) in iser_create_device_ib_res() argument 81 struct ib_device_attr *dev_attr = &device->dev_attr; in iser_create_device_ib_res() 84 ret = ib_query_device(device->ib_device, dev_attr); in iser_create_device_ib_res() 86 pr_warn("Query device failed for %s\n", device->ib_device->name); in iser_create_device_ib_res() 90 ret = iser_assign_reg_ops(device); in iser_create_device_ib_res() 94 device->comps_used = min_t(int, num_online_cpus(), in iser_create_device_ib_res() 95 device->ib_device->num_comp_vectors); in iser_create_device_ib_res() 97 device->comps = kcalloc(device->comps_used, sizeof(*device->comps), in iser_create_device_ib_res() 99 if (!device->comps) in iser_create_device_ib_res() [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/cipher/ |
D | g84.c | 37 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, in g84_cipher_oclass_bind() 59 return nvkm_gpuobj_new(object->engine->subdev.device, 256, in g84_cipher_cclass_bind() 83 struct nvkm_device *device = subdev->device; in g84_cipher_intr() local 84 struct nvkm_fifo *fifo = device->fifo; in g84_cipher_intr() 86 u32 stat = nvkm_rd32(device, 0x102130); in g84_cipher_intr() 87 u32 mthd = nvkm_rd32(device, 0x102190); in g84_cipher_intr() 88 u32 data = nvkm_rd32(device, 0x102194); in g84_cipher_intr() 89 u32 inst = nvkm_rd32(device, 0x102188) & 0x7fffffff; in g84_cipher_intr() 104 nvkm_wr32(device, 0x102130, stat); in g84_cipher_intr() 105 nvkm_wr32(device, 0x10200c, 0x10); in g84_cipher_intr() [all …]
|
/linux-4.4.14/arch/powerpc/sysdev/ |
D | axonram.c | 62 struct platform_device *device; member 72 axon_ram_sysfs_ecc(struct device *dev, struct device_attribute *attr, char *buf) in axon_ram_sysfs_ecc() 74 struct platform_device *device = to_platform_device(dev); in axon_ram_sysfs_ecc() local 75 struct axon_ram_bank *bank = device->dev.platform_data; in axon_ram_sysfs_ecc() 92 struct platform_device *device = dev; in axon_ram_irq_handler() local 93 struct axon_ram_bank *bank = device->dev.platform_data; in axon_ram_irq_handler() 97 dev_err(&device->dev, "Correctable memory error occurred\n"); in axon_ram_irq_handler() 144 axon_ram_direct_access(struct block_device *device, sector_t sector, in axon_ram_direct_access() argument 147 struct axon_ram_bank *bank = device->bd_disk->private_data; in axon_ram_direct_access() 166 static int axon_ram_probe(struct platform_device *device) in axon_ram_probe() argument [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/bios/ |
D | shadowramin.c | 33 struct nvkm_device *device = bios->subdev.device; in pramin_read() local 37 *(u32 *)&bios->data[i] = nvkm_rd32(device, 0x700000 + i); in pramin_read() 48 struct nvkm_device *device = priv->bios->subdev.device; in pramin_fini() local 49 nvkm_wr32(device, 0x001700, priv->bar0); in pramin_fini() 58 struct nvkm_device *device = subdev->device; in pramin_init() local 63 if (device->card_type < NV_50) in pramin_init() 67 if (device->card_type >= GM100) in pramin_init() 68 addr = nvkm_rd32(device, 0x021c04); in pramin_init() 70 if (device->card_type >= NV_C0) in pramin_init() 71 addr = nvkm_rd32(device, 0x022500); in pramin_init() [all …]
|
/linux-4.4.14/drivers/vfio/ |
D | vfio.c | 68 struct device *dev; 80 struct device *dev; 91 struct device *dev; 205 struct device *dev; in vfio_create_group() 365 struct device *dev, in vfio_group_create_device() 369 struct vfio_device *device; in vfio_group_create_device() local 371 device = kzalloc(sizeof(*device), GFP_KERNEL); in vfio_group_create_device() 372 if (!device) in vfio_group_create_device() 375 kref_init(&device->kref); in vfio_group_create_device() 376 device->dev = dev; in vfio_group_create_device() [all …]
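vfio_group_create_device() above pairs kzalloc() with kref_init(), the usual kref lifecycle. A stand-alone sketch of that pattern with an invented struct; the release callback frees the object once the last reference is dropped:

    #include <linux/kernel.h>
    #include <linux/kref.h>
    #include <linux/slab.h>

    struct thing {
        struct kref kref;
    };

    static void thing_release(struct kref *kref)
    {
        kfree(container_of(kref, struct thing, kref));
    }

    static struct thing *thing_create(void)
    {
        struct thing *t = kzalloc(sizeof(*t), GFP_KERNEL);

        if (t)
            kref_init(&t->kref);    /* reference count starts at 1 */
        return t;
    }

    static void thing_put(struct thing *t)
    {
        kref_put(&t->kref, thing_release);
    }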
|
/linux-4.4.14/arch/parisc/include/asm/ |
D | dma-mapping.h | 10 int (*dma_supported)(struct device *dev, u64 mask); 11 void *(*alloc_consistent)(struct device *dev, size_t size, dma_addr_t *iova, gfp_t flag); 12 void *(*alloc_noncoherent)(struct device *dev, size_t size, dma_addr_t *iova, gfp_t flag); 13 void (*free_consistent)(struct device *dev, size_t size, void *vaddr, dma_addr_t iova); 14 …dma_addr_t (*map_single)(struct device *dev, void *addr, size_t size, enum dma_data_direction dire… 15 …void (*unmap_single)(struct device *dev, dma_addr_t iova, size_t size, enum dma_data_direction dir… 16 …int (*map_sg)(struct device *dev, struct scatterlist *sg, int nents, enum dma_data_direction dire… 17 …void (*unmap_sg)(struct device *dev, struct scatterlist *sg, int nhwents, enum dma_data_direction … 18 …void (*dma_sync_single_for_cpu)(struct device *dev, dma_addr_t iova, unsigned long offset, size_t … 19 …void (*dma_sync_single_for_device)(struct device *dev, dma_addr_t iova, unsigned long offset, size… [all …]
|
/linux-4.4.14/sound/soc/intel/atom/ |
D | sst-mfld-platform.h | 104 int (*open)(struct device *dev, 106 int (*stream_start)(struct device *dev, unsigned int str_id); 107 int (*stream_drop)(struct device *dev, unsigned int str_id); 108 int (*stream_drain)(struct device *dev, unsigned int str_id); 109 int (*stream_partial_drain)(struct device *dev, unsigned int str_id); 110 int (*stream_pause)(struct device *dev, unsigned int str_id); 111 int (*stream_pause_release)(struct device *dev, unsigned int str_id); 113 int (*tstamp)(struct device *dev, unsigned int str_id, 115 int (*ack)(struct device *dev, unsigned int str_id, 117 int (*close)(struct device *dev, unsigned int str_id); [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
D | nv41.c | 72 struct nvkm_device *device = mmu->base.subdev.device; in nv41_vm_flush() local 75 nvkm_wr32(device, 0x100810, 0x00000022); in nv41_vm_flush() 76 nvkm_msec(device, 2000, in nv41_vm_flush() 77 if (nvkm_rd32(device, 0x100810) & 0x00000020) in nv41_vm_flush() 80 nvkm_wr32(device, 0x100810, 0x00000000); in nv41_vm_flush() 92 struct nvkm_device *device = mmu->base.subdev.device; in nv41_mmu_oneinit() local 100 ret = nvkm_memory_new(device, NVKM_MEM_TARGET_INST, in nv41_mmu_oneinit() 111 struct nvkm_device *device = mmu->base.subdev.device; in nv41_mmu_init() local 113 nvkm_wr32(device, 0x100800, 0x00000002 | nvkm_memory_addr(dma)); in nv41_mmu_init() 114 nvkm_mask(device, 0x10008c, 0x00000100, 0x00000100); in nv41_mmu_init() [all …]
|
D | nv44.c | 147 struct nvkm_device *device = mmu->base.subdev.device; in nv44_vm_flush() local 148 nvkm_wr32(device, 0x100814, mmu->base.limit - NV44_GART_PAGE); in nv44_vm_flush() 149 nvkm_wr32(device, 0x100808, 0x00000020); in nv44_vm_flush() 150 nvkm_msec(device, 2000, in nv44_vm_flush() 151 if (nvkm_rd32(device, 0x100808) & 0x00000001) in nv44_vm_flush() 154 nvkm_wr32(device, 0x100808, 0x00000000); in nv44_vm_flush() 165 struct nvkm_device *device = mmu->base.subdev.device; in nv44_mmu_oneinit() local 168 mmu->nullp = dma_alloc_coherent(device->dev, 16 * 1024, in nv44_mmu_oneinit() 180 ret = nvkm_memory_new(device, NVKM_MEM_TARGET_INST, in nv44_mmu_oneinit() 192 struct nvkm_device *device = mmu->base.subdev.device; in nv44_mmu_init() local [all …]
|
/linux-4.4.14/drivers/nvdimm/ |
D | nd.h | 42 struct device *dev; 99 struct device dev; 104 struct device *ns_seed; 105 struct device *btt_seed; 106 struct device *pfn_seed; 118 int (*enable)(struct nvdimm_bus *nvdimm_bus, struct device *dev); 119 void (*disable)(struct nvdimm_bus *nvdimm_bus, struct device *dev); 138 struct device dev; 155 struct device dev; 168 void wait_nvdimm_bus_probe_idle(struct device *dev); [all …]
|
D | nd-core.h | 31 struct device dev; 40 struct device dev; 45 bool is_nvdimm(struct device *dev); 46 bool is_nd_pmem(struct device *dev); 47 bool is_nd_blk(struct device *dev); 48 struct nvdimm_bus *walk_to_nvdimm_bus(struct device *nd_dev); 51 void nd_region_probe_success(struct nvdimm_bus *nvdimm_bus, struct device *dev); 55 void nd_region_disable(struct nvdimm_bus *nvdimm_bus, struct device *dev); 62 void __nd_device_register(struct device *dev); 63 int nd_match_dimm(struct device *dev, void *data); [all …]
|
/linux-4.4.14/arch/frv/include/asm/ |
D | dma-mapping.h | 21 void *dma_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle, gfp_t gfp); 22 void dma_free_coherent(struct device *dev, size_t size, void *vaddr, dma_addr_t dma_handle); 24 extern dma_addr_t dma_map_single(struct device *dev, void *ptr, size_t size, 28 void dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size, in dma_unmap_single() 34 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, 38 void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() 45 dma_addr_t dma_map_page(struct device *dev, struct page *page, unsigned long offset, 49 void dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size, in dma_unmap_page() 57 void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle, size_t size, in dma_sync_single_for_cpu() 63 void dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle, size_t size, in dma_sync_single_for_device() [all …]
|
/linux-4.4.14/include/linux/gpio/ |
D | consumer.h | 8 struct device; 47 int gpiod_count(struct device *dev, const char *con_id); 50 struct gpio_desc *__must_check gpiod_get(struct device *dev, 53 struct gpio_desc *__must_check gpiod_get_index(struct device *dev, 57 struct gpio_desc *__must_check gpiod_get_optional(struct device *dev, 60 struct gpio_desc *__must_check gpiod_get_index_optional(struct device *dev, 64 struct gpio_descs *__must_check gpiod_get_array(struct device *dev, 67 struct gpio_descs *__must_check gpiod_get_array_optional(struct device *dev, 73 struct gpio_desc *__must_check devm_gpiod_get(struct device *dev, 76 struct gpio_desc *__must_check devm_gpiod_get_index(struct device *dev, [all …]
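The gpio/consumer.h hit lists the gpiod_get() family; a typical consumer requests a line by function name and drives it through the descriptor. A hedged sketch where the "reset" name and the pulse itself are invented:

    #include <linux/gpio/consumer.h>
    #include <linux/device.h>
    #include <linux/err.h>
    #include <linux/delay.h>

    /* Request a reset line, assert it briefly, then release it. */
    static int example_reset(struct device *dev)
    {
        struct gpio_desc *reset;

        reset = devm_gpiod_get(dev, "reset", GPIOD_OUT_HIGH);
        if (IS_ERR(reset))
            return PTR_ERR(reset);

        usleep_range(1000, 2000);
        gpiod_set_value(reset, 0);
        return 0;
    }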
|
/linux-4.4.14/arch/cris/include/asm/ |
D | dma-mapping.h | 19 void *dma_alloc_coherent(struct device *dev, size_t size, 22 void dma_free_coherent(struct device *dev, size_t size, 26 dma_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle, in dma_alloc_coherent() 34 dma_free_coherent(struct device *dev, size_t size, void *cpu_addr, in dma_free_coherent() 41 dma_map_single(struct device *dev, void *ptr, size_t size, in dma_map_single() 49 dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size, in dma_unmap_single() 56 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() 64 dma_map_page(struct device *dev, struct page *page, unsigned long offset, in dma_map_page() 72 dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size, in dma_unmap_page() 80 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() [all …]
|
/linux-4.4.14/include/linux/platform_data/ |
D | mmc-omap.h | 17 struct device *dev; 27 int (*switch_slot)(struct device *dev, int slot); 30 int (*init)(struct device *dev); 31 void (*cleanup)(struct device *dev); 32 void (*shutdown)(struct device *dev); 35 int (*get_context_loss_count)(struct device *dev); 89 int (*set_bus_mode)(struct device *dev, int slot, int bus_mode); 90 int (*set_power)(struct device *dev, int slot, 92 int (*get_ro)(struct device *dev, int slot); 93 void (*remux)(struct device *dev, int slot, int power_on); [all …]
|
/linux-4.4.14/include/linux/pinctrl/ |
D | consumer.h | 23 struct device; 33 extern struct pinctrl * __must_check pinctrl_get(struct device *dev); 40 extern struct pinctrl * __must_check devm_pinctrl_get(struct device *dev); 44 extern int pinctrl_pm_select_default_state(struct device *dev); 45 extern int pinctrl_pm_select_sleep_state(struct device *dev); 46 extern int pinctrl_pm_select_idle_state(struct device *dev); 48 static inline int pinctrl_pm_select_default_state(struct device *dev) in pinctrl_pm_select_default_state() 52 static inline int pinctrl_pm_select_sleep_state(struct device *dev) in pinctrl_pm_select_sleep_state() 56 static inline int pinctrl_pm_select_idle_state(struct device *dev) in pinctrl_pm_select_idle_state() 83 static inline struct pinctrl * __must_check pinctrl_get(struct device *dev) in pinctrl_get() [all …]
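The pinctrl consumer.h hit includes the PM state helpers; drivers usually call them from their suspend/resume callbacks so the pins follow the device's power state. A minimal sketch of that wiring (the surrounding suspend/resume work is omitted):

    #include <linux/pinctrl/consumer.h>
    #include <linux/device.h>

    static int example_suspend(struct device *dev)
    {
        /* Park the pins in their "sleep" pinctrl state, if one exists. */
        return pinctrl_pm_select_sleep_state(dev);
    }

    static int example_resume(struct device *dev)
    {
        /* Restore the "default" pinctrl state on wake-up. */
        return pinctrl_pm_select_default_state(dev);
    }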
|
/linux-4.4.14/include/linux/mfd/ |
D | abx500.h | 16 struct device; 304 int ab8500_bm_of_probe(struct device *dev, 308 int abx500_set_register_interruptible(struct device *dev, u8 bank, u8 reg, 310 int abx500_get_register_interruptible(struct device *dev, u8 bank, u8 reg, 312 int abx500_get_register_page_interruptible(struct device *dev, u8 bank, 314 int abx500_set_register_page_interruptible(struct device *dev, u8 bank, 328 int abx500_mask_and_set_register_interruptible(struct device *dev, u8 bank, 330 int abx500_get_chip_id(struct device *dev); 331 int abx500_event_registers_startup_state_get(struct device *dev, u8 *event); 332 int abx500_startup_irq_enabled(struct device *dev, unsigned int irq); [all …]
|
/linux-4.4.14/include/asm-generic/ |
D | dma-mapping-broken.h | 12 dma_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle, 16 dma_free_coherent(struct device *dev, size_t size, void *cpu_addr, 19 static inline void *dma_alloc_attrs(struct device *dev, size_t size, in dma_alloc_attrs() 27 static inline void dma_free_attrs(struct device *dev, size_t size, in dma_free_attrs() 39 dma_map_single(struct device *dev, void *ptr, size_t size, 43 dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size, 47 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, 51 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, 55 dma_map_page(struct device *dev, struct page *page, unsigned long offset, 59 dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size, [all …]
|
/linux-4.4.14/Documentation/device-mapper/ |
D | zero.txt | 4 Device-Mapper's "zero" target provides a block-device that always returns 6 /dev/zero, but as a block-device instead of a character-device. 11 conjunction with dm-snapshot. A sparse device reports a device-size larger 12 than the amount of actual storage space available for that device. A user can 13 write data anywhere within the sparse device and read it back like a normal 14 device. Reads to previously unwritten areas will return a zero'd buffer. When 16 device is deactivated. This can be very useful for testing device and 19 To create a sparse device, start by creating a dm-zero device that's the 20 desired size of the sparse device. For this example, we'll assume a 10TB 21 sparse device. [all …]
|
/linux-4.4.14/Documentation/driver-model/ |
D | device.txt | 5 See the kerneldoc for the struct device. 10 The bus driver that discovers the device uses this to register the 11 device with the core: 13 int device_register(struct device * dev); 22 A device is removed from the core when its reference count goes to 25 struct device * get_device(struct device * dev); 26 void put_device(struct device * dev); 28 get_device() will return a pointer to the struct device passed to it 32 A driver can access the lock in the device structure using: 34 void lock_device(struct device * dev); [all …]
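device.txt above describes the reference-counting pair get_device()/put_device(). A short sketch of holding a reference across a use of a device obtained from elsewhere; per the text, a NULL return from get_device() means the device is already on its way out:

    #include <linux/device.h>

    static void example_use_device(struct device *dev)
    {
        if (!get_device(dev))
            return;    /* reference could not be taken, device going away */

        dev_info(dev, "using %s\n", dev_name(dev));

        put_device(dev);    /* drop the reference taken above */
    }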
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/ |
D | busgf119.c | 36 struct nvkm_device *device = bus->base.pad->i2c->subdev.device; in gf119_i2c_bus_drive_scl() local 37 nvkm_mask(device, bus->addr, 0x00000001, state ? 0x00000001 : 0); in gf119_i2c_bus_drive_scl() 44 struct nvkm_device *device = bus->base.pad->i2c->subdev.device; in gf119_i2c_bus_drive_sda() local 45 nvkm_mask(device, bus->addr, 0x00000002, state ? 0x00000002 : 0); in gf119_i2c_bus_drive_sda() 52 struct nvkm_device *device = bus->base.pad->i2c->subdev.device; in gf119_i2c_bus_sense_scl() local 53 return !!(nvkm_rd32(device, bus->addr) & 0x00000010); in gf119_i2c_bus_sense_scl() 60 struct nvkm_device *device = bus->base.pad->i2c->subdev.device; in gf119_i2c_bus_sense_sda() local 61 return !!(nvkm_rd32(device, bus->addr) & 0x00000020); in gf119_i2c_bus_sense_sda() 68 struct nvkm_device *device = bus->base.pad->i2c->subdev.device; in gf119_i2c_bus_init() local 69 nvkm_wr32(device, bus->addr, 0x00000007); in gf119_i2c_bus_init()
|
/linux-4.4.14/drivers/pci/pcie/ |
D | portdrv_pci.c | 82 static int pcie_port_resume_noirq(struct device *dev) in pcie_port_resume_noirq() 150 static int error_detected_iter(struct device *device, void *data) in error_detected_iter() argument 159 if (device->bus == &pcie_port_bus_type && device->driver) { in error_detected_iter() 160 driver = to_service_driver(device->driver); in error_detected_iter() 166 pcie_device = to_pcie_device(device); in error_detected_iter() 189 static int mmio_enabled_iter(struct device *device, void *data) in mmio_enabled_iter() argument 197 if (device->bus == &pcie_port_bus_type && device->driver) { in mmio_enabled_iter() 198 driver = to_service_driver(device->driver); in mmio_enabled_iter() 202 pcie_device = to_pcie_device(device); in mmio_enabled_iter() 223 static int slot_reset_iter(struct device *device, void *data) in slot_reset_iter() argument [all …]
|
/linux-4.4.14/Documentation/usb/ |
D | chipidea.txt | 23 The A-device (with micro A plug inserted) should enumerate B-device. 26 On B-device: 30 On A-device: 33 B-device should take host role and enumerate A-device. 35 4) A-device switches back to host. 36 On B-device: 39 A-device should switch back to host and enumerate B-device. 41 5) Remove B-device (unplug micro B plug) and insert again in 10 seconds, 42 A-device should enumerate B-device again. 44 6) Remove B-device (unplug micro B plug) and insert again after 10 seconds, [all …]
|
/linux-4.4.14/drivers/net/wireless/brcm80211/brcmfmac/ |
D | bus.h | 77 int (*preinit)(struct device *dev); 78 void (*stop)(struct device *dev); 79 int (*txdata)(struct device *dev, struct sk_buff *skb); 80 int (*txctl)(struct device *dev, unsigned char *msg, uint len); 81 int (*rxctl)(struct device *dev, unsigned char *msg, uint len); 82 struct pktq * (*gettxq)(struct device *dev); 83 void (*wowl_config)(struct device *dev, bool enabled); 84 size_t (*get_ramsize)(struct device *dev); 85 int (*get_memdump)(struct device *dev, void *data, size_t len); 130 struct device *dev; [all …]
|
/linux-4.4.14/arch/nios2/include/asm/ |
D | dma-mapping.h | 58 void *dma_alloc_coherent(struct device *dev, size_t size, 61 void dma_free_coherent(struct device *dev, size_t size, 64 static inline dma_addr_t dma_map_single(struct device *dev, void *ptr, in dma_map_single() 73 static inline void dma_unmap_single(struct device *dev, dma_addr_t dma_addr, in dma_unmap_single() 78 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, 80 extern dma_addr_t dma_map_page(struct device *dev, struct page *page, 82 extern void dma_unmap_page(struct device *dev, dma_addr_t dma_address, 84 extern void dma_unmap_sg(struct device *dev, struct scatterlist *sg, 86 extern void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle, 88 extern void dma_sync_single_for_device(struct device *dev, [all …]
|
/linux-4.4.14/arch/avr32/include/asm/ |
D | dma-mapping.h | 11 extern void dma_cache_sync(struct device *dev, void *vaddr, size_t size, 20 static inline int dma_supported(struct device *dev, u64 mask) in dma_supported() 26 static inline int dma_set_mask(struct device *dev, u64 dma_mask) in dma_set_mask() 38 static inline int dma_mapping_error(struct device *dev, dma_addr_t addr) in dma_mapping_error() 54 extern void *dma_alloc_coherent(struct device *dev, size_t size, 70 extern void dma_free_coherent(struct device *dev, size_t size, 84 extern void *dma_alloc_writecombine(struct device *dev, size_t size, 100 extern void dma_free_writecombine(struct device *dev, size_t size, 117 dma_map_single(struct device *dev, void *cpu_addr, size_t size, in dma_map_single() 139 dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size, in dma_unmap_single() [all …]
|